diff --git a/.github/workflows/flyteidl-buf-publish.yml b/.github/workflows/flyteidl-buf-publish.yml index cbb530421c..96bd0085a6 100644 --- a/.github/workflows/flyteidl-buf-publish.yml +++ b/.github/workflows/flyteidl-buf-publish.yml @@ -3,7 +3,7 @@ name: Publish flyteidl Buf Package on: push: branches: - - artifacts-shell + - artifacts-shell-2 - artifacts - master paths: diff --git a/.github/workflows/flyteidl-checks.yml b/.github/workflows/flyteidl-checks.yml index 7e95f2e35c..1811126d91 100644 --- a/.github/workflows/flyteidl-checks.yml +++ b/.github/workflows/flyteidl-checks.yml @@ -50,3 +50,5 @@ jobs: with: component: flyteidl go-version: ${{ needs.unpack-envvars.outputs.go-version }} + secrets: + FLYTE_BOT_PAT: ${{ secrets.FLYTE_BOT_PAT }} diff --git a/.readthedocs.yml b/.readthedocs.yml index b63d11acfc..1dd699bf9b 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -7,13 +7,17 @@ version: 2 build: os: "ubuntu-22.04" tools: - python: "3.11" + python: "mambaforge-22.9" + jobs: + post_install: + - conda-lock install --name $READTHEDOCS_VERSION monodocs-environment.lock.yaml + - conda info + - conda env list + - conda list # Build documentation in the docs/ directory with Sphinx sphinx: - configuration: rsts/conf.py + configuration: docs/conf.py -# Optionally set the version of Python and requirements required to build your docs -python: - install: - - requirements: doc-requirements.in +conda: + environment: docs/build-environment.yaml diff --git a/CHANGELOG/CHANGELOG-v1.10.7-b0.md b/CHANGELOG/CHANGELOG-v1.10.7-b0.md new file mode 100644 index 0000000000..30d8488589 --- /dev/null +++ b/CHANGELOG/CHANGELOG-v1.10.7-b0.md @@ -0,0 +1,3 @@ +# Flyte v1.10.7-b0 Release + +Beta release. diff --git a/CHANGELOG/CHANGELOG-v1.10.7-b1.md b/CHANGELOG/CHANGELOG-v1.10.7-b1.md new file mode 100644 index 0000000000..4336ef670a --- /dev/null +++ b/CHANGELOG/CHANGELOG-v1.10.7-b1.md @@ -0,0 +1,3 @@ +# Flyte v1.10.7-b1 Release + +Pre-release testing. 
diff --git a/CHANGELOG/CHANGELOG-v1.10.7-b2.md b/CHANGELOG/CHANGELOG-v1.10.7-b2.md new file mode 100644 index 0000000000..993a66dc95 --- /dev/null +++ b/CHANGELOG/CHANGELOG-v1.10.7-b2.md @@ -0,0 +1,3 @@ +# Flyte v1.10.7-b2 Release + +Pre-release testing. diff --git a/README.md b/README.md index 4f5a86fafb..24c2b3aff6 100644 --- a/README.md +++ b/README.md @@ -138,7 +138,7 @@ Join the likes of LinkedIn, Spotify, Freenome, Pachama, Warner Bros. and many ot ## How to stay involved 📆 [Weekly office hours](https://calendly.com/flyte-office-hours-01/30min): Live informal sessions with the Flyte team held every week. Book a 30-minute slot and get your questions answered.
-👥 [Biweekly community sync](https://www.addevent.com/event/EA7823958): A biweekly event where the Flyte team provides updates on the project and community members can share their progress and ask questions.
+👥 [Monthly community sync](https://www.addevent.com/event/EA7823958): Happening the first Tuesday of every month, this is where the Flyte team provides updates on the project, and community members can share their progress and ask questions.
💬 [Slack](https://slack.flyte.org/): Join the Flyte community on Slack to chat with other users, ask questions, and get help.
⚠️ [Newsletter](https://lists.lfaidata.foundation/g/flyte-announce/join): join this group to receive the Flyte Monthly newsletter.
📹 [Youtube](https://www.youtube.com/channel/UCNduEoLOToNo3nFVly-vUTQ): Tune into panel discussions, customer success stories, community updates and feature deep dives.
diff --git a/charts/flyte-binary/README.md b/charts/flyte-binary/README.md index c4fa8cfc5e..db31ef5a42 100644 --- a/charts/flyte-binary/README.md +++ b/charts/flyte-binary/README.md @@ -42,7 +42,7 @@ Chart for basic single Flyte executable deployment | configuration.auth.oidc.clientId | string | `""` | | | configuration.auth.oidc.clientSecret | string | `""` | | | configuration.co-pilot.image.repository | string | `"cr.flyte.org/flyteorg/flytecopilot"` | | -| configuration.co-pilot.image.tag | string | `"v1.10.6"` | | +| configuration.co-pilot.image.tag | string | `"v1.10.7-b2"` | | | configuration.database.dbname | string | `"flyte"` | | | configuration.database.host | string | `"127.0.0.1"` | | | configuration.database.options | string | `"sslmode=disable"` | | diff --git a/charts/flyte-binary/values.yaml b/charts/flyte-binary/values.yaml index b8a8b58c3c..070bd4f82a 100644 --- a/charts/flyte-binary/values.yaml +++ b/charts/flyte-binary/values.yaml @@ -159,7 +159,7 @@ configuration: # repository CoPilot sidecar image repository repository: cr.flyte.org/flyteorg/flytecopilot # FLYTECOPILOT_IMAGE # tag CoPilot sidecar image tag - tag: v1.10.6 # FLYTECOPILOT_TAG + tag: v1.10.7-b2 # FLYTECOPILOT_TAG # agentService Flyte Agent configuration agentService: defaultAgent: diff --git a/charts/flyte-core/README.md b/charts/flyte-core/README.md index 05b179c5d1..5ccb9f60f0 100644 --- a/charts/flyte-core/README.md +++ b/charts/flyte-core/README.md @@ -60,13 +60,15 @@ helm install gateway bitnami/contour -n flyte | cloud_events.eventsPublisher.eventTypes[0] | string | `"all"` | | | cloud_events.eventsPublisher.topicName | string | `"arn:aws:sns:us-east-2:123456:123-my-topic"` | | | cloud_events.type | string | `"aws"` | | -| cluster_resource_manager | object | 
`{"config":{"cluster_resources":{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}},"enabled":true,"podAnnotations":{},"service_account_name":"flyteadmin","standaloneDeployment":false,"templates":[{"key":"aa_namespace","value":"apiVersion: v1\nkind: Namespace\nmetadata:\n name: {{ namespace }}\nspec:\n finalizers:\n - kubernetes\n"},{"key":"ab_project_resource_quota","value":"apiVersion: v1\nkind: ResourceQuota\nmetadata:\n name: project-quota\n namespace: {{ namespace }}\nspec:\n hard:\n limits.cpu: {{ projectQuotaCpu }}\n limits.memory: {{ projectQuotaMemory }}\n"}]}` | Configuration for the Cluster resource manager component. This is an optional component, that enables automatic cluster configuration. 
This is useful to set default quotas, manage namespaces etc that map to a project/domain | +| cluster_resource_manager | object | `{"config":{"cluster_resources":{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}},"enabled":true,"podAnnotations":{},"podEnv":{},"podLabels":{},"service_account_name":"flyteadmin","standaloneDeployment":false,"templates":[{"key":"aa_namespace","value":"apiVersion: v1\nkind: Namespace\nmetadata:\n name: {{ namespace }}\nspec:\n finalizers:\n - kubernetes\n"},{"key":"ab_project_resource_quota","value":"apiVersion: v1\nkind: ResourceQuota\nmetadata:\n name: project-quota\n namespace: {{ namespace }}\nspec:\n hard:\n limits.cpu: {{ projectQuotaCpu }}\n limits.memory: {{ projectQuotaMemory }}\n"}]}` | Configuration for the Cluster resource manager component. This is an optional component, that enables automatic cluster configuration. 
This is useful to set default quotas, manage namespaces etc that map to a project/domain | | cluster_resource_manager.config | object | `{"cluster_resources":{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}}` | Configmap for ClusterResource parameters | | cluster_resource_manager.config.cluster_resources | object | `{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}` | ClusterResource parameters Refer to the [structure](https://pkg.go.dev/github.com/lyft/flyteadmin@v0.3.37/pkg/runtime/interfaces#ClusterResourceConfig) to customize. 
| | cluster_resource_manager.config.cluster_resources.refreshInterval | string | `"5m"` | How frequently to run the sync process | | cluster_resource_manager.config.cluster_resources.standaloneDeployment | bool | `false` | Starts the cluster resource manager in standalone mode with requisite auth credentials to call flyteadmin service endpoints | | cluster_resource_manager.enabled | bool | `true` | Enables the Cluster resource manager component | | cluster_resource_manager.podAnnotations | object | `{}` | Annotations for ClusterResource pods | +| cluster_resource_manager.podEnv | object | `{}` | Additional ClusterResource container environment variables | +| cluster_resource_manager.podLabels | object | `{}` | Labels for ClusterResource pods | | cluster_resource_manager.service_account_name | string | `"flyteadmin"` | Service account name to run with | | cluster_resource_manager.templates | list | `[{"key":"aa_namespace","value":"apiVersion: v1\nkind: Namespace\nmetadata:\n name: {{ namespace }}\nspec:\n finalizers:\n - kubernetes\n"},{"key":"ab_project_resource_quota","value":"apiVersion: v1\nkind: ResourceQuota\nmetadata:\n name: project-quota\n namespace: {{ namespace }}\nspec:\n hard:\n limits.cpu: {{ projectQuotaCpu }}\n limits.memory: {{ projectQuotaMemory }}\n"}]` | Resource templates that should be applied | | cluster_resource_manager.templates[0] | object | `{"key":"aa_namespace","value":"apiVersion: v1\nkind: Namespace\nmetadata:\n name: {{ namespace }}\nspec:\n finalizers:\n - kubernetes\n"}` | Template for namespaces resources | @@ -90,8 +92,8 @@ helm install gateway bitnami/contour -n flyte | configmap.clusters.clusterConfigs | list | `[]` | | | configmap.clusters.labelClusterMap | object | `{}` | | | configmap.console | object | `{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"}` | Configuration for Flyte console UI | -| configmap.copilot | object | 
`{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.6","name":"flyte-copilot-","start-timeout":"30s"}}}}` | Copilot configuration | -| configmap.copilot.plugins.k8s.co-pilot | object | `{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.6","name":"flyte-copilot-","start-timeout":"30s"}` | Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) | +| configmap.copilot | object | `{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2","name":"flyte-copilot-","start-timeout":"30s"}}}}` | Copilot configuration | +| configmap.copilot.plugins.k8s.co-pilot | object | `{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2","name":"flyte-copilot-","start-timeout":"30s"}` | Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) | | configmap.core | object | `{"manager":{"pod-application":"flytepropeller","pod-template-container-name":"flytepropeller","pod-template-name":"flytepropeller-template"},"propeller":{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}}` | Core propeller configuration | | configmap.core.manager | object | 
`{"pod-application":"flytepropeller","pod-template-container-name":"flytepropeller","pod-template-name":"flytepropeller-template"}` | follows the structure specified [here](https://pkg.go.dev/github.com/flyteorg/flytepropeller/manager/config#Config). | | configmap.core.propeller | object | `{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"}` | follows the structure specified [here](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/config). | @@ -111,8 +113,8 @@ helm install gateway bitnami/contour -n flyte | configmap.schedulerConfig.scheduler.profilerPort | int | `10254` | | | configmap.task_logs | object | `{"plugins":{"logs":{"cloudwatch-enabled":false,"kubernetes-enabled":false}}}` | Section that configures how the Task logs are displayed on the UI. This has to be changed based on your actual logging provider. 
Refer to [structure](https://pkg.go.dev/github.com/lyft/flyteplugins/go/tasks/logs#LogConfig) to understand how to configure various logging engines | | configmap.task_logs.plugins.logs.cloudwatch-enabled | bool | `false` | One option is to enable cloudwatch logging for EKS, update the region and log group accordingly | -| configmap.task_resource_defaults | object | `{"task_resources":{"defaults":{"cpu":"100m","memory":"500Mi","storage":"500Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi","storage":"20Mi"}}}` | Task default resources configuration Refer to the full [structure](https://pkg.go.dev/github.com/lyft/flyteadmin@v0.3.37/pkg/runtime/interfaces#TaskResourceConfiguration). | -| configmap.task_resource_defaults.task_resources | object | `{"defaults":{"cpu":"100m","memory":"500Mi","storage":"500Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi","storage":"20Mi"}}` | Task default resources parameters | +| configmap.task_resource_defaults | object | `{"task_resources":{"defaults":{"cpu":"100m","ephemeralStorage":"500Mi","memory":"500Mi"},"limits":{"cpu":2,"ephemeralStorage":"20Mi","gpu":1,"memory":"1Gi"}}}` | Task default resources configuration Refer to the full [structure](https://pkg.go.dev/github.com/lyft/flyteadmin@v0.3.37/pkg/runtime/interfaces#TaskResourceConfiguration). 
| +| configmap.task_resource_defaults.task_resources | object | `{"defaults":{"cpu":"100m","ephemeralStorage":"500Mi","memory":"500Mi"},"limits":{"cpu":2,"ephemeralStorage":"20Mi","gpu":1,"memory":"1Gi"}}` | Task default resources parameters | | daskoperator | object | `{"enabled":false}` | Optional: Dask Plugin using the Dask Operator | | daskoperator.enabled | bool | `false` | - enable or disable the dask operator deployment installation | | databricks | object | `{"enabled":false,"plugin_config":{"plugins":{"databricks":{"databricksInstance":"dbc-a53b7a3c-614c","entrypointFile":"dbfs:///FileStore/tables/entrypoint.py"}}}}` | Optional: Databricks Plugin allows us to run the spark job on the Databricks platform. | @@ -125,9 +127,11 @@ helm install gateway bitnami/contour -n flyte | datacatalog.extraArgs | object | `{}` | Appends extra command line arguments to the main command | | datacatalog.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | datacatalog.image.repository | string | `"cr.flyte.org/flyteorg/datacatalog"` | Docker image for Datacatalog deployment | -| datacatalog.image.tag | string | `"v1.10.6"` | Docker image tag | +| datacatalog.image.tag | string | `"v1.10.7-b2"` | Docker image tag | | datacatalog.nodeSelector | object | `{}` | nodeSelector for Datacatalog deployment | | datacatalog.podAnnotations | object | `{}` | Annotations for Datacatalog pods | +| datacatalog.podEnv | object | `{}` | Additional Datacatalog container environment variables | +| datacatalog.podLabels | object | `{}` | Labels for Datacatalog pods | | datacatalog.priorityClassName | string | `""` | Sets priorityClassName for datacatalog pod(s). 
| | datacatalog.replicaCount | int | `1` | Replicas count for Datacatalog deployment | | datacatalog.resources | object | `{"limits":{"cpu":"500m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}}` | Default resources requests and limits for Datacatalog deployment | @@ -157,10 +161,11 @@ helm install gateway bitnami/contour -n flyte | flyteadmin.extraArgs | object | `{}` | Appends extra command line arguments to the serve command | | flyteadmin.image.pullPolicy | string | `"IfNotPresent"` | | | flyteadmin.image.repository | string | `"cr.flyte.org/flyteorg/flyteadmin"` | Docker image for Flyteadmin deployment | -| flyteadmin.image.tag | string | `"v1.10.6"` | | +| flyteadmin.image.tag | string | `"v1.10.7-b2"` | | | flyteadmin.initialProjects | list | `["flytesnacks","flytetester","flyteexamples"]` | Initial projects to create | | flyteadmin.nodeSelector | object | `{}` | nodeSelector for Flyteadmin deployment | | flyteadmin.podAnnotations | object | `{}` | Annotations for Flyteadmin pods | +| flyteadmin.podLabels | object | `{}` | Labels for Flyteadmin pods | | flyteadmin.priorityClassName | string | `""` | Sets priorityClassName for flyteadmin pod(s). 
| | flyteadmin.replicaCount | int | `1` | Replicas count for Flyteadmin deployment | | flyteadmin.resources | object | `{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}}` | Default resources requests and limits for Flyteadmin deployment | @@ -192,6 +197,8 @@ helm install gateway bitnami/contour -n flyte | flyteconsole.image.tag | string | `"v1.10.2"` | | | flyteconsole.nodeSelector | object | `{}` | nodeSelector for Flyteconsole deployment | | flyteconsole.podAnnotations | object | `{}` | Annotations for Flyteconsole pods | +| flyteconsole.podEnv | object | `{}` | Additional Flyteconsole container environment variables | +| flyteconsole.podLabels | object | `{}` | Labels for Flyteconsole pods | | flyteconsole.priorityClassName | string | `""` | Sets priorityClassName for flyte console pod(s). | | flyteconsole.replicaCount | int | `1` | Replicas count for Flyteconsole deployment | | flyteconsole.resources | object | `{"limits":{"cpu":"500m","memory":"250Mi"},"requests":{"cpu":"10m","memory":"50Mi"}}` | Default resources requests and limits for Flyteconsole deployment | @@ -209,10 +216,12 @@ helm install gateway bitnami/contour -n flyte | flytepropeller.extraArgs | object | `{}` | Appends extra command line arguments to the main command | | flytepropeller.image.pullPolicy | string | `"IfNotPresent"` | | | flytepropeller.image.repository | string | `"cr.flyte.org/flyteorg/flytepropeller"` | Docker image for Flytepropeller deployment | -| flytepropeller.image.tag | string | `"v1.10.6"` | | +| flytepropeller.image.tag | string | `"v1.10.7-b2"` | | | flytepropeller.manager | bool | `false` | | | flytepropeller.nodeSelector | object | `{}` | nodeSelector for Flytepropeller deployment | | flytepropeller.podAnnotations | object | `{}` | Annotations for Flytepropeller pods | +| flytepropeller.podEnv | object | `{}` | Additional Flytepropeller container environment variables | +| 
flytepropeller.podLabels | object | `{}` | Labels for Flytepropeller pods | | flytepropeller.priorityClassName | string | `""` | Sets priorityClassName for propeller pod(s). | | flytepropeller.replicaCount | int | `1` | Replicas count for Flytepropeller deployment | | flytepropeller.resources | object | `{"limits":{"cpu":"200m","ephemeral-storage":"100Mi","memory":"200Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"100Mi"}}` | Default resources requests and limits for Flytepropeller deployment | @@ -236,9 +245,11 @@ helm install gateway bitnami/contour -n flyte | flytescheduler.configPath | string | `"/etc/flyte/config/*.yaml"` | Default regex string for searching configuration files | | flytescheduler.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | flytescheduler.image.repository | string | `"cr.flyte.org/flyteorg/flytescheduler"` | Docker image for Flytescheduler deployment | -| flytescheduler.image.tag | string | `"v1.10.6"` | Docker image tag | +| flytescheduler.image.tag | string | `"v1.10.7-b2"` | Docker image tag | | flytescheduler.nodeSelector | object | `{}` | nodeSelector for Flytescheduler deployment | | flytescheduler.podAnnotations | object | `{}` | Annotations for Flytescheduler pods | +| flytescheduler.podEnv | object | `{}` | Additional Flytescheduler container environment variables | +| flytescheduler.podLabels | object | `{}` | Labels for Flytescheduler pods | | flytescheduler.priorityClassName | string | `""` | Sets priorityClassName for flyte scheduler pod(s). 
| | flytescheduler.resources | object | `{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}}` | Default resources requests and limits for Flytescheduler deployment | | flytescheduler.runPrecheck | bool | `true` | Whether to inject an init container which waits on flyteadmin | diff --git a/charts/flyte-core/templates/_helpers.tpl b/charts/flyte-core/templates/_helpers.tpl index 0221874d5c..2c3b059841 100755 --- a/charts/flyte-core/templates/_helpers.tpl +++ b/charts/flyte-core/templates/_helpers.tpl @@ -28,6 +28,13 @@ helm.sh/chart: {{ include "flyte.chart" . }} app.kubernetes.io/managed-by: {{ .Release.Service }} {{- end -}} +{{- define "flyteadmin.podLabels" -}} +{{ include "flyteadmin.labels" . }} +{{- with .Values.flyteadmin.podLabels }} +{{ toYaml . }} +{{- end }} +{{- end -}} + {{- define "flytescheduler.name" -}} flytescheduler {{- end -}} @@ -44,6 +51,13 @@ helm.sh/chart: {{ include "flyte.chart" . }} app.kubernetes.io/managed-by: {{ .Release.Service }} {{- end -}} +{{- define "flytescheduler.podLabels" -}} +{{ include "flytescheduler.labels" . }} +{{- with .Values.flytescheduler.podLabels }} +{{ toYaml . }} +{{- end }} +{{- end -}} + {{- define "flyteclusterresourcesync.name" -}} flyteclusterresourcesync {{- end -}} @@ -59,6 +73,13 @@ helm.sh/chart: {{ include "flyte.chart" . }} app.kubernetes.io/managed-by: {{ .Release.Service }} {{- end -}} +{{- define "flyteclusterresourcesync.podLabels" -}} +{{ include "flyteclusterresourcesync.labels" . }} +{{- with .Values.cluster_resource_manager.podLabels }} +{{ toYaml . }} +{{- end }} +{{- end -}} + {{- define "datacatalog.name" -}} datacatalog {{- end -}} @@ -74,6 +95,13 @@ helm.sh/chart: {{ include "flyte.chart" . }} app.kubernetes.io/managed-by: {{ .Release.Service }} {{- end -}} +{{- define "datacatalog.podLabels" -}} +{{ include "datacatalog.labels" . }} +{{- with .Values.datacatalog.podLabels }} +{{ toYaml . 
}} +{{- end }} +{{- end -}} + {{- define "flyteagent.name" -}} flyteagent {{- end -}} @@ -89,6 +117,13 @@ helm.sh/chart: {{ include "flyte.chart" . }} app.kubernetes.io/managed-by: {{ .Release.Service }} {{- end -}} +{{- define "flyteagent.podLabels" -}} +{{ include "flyteagent.labels" . }} +{{- with .Values.flyteagent.podLabels }} +{{ toYaml . }} +{{- end }} +{{- end -}} + {{- define "flytepropeller.name" -}} flytepropeller {{- end -}} @@ -104,6 +139,13 @@ helm.sh/chart: {{ include "flyte.chart" . }} app.kubernetes.io/managed-by: {{ .Release.Service }} {{- end -}} +{{- define "flytepropeller.podLabels" -}} +{{ include "flytepropeller.labels" . }} +{{- with .Values.flytepropeller.podLabels }} +{{ toYaml . }} +{{- end }} +{{- end -}} + {{- define "flytepropeller-manager.name" -}} flytepropeller-manager {{- end -}} @@ -119,6 +161,13 @@ helm.sh/chart: {{ include "flyte.chart" . }} app.kubernetes.io/managed-by: {{ .Release.Service }} {{- end -}} +{{- define "flytepropeller-manager.podLabels" -}} +{{ include "flytepropeller-manager.labels" . }} +{{- with .Values.flytepropeller.podLabels }} +{{ toYaml . }} +{{- end }} +{{- end -}} + {{- define "flyte-pod-webhook.name" -}} flyte-pod-webhook {{- end -}} @@ -139,6 +188,13 @@ helm.sh/chart: {{ include "flyte.chart" . }} app.kubernetes.io/managed-by: {{ .Release.Service }} {{- end -}} +{{- define "flyteconsole.podLabels" -}} +{{ include "flyteconsole.labels" . }} +{{- with .Values.flyteconsole.podLabels }} +{{ toYaml . }} +{{- end }} +{{- end -}} + # Optional blocks for secret mount {{- define "databaseSecret.volume" -}} diff --git a/charts/flyte-core/templates/admin/deployment.yaml b/charts/flyte-core/templates/admin/deployment.yaml index 8b6dff34af..5a081224a8 100755 --- a/charts/flyte-core/templates/admin/deployment.yaml +++ b/charts/flyte-core/templates/admin/deployment.yaml @@ -16,7 +16,7 @@ spec: {{- with .Values.flyteadmin.podAnnotations }} {{- toYaml . 
| nindent 8 }} {{- end }} - labels: {{ include "flyteadmin.labels" . | nindent 8 }} + labels: {{ include "flyteadmin.podLabels" . | nindent 8 }} spec: securityContext: fsGroup: 65534 diff --git a/charts/flyte-core/templates/clusterresourcesync/deployment.yaml b/charts/flyte-core/templates/clusterresourcesync/deployment.yaml index bf13c08080..b4d4847c38 100644 --- a/charts/flyte-core/templates/clusterresourcesync/deployment.yaml +++ b/charts/flyte-core/templates/clusterresourcesync/deployment.yaml @@ -16,7 +16,7 @@ spec: {{- with .Values.cluster_resource_manager.podAnnotations }} {{- toYaml . | nindent 8 }} {{- end }} - labels: {{ include "flyteclusterresourcesync.labels" . | nindent 8 }} + labels: {{ include "flyteclusterresourcesync.podLabels" . | nindent 8 }} spec: containers: - command: @@ -25,6 +25,12 @@ spec: - {{ .Values.flyteadmin.configPath }} - clusterresource - run + {{- if .Values.cluster_resource_manager.podEnv }} + env: + {{- with .Values.cluster_resource_manager.podEnv }} + {{- toYaml . | nindent 10 }} + {{- end }} + {{- end }} image: "{{ .Values.flyteadmin.image.repository }}:{{ .Values.flyteadmin.image.tag }}" imagePullPolicy: "{{ .Values.flyteadmin.image.pullPolicy }}" name: sync-cluster-resources diff --git a/charts/flyte-core/templates/console/deployment.yaml b/charts/flyte-core/templates/console/deployment.yaml index 0c9a948f5f..09c9e741c8 100644 --- a/charts/flyte-core/templates/console/deployment.yaml +++ b/charts/flyte-core/templates/console/deployment.yaml @@ -16,7 +16,7 @@ spec: {{- with .Values.flyteconsole.podAnnotations }} {{- toYaml . | nindent 8 }} {{- end }} - labels: {{ include "flyteconsole.labels" . | nindent 8 }} + labels: {{ include "flyteconsole.podLabels" . 
| nindent 8 }} spec: securityContext: runAsUser: 1000 @@ -33,13 +33,18 @@ spec: name: flyte-console-config ports: - containerPort: 8080 - {{- if .Values.flyteconsole.ga.enabled }} env: + {{- if .Values.flyteconsole.ga.enabled }} - name: ENABLE_GA value: "{{ .Values.flyteconsole.ga.enabled }}" - name: GA_TRACKING_ID value: "{{ .Values.flyteconsole.ga.tracking_id }}" {{- end }} + {{- if .Values.flyteconsole.podEnv -}} + {{- with .Values.flyteconsole.podEnv }} + {{- toYaml . | nindent 8 }} + {{- end }} + {{- end }} resources: {{ toYaml .Values.flyteconsole.resources | nindent 10 }} volumeMounts: - mountPath: /srv/flyte diff --git a/charts/flyte-core/templates/datacatalog/deployment.yaml b/charts/flyte-core/templates/datacatalog/deployment.yaml index e9f2c84ec8..72c18ac61b 100644 --- a/charts/flyte-core/templates/datacatalog/deployment.yaml +++ b/charts/flyte-core/templates/datacatalog/deployment.yaml @@ -16,7 +16,7 @@ spec: {{- with .Values.datacatalog.podAnnotations }} {{- toYaml . | nindent 8 }} {{- end }} - labels: {{ include "datacatalog.labels" . | nindent 8 }} + labels: {{ include "datacatalog.podLabels" . | nindent 8 }} spec: securityContext: fsGroup: 1001 @@ -47,6 +47,12 @@ spec: {{- with .Values.datacatalog.extraArgs }} {{- tpl (toYaml .) $ | nindent 8 }} {{- end }} + {{- if .Values.datacatalog.podEnv }} + env: + {{- with .Values.datacatalog.podEnv }} + {{- toYaml . | nindent 8 }} + {{- end }} + {{- end }} image: "{{ .Values.datacatalog.image.repository }}:{{ .Values.datacatalog.image.tag }}" imagePullPolicy: "{{ .Values.datacatalog.image.pullPolicy }}" name: datacatalog diff --git a/charts/flyte-core/templates/flytescheduler/deployment.yaml b/charts/flyte-core/templates/flytescheduler/deployment.yaml index 443354efd7..fa5cfc4fd9 100755 --- a/charts/flyte-core/templates/flytescheduler/deployment.yaml +++ b/charts/flyte-core/templates/flytescheduler/deployment.yaml @@ -17,7 +17,7 @@ spec: {{- with .Values.flytescheduler.podAnnotations }} {{- toYaml . 
| nindent 8 }} {{- end }} - labels: {{ include "flytescheduler.labels" . | nindent 8 }} + labels: {{ include "flytescheduler.podLabels" . | nindent 8 }} spec: securityContext: fsGroup: 65534 @@ -48,6 +48,12 @@ spec: - run - --config - {{ .Values.flytescheduler.configPath }} + {{- if .Values.flytescheduler.podEnv }} + env: + {{- with .Values.flytescheduler.podEnv -}} + {{- toYaml . | nindent 8 }} + {{- end }} + {{- end }} image: "{{ .Values.flytescheduler.image.repository }}:{{ .Values.flytescheduler.image.tag }}" imagePullPolicy: "{{ .Values.flytescheduler.image.pullPolicy }}" name: flytescheduler diff --git a/charts/flyte-core/templates/propeller/deployment.yaml b/charts/flyte-core/templates/propeller/deployment.yaml index 72f94fcc1b..0efb3b5dc9 100644 --- a/charts/flyte-core/templates/propeller/deployment.yaml +++ b/charts/flyte-core/templates/propeller/deployment.yaml @@ -26,9 +26,9 @@ spec: {{- toYaml . | nindent 8 }} {{- end }} {{- if .Values.flytepropeller.manager }} - labels: {{ include "flytepropeller-manager.labels" . | nindent 8 }} + labels: {{ include "flytepropeller-manager.podLabels" . | nindent 8 }} {{- else }} - labels: {{ include "flytepropeller.labels" . | nindent 8 }} + labels: {{ include "flytepropeller.podLabels" . | nindent 8 }} {{- end }} spec: securityContext: @@ -65,6 +65,11 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace + {{- if .Values.flytepropeller.podEnv -}} + {{- with .Values.flytepropeller.podEnv -}} + {{- toYaml . 
| nindent 8 }} + {{- end }} + {{- end }} image: "{{ .Values.flytepropeller.image.repository }}:{{ .Values.flytepropeller.image.tag }}" imagePullPolicy: "{{ .Values.flytepropeller.image.pullPolicy }}" {{- if .Values.flytepropeller.manager }} diff --git a/charts/flyte-core/templates/propeller/webhook.yaml b/charts/flyte-core/templates/propeller/webhook.yaml index 2934bfcf21..75866a8c2b 100644 --- a/charts/flyte-core/templates/propeller/webhook.yaml +++ b/charts/flyte-core/templates/propeller/webhook.yaml @@ -26,9 +26,12 @@ spec: app: {{ template "flyte-pod-webhook.name" . }} app.kubernetes.io/name: {{ template "flyte-pod-webhook.name" . }} app.kubernetes.io/version: {{ .Values.flytepropeller.image.tag }} + {{- with .Values.flytepropeller.podLabels }} + {{- toYaml . | nindent 8 }} + {{- end }} annotations: configChecksum: {{ include (print .Template.BasePath "/propeller/configmap.yaml") . | sha256sum | trunc 63 | quote }} - {{- with .Values.flyteadmin.podAnnotations }} + {{- with .Values.flytepropeller.podAnnotations }} {{- toYaml . | nindent 8 }} {{- end }} spec: @@ -60,7 +63,7 @@ spec: fieldPath: metadata.namespace volumeMounts: - name: config-volume - mountPath: /etc/flyte/config + mountPath: /etc/flyte/config {{- end }} containers: - name: webhook @@ -81,6 +84,11 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace + {{- if .Values.flytepropeller.podEnv -}} + {{- with .Values.flytepropeller.podEnv -}} + {{- toYaml . 
| nindent 12 }} + {{- end }} + {{- end }} volumeMounts: - name: config-volume mountPath: /etc/flyte/config diff --git a/charts/flyte-core/values.yaml b/charts/flyte-core/values.yaml index d09d9f9824..36a9b22e5e 100755 --- a/charts/flyte-core/values.yaml +++ b/charts/flyte-core/values.yaml @@ -16,7 +16,7 @@ flyteadmin: image: # -- Docker image for Flyteadmin deployment repository: cr.flyte.org/flyteorg/flyteadmin # FLYTEADMIN_IMAGE - tag: v1.10.6 # FLYTEADMIN_TAG + tag: v1.10.7-b2 # FLYTEADMIN_TAG pullPolicy: IfNotPresent # -- Additional flyteadmin container environment variables # @@ -89,9 +89,11 @@ flyteadmin: - limitranges # -- Specifies the verbs (actions) that this ClusterRole can perform on the specified resources verbs: - - '*' + - "*" # -- Annotations for Flyteadmin pods podAnnotations: {} + # -- Labels for Flyteadmin pods + podLabels: {} # -- nodeSelector for Flyteadmin deployment nodeSelector: {} # -- tolerations for Flyteadmin deployment @@ -132,7 +134,7 @@ flytescheduler: # -- Docker image for Flytescheduler deployment repository: cr.flyte.org/flyteorg/flytescheduler # FLYTESCHEDULER_IMAGE # -- Docker image tag - tag: v1.10.6 # FLYTESCHEDULER_TAG + tag: v1.10.7-b2 # FLYTESCHEDULER_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Default resources requests and limits for Flytescheduler deployment @@ -158,6 +160,10 @@ flytescheduler: imagePullSecrets: [] # -- Annotations for Flytescheduler pods podAnnotations: {} + # -- Additional Flytescheduler container environment variables + podEnv: {} + # -- Labels for Flytescheduler pods + podLabels: {} # -- nodeSelector for Flytescheduler deployment nodeSelector: {} # -- tolerations for Flytescheduler deployment @@ -186,7 +192,7 @@ datacatalog: # -- Docker image for Datacatalog deployment repository: cr.flyte.org/flyteorg/datacatalog # DATACATALOG_IMAGE # -- Docker image tag - tag: v1.10.6 # DATACATALOG_TAG + tag: v1.10.7-b2 # DATACATALOG_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- 
Default resources requests and limits for Datacatalog deployment @@ -216,6 +222,10 @@ datacatalog: imagePullSecrets: [] # -- Annotations for Datacatalog pods podAnnotations: {} + # -- Additional Datacatalog container environment variables + podEnv: {} + # -- Labels for Datacatalog pods + podLabels: {} # -- nodeSelector for Datacatalog deployment nodeSelector: {} # -- tolerations for Datacatalog deployment @@ -254,7 +264,7 @@ flytepropeller: image: # -- Docker image for Flytepropeller deployment repository: cr.flyte.org/flyteorg/flytepropeller # FLYTEPROPELLER_IMAGE - tag: v1.10.6 # FLYTEPROPELLER_TAG + tag: v1.10.7-b2 # FLYTEPROPELLER_TAG pullPolicy: IfNotPresent # -- Default resources requests and limits for Flytepropeller deployment resources: @@ -282,6 +292,10 @@ flytepropeller: imagePullSecrets: [] # -- Annotations for Flytepropeller pods podAnnotations: {} + # -- Additional Flytepropeller container environment variables + podEnv: {} + # -- Labels for Flytepropeller pods + podLabels: {} # -- nodeSelector for Flytepropeller deployment nodeSelector: {} # -- tolerations for Flytepropeller deployment @@ -344,6 +358,10 @@ flyteconsole: type: ClusterIP # -- Annotations for Flyteconsole pods podAnnotations: {} + # -- Additional Flyteconsole container environment variables + podEnv: {} + # -- Labels for Flyteconsole pods + podLabels: {} # -- nodeSelector for Flyteconsole deployment nodeSelector: {} # -- tolerations for Flyteconsole deployment @@ -357,7 +375,6 @@ flyteconsole: # -- Sets priorityClassName for flyte console pod(s). 
priorityClassName: "" - # It will enable the redoc route in ingress deployRedoc: false @@ -489,37 +506,37 @@ db: configmap: clusters: labelClusterMap: {} -# labelClusterMap: -# team1: -# - id: testcluster -# weight: 1 -# team2: -# - id: testcluster2 -# weight: 0.5 -# - id: testcluster3 -# weight: 0.5 + # labelClusterMap: + # team1: + # - id: testcluster + # weight: 1 + # team2: + # - id: testcluster2 + # weight: 0.5 + # - id: testcluster3 + # weight: 0.5 clusterConfigs: [] -# clusterConfigs: -# - name: "testcluster" -# endpoint: "testcluster_endpoint" -# auth: -# type: "file_path" -# tokenPath: "/path/to/testcluster/token" -# certPath: "/path/to/testcluster/cert" -# - name: "testcluster2" -# endpoint: "testcluster2_endpoint" -# enabled: true -# auth: -# type: "file_path" -# tokenPath: "/path/to/testcluster2/token" -# certPath: "/path/to/testcluster2/cert" -# - name: "testcluster3" -# endpoint: "testcluster3_endpoint" -# enabled: true -# auth: -# type: "file_path" -# tokenPath: "/path/to/testcluster3/token" -# certPath: "/path/to/testcluster3/cert" + # clusterConfigs: + # - name: "testcluster" + # endpoint: "testcluster_endpoint" + # auth: + # type: "file_path" + # tokenPath: "/path/to/testcluster/token" + # certPath: "/path/to/testcluster/cert" + # - name: "testcluster2" + # endpoint: "testcluster2_endpoint" + # enabled: true + # auth: + # type: "file_path" + # tokenPath: "/path/to/testcluster2/token" + # certPath: "/path/to/testcluster2/cert" + # - name: "testcluster3" + # endpoint: "testcluster3_endpoint" + # enabled: true + # auth: + # type: "file_path" + # tokenPath: "/path/to/testcluster3/token" + # certPath: "/path/to/testcluster3/cert" # -- Configuration for Flyte console UI console: @@ -571,7 +588,7 @@ configmap: eventVersion: 2 testing: host: http://flyteadmin - + # -- Authentication configuration auth: authorizedUris: @@ -621,11 +638,11 @@ configmap: defaults: cpu: 100m memory: 500Mi - storage: 500Mi + ephemeralStorage: 500Mi limits: cpu: 2 memory: 1Gi - 
storage: 20Mi + ephemeralStorage: 20Mi gpu: 1 # -- Admin Client configuration [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/subworkflow/launchplan#AdminConfig) @@ -654,7 +671,7 @@ configmap: # -- Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) co-pilot: name: flyte-copilot- - image: cr.flyte.org/flyteorg/flytecopilot:v1.10.6 # FLYTECOPILOT_IMAGE + image: cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2 # FLYTECOPILOT_IMAGE start-timeout: 30s # -- Core propeller configuration @@ -824,6 +841,10 @@ cluster_resource_manager: service_account_name: flyteadmin # -- Annotations for ClusterResource pods podAnnotations: {} + # -- Additional ClusterResource container environment variables + podEnv: {} + # -- Labels for ClusterResource pods + podLabels: {} # -- Configmap for ClusterResource parameters config: # -- ClusterResource parameters @@ -922,4 +943,4 @@ databricks: databricks: entrypointFile: dbfs:///FileStore/tables/entrypoint.py # Databricks account - databricksInstance: dbc-a53b7a3c-614c \ No newline at end of file + databricksInstance: dbc-a53b7a3c-614c diff --git a/charts/flyte/README.md b/charts/flyte/README.md index d4a46ff286..cf9b247998 100644 --- a/charts/flyte/README.md +++ b/charts/flyte/README.md @@ -71,7 +71,7 @@ helm upgrade -f values-sandbox.yaml flyte . 
| contour.tolerations | list | `[]` | tolerations for Contour deployment | | daskoperator | object | `{"enabled":false}` | Optional: Dask Plugin using the Dask Operator | | daskoperator.enabled | bool | `false` | - enable or disable the dask operator deployment installation | -| flyte | object | `{"cluster_resource_manager":{"config":{"cluster_resources":{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refresh":"5m","refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}},"enabled":true,"service_account_name":"flyteadmin","templates":[{"key":"aa_namespace","value":"apiVersion: v1\nkind: Namespace\nmetadata:\n name: {{ namespace }}\nspec:\n finalizers:\n - kubernetes\n"},{"key":"ab_project_resource_quota","value":"apiVersion: v1\nkind: ResourceQuota\nmetadata:\n name: project-quota\n namespace: {{ namespace }}\nspec:\n hard:\n limits.cpu: {{ projectQuotaCpu }}\n limits.memory: {{ projectQuotaMemory 
}}\n"}]},"common":{"databaseSecret":{"name":"","secretManifest":{}},"flyteNamespaceTemplate":{"enabled":false},"ingress":{"albSSLRedirect":false,"annotations":{"nginx.ingress.kubernetes.io/app-root":"/console"},"enabled":true,"host":"","separateGrpcIngress":false,"separateGrpcIngressAnnotations":{"nginx.ingress.kubernetes.io/backend-protocol":"GRPC"},"tls":{"enabled":false},"webpackHMR":true}},"configmap":{"adminServer":{"auth":{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}},"flyteadmin":{"eventVersion":2,"metadataStoragePrefix":["metadata","admin"],"metricsScope":"flyte:","profilerPort":10254,"roleNameKey":"iam.amazonaws.com/role","testing":{"host":"http://flyteadmin"}},"server":{"grpcPort":8089,"httpPort":8088,"security":{"allowCors":true,"allowedHeaders":["Content-Type","flyte-authorization"],"allowedOrigins":["*"],"secure":false,"useAuth":false}}},"catalog":{"catalog-cache":{"endpoint":"datacatalog:89","insecure":true,"type":"datacatalog"}},"console":{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"},"copilot":{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.6","name":"flyte-copilot-","start-timeout":"30s"}}}},"core":{"propeller":{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","qu
eue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}},"datacatalogServer":{"application":{"grpcPort":8089,"grpcServerReflection":true,"httpPort":8080},"datacatalog":{"metrics-scope":"datacatalog","profiler-port":10254,"storage-prefix":"metadata/datacatalog"}},"domain":{"domains":[{"id":"development","name":"development"},{"id":"staging","name":"staging"},{"id":"production","name":"production"}]},"enabled_plugins":{"tasks":{"task-plugins":{"default-for-task-types":{"bigquery_query_job_task":"agent-service","container":"container","container_array":"k8s-array","sidecar":"sidecar"},"enabled-plugins":["container","sidecar","k8s-array","agent-service"]}}},"k8s":{"plugins":{"agent-service":{"defaultAgent":{"endpoint":"dns:///flyteagent.flyte.svc.cluster.local:8000","insecure":true},"supportedTaskTypes":["bigquery_query_job_task"]},"k8s":{"default-cpus":"100m","default-env-vars":[{"FLYTE_AWS_ENDPOINT":"http://minio.flyte:9000"},{"FLYTE_AWS_ACCESS_KEY_ID":"minio"},{"FLYTE_AWS_SECRET_ACCESS_KEY":"miniostorage"}],"default-memory":"200Mi"}}},"logger":{"logger":{"level":5,"show-source":true}},"remoteData":{"remoteData":{"region":"us-east-1","scheme":"local","signedUrls":{"durationMinutes":3}}},"resource_manager":{"propeller":{"resourcemanager":{"redis":null,"type":"noop"}}},"task_logs":{"plugins":{"logs":{"cloudwatch-enabled":false,"kubernetes-enabled":true,"kubernetes-template-uri":"http://localhost:30082/#/log/{{ \"{{\" }} .namespace {{ \"}}\" }}/{{ \"{{\" }} .podName {{ \"}}\" }}/pod?namespace={{ \"{{\" }} .namespace {{ \"}}\" 
}}"}}},"task_resource_defaults":{"task_resources":{"defaults":{"cpu":"100m","memory":"200Mi","storage":"5Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi","storage":"20Mi"}}}},"datacatalog":{"affinity":{},"configPath":"/etc/datacatalog/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/datacatalog","tag":"v1.10.6"},"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"500m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"type":"NodePort"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"db":{"admin":{"database":{"dbname":"flyteadmin","host":"postgres","port":5432,"username":"postgres"}},"datacatalog":{"database":{"dbname":"datacatalog","host":"postgres","port":5432,"username":"postgres"}}},"deployRedoc":true,"flyteadmin":{"additionalVolumeMounts":[],"additionalVolumes":[],"affinity":{},"configPath":"/etc/flyte/config/*.yaml","env":[],"image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flyteadmin","tag":"v1.10.6"},"initialProjects":["flytesnacks","flytetester","flyteexamples"],"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"secrets":{},"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"loadBalancerSourceRanges":[],"type":"ClusterIP"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"flyteconsole":{"affinity":{},"ga":{"enabled":true,"tracking_id":"G-0QW4DJWJ20"},"image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flyteconsole","tag":"v1.10.2"},"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"500m","memory":"275Mi"},"requests
":{"cpu":"10m","memory":"250Mi"}},"service":{"annotations":{},"type":"ClusterIP"},"tolerations":[]},"flytepropeller":{"affinity":{},"cacheSizeMbs":0,"configPath":"/etc/flyte/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flytepropeller","tag":"v1.10.6"},"manager":false,"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"200m","ephemeral-storage":"100Mi","memory":"200Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"flytescheduler":{"affinity":{},"configPath":"/etc/flyte/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flytescheduler","tag":"v1.10.6"},"nodeSelector":{},"podAnnotations":{},"resources":{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"secrets":{},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"storage":{"bucketName":"my-s3-bucket","custom":{},"gcs":null,"s3":{"region":"us-east-1"},"type":"sandbox"},"webhook":{"enabled":true,"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"type":"ClusterIP"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]}},"workflow_notifications":{"config":{},"enabled":false},"workflow_scheduler":{"enabled":true,"type":"native"}}` | ------------------------------------------------------------------- Core System settings This section consists of Core components of Flyte and their deployment settings. 
This includes FlyteAdmin service, Datacatalog, FlytePropeller and Flyteconsole | +| flyte | object | `{"cluster_resource_manager":{"config":{"cluster_resources":{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refresh":"5m","refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}},"enabled":true,"service_account_name":"flyteadmin","templates":[{"key":"aa_namespace","value":"apiVersion: v1\nkind: Namespace\nmetadata:\n name: {{ namespace }}\nspec:\n finalizers:\n - kubernetes\n"},{"key":"ab_project_resource_quota","value":"apiVersion: v1\nkind: ResourceQuota\nmetadata:\n name: project-quota\n namespace: {{ namespace }}\nspec:\n hard:\n limits.cpu: {{ projectQuotaCpu }}\n limits.memory: {{ projectQuotaMemory 
}}\n"}]},"common":{"databaseSecret":{"name":"","secretManifest":{}},"flyteNamespaceTemplate":{"enabled":false},"ingress":{"albSSLRedirect":false,"annotations":{"nginx.ingress.kubernetes.io/app-root":"/console"},"enabled":true,"host":"","separateGrpcIngress":false,"separateGrpcIngressAnnotations":{"nginx.ingress.kubernetes.io/backend-protocol":"GRPC"},"tls":{"enabled":false},"webpackHMR":true}},"configmap":{"adminServer":{"auth":{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}},"flyteadmin":{"eventVersion":2,"metadataStoragePrefix":["metadata","admin"],"metricsScope":"flyte:","profilerPort":10254,"roleNameKey":"iam.amazonaws.com/role","testing":{"host":"http://flyteadmin"}},"server":{"grpcPort":8089,"httpPort":8088,"security":{"allowCors":true,"allowedHeaders":["Content-Type","flyte-authorization"],"allowedOrigins":["*"],"secure":false,"useAuth":false}}},"catalog":{"catalog-cache":{"endpoint":"datacatalog:89","insecure":true,"type":"datacatalog"}},"console":{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"},"copilot":{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2","name":"flyte-copilot-","start-timeout":"30s"}}}},"core":{"propeller":{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s",
"queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}},"datacatalogServer":{"application":{"grpcPort":8089,"grpcServerReflection":true,"httpPort":8080},"datacatalog":{"metrics-scope":"datacatalog","profiler-port":10254,"storage-prefix":"metadata/datacatalog"}},"domain":{"domains":[{"id":"development","name":"development"},{"id":"staging","name":"staging"},{"id":"production","name":"production"}]},"enabled_plugins":{"tasks":{"task-plugins":{"default-for-task-types":{"bigquery_query_job_task":"agent-service","container":"container","container_array":"k8s-array","sidecar":"sidecar"},"enabled-plugins":["container","sidecar","k8s-array","agent-service"]}}},"k8s":{"plugins":{"agent-service":{"defaultAgent":{"endpoint":"dns:///flyteagent.flyte.svc.cluster.local:8000","insecure":true},"supportedTaskTypes":["bigquery_query_job_task"]},"k8s":{"default-cpus":"100m","default-env-vars":[{"FLYTE_AWS_ENDPOINT":"http://minio.flyte:9000"},{"FLYTE_AWS_ACCESS_KEY_ID":"minio"},{"FLYTE_AWS_SECRET_ACCESS_KEY":"miniostorage"}],"default-memory":"200Mi"}}},"logger":{"logger":{"level":5,"show-source":true}},"remoteData":{"remoteData":{"region":"us-east-1","scheme":"local","signedUrls":{"durationMinutes":3}}},"resource_manager":{"propeller":{"resourcemanager":{"redis":null,"type":"noop"}}},"task_logs":{"plugins":{"logs":{"cloudwatch-enabled":false,"kubernetes-enabled":true,"kubernetes-template-uri":"http://localhost:30082/#/log/{{ \"{{\" }} .namespace {{ \"}}\" }}/{{ \"{{\" }} .podName {{ \"}}\" }}/pod?namespace={{ \"{{\" }} .namespace {{ \"}}\" 
}}"}}},"task_resource_defaults":{"task_resources":{"defaults":{"cpu":"100m","memory":"200Mi","storage":"5Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi","storage":"20Mi"}}}},"datacatalog":{"affinity":{},"configPath":"/etc/datacatalog/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/datacatalog","tag":"v1.10.7-b2"},"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"500m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"type":"NodePort"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"db":{"admin":{"database":{"dbname":"flyteadmin","host":"postgres","port":5432,"username":"postgres"}},"datacatalog":{"database":{"dbname":"datacatalog","host":"postgres","port":5432,"username":"postgres"}}},"deployRedoc":true,"flyteadmin":{"additionalVolumeMounts":[],"additionalVolumes":[],"affinity":{},"configPath":"/etc/flyte/config/*.yaml","env":[],"image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flyteadmin","tag":"v1.10.7-b2"},"initialProjects":["flytesnacks","flytetester","flyteexamples"],"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"secrets":{},"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"loadBalancerSourceRanges":[],"type":"ClusterIP"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"flyteconsole":{"affinity":{},"ga":{"enabled":true,"tracking_id":"G-0QW4DJWJ20"},"image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flyteconsole","tag":"v1.10.2"},"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"500m","memory":"275Mi"},"re
quests":{"cpu":"10m","memory":"250Mi"}},"service":{"annotations":{},"type":"ClusterIP"},"tolerations":[]},"flytepropeller":{"affinity":{},"cacheSizeMbs":0,"configPath":"/etc/flyte/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flytepropeller","tag":"v1.10.7-b2"},"manager":false,"nodeSelector":{},"podAnnotations":{},"replicaCount":1,"resources":{"limits":{"cpu":"200m","ephemeral-storage":"100Mi","memory":"200Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"flytescheduler":{"affinity":{},"configPath":"/etc/flyte/config/*.yaml","image":{"pullPolicy":"IfNotPresent","repository":"cr.flyte.org/flyteorg/flytescheduler","tag":"v1.10.7-b2"},"nodeSelector":{},"podAnnotations":{},"resources":{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}},"secrets":{},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]},"tolerations":[]},"storage":{"bucketName":"my-s3-bucket","custom":{},"gcs":null,"s3":{"region":"us-east-1"},"type":"sandbox"},"webhook":{"enabled":true,"service":{"annotations":{"projectcontour.io/upstream-protocol.h2c":"grpc"},"type":"ClusterIP"},"serviceAccount":{"annotations":{},"create":true,"imagePullSecrets":[]}},"workflow_notifications":{"config":{},"enabled":false},"workflow_scheduler":{"enabled":true,"type":"native"}}` | ------------------------------------------------------------------- Core System settings This section consists of Core components of Flyte and their deployment settings. 
This includes FlyteAdmin service, Datacatalog, FlytePropeller and Flyteconsole | | flyte.cluster_resource_manager | object | `{"config":{"cluster_resources":{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refresh":"5m","refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}},"enabled":true,"service_account_name":"flyteadmin","templates":[{"key":"aa_namespace","value":"apiVersion: v1\nkind: Namespace\nmetadata:\n name: {{ namespace }}\nspec:\n finalizers:\n - kubernetes\n"},{"key":"ab_project_resource_quota","value":"apiVersion: v1\nkind: ResourceQuota\nmetadata:\n name: project-quota\n namespace: {{ namespace }}\nspec:\n hard:\n limits.cpu: {{ projectQuotaCpu }}\n limits.memory: {{ projectQuotaMemory }}\n"}]}` | Configuration for the Cluster resource manager component. This is an optional component, that enables automatic cluster configuration. This is useful to set default quotas, manage namespaces etc that map to a project/domain | | flyte.cluster_resource_manager.config.cluster_resources | object | `{"customData":[{"production":[{"projectQuotaCpu":{"value":"5"}},{"projectQuotaMemory":{"value":"4000Mi"}}]},{"staging":[{"projectQuotaCpu":{"value":"2"}},{"projectQuotaMemory":{"value":"3000Mi"}}]},{"development":[{"projectQuotaCpu":{"value":"4"}},{"projectQuotaMemory":{"value":"3000Mi"}}]}],"refresh":"5m","refreshInterval":"5m","standaloneDeployment":false,"templatePath":"/etc/flyte/clusterresource/templates"}` | ClusterResource parameters Refer to the [structure](https://pkg.go.dev/github.com/lyft/flyteadmin@v0.3.37/pkg/runtime/interfaces#ClusterResourceConfig) to customize. 
| | flyte.cluster_resource_manager.config.cluster_resources.standaloneDeployment | bool | `false` | Starts the cluster resource manager in standalone mode with requisite auth credentials to call flyteadmin service endpoints | @@ -91,15 +91,15 @@ helm upgrade -f values-sandbox.yaml flyte . | flyte.common.ingress.separateGrpcIngressAnnotations | object | `{"nginx.ingress.kubernetes.io/backend-protocol":"GRPC"}` | - Extra Ingress annotations applied only to the GRPC ingress. Only makes sense if `separateGrpcIngress` is enabled. | | flyte.common.ingress.tls | object | `{"enabled":false}` | - TLS Settings | | flyte.common.ingress.webpackHMR | bool | `true` | - Enable or disable HMR route to flyteconsole. This is useful only for frontend development. | -| flyte.configmap | object | `{"adminServer":{"auth":{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}},"flyteadmin":{"eventVersion":2,"metadataStoragePrefix":["metadata","admin"],"metricsScope":"flyte:","profilerPort":10254,"roleNameKey":"iam.amazonaws.com/role","testing":{"host":"http://flyteadmin"}},"server":{"grpcPort":8089,"httpPort":8088,"security":{"allowCors":true,"allowedHeaders":["Content-Type","flyte-authorization"],"allowedOrigins":["*"],"secure":false,"useAuth":false}}},"catalog":{"catalog-cache":{"endpoint":"datacatalog:89","insecure":true,"type":"datacatalog"}},"console":{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"},"copilot":{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.6","name":"flyte-copilot-","start-timeout":"30s"}}}},"core":{"propeller":{"downstream-eval-duration":"30s","e
nable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}},"datacatalogServer":{"application":{"grpcPort":8089,"grpcServerReflection":true,"httpPort":8080},"datacatalog":{"metrics-scope":"datacatalog","profiler-port":10254,"storage-prefix":"metadata/datacatalog"}},"domain":{"domains":[{"id":"development","name":"development"},{"id":"staging","name":"staging"},{"id":"production","name":"production"}]},"enabled_plugins":{"tasks":{"task-plugins":{"default-for-task-types":{"bigquery_query_job_task":"agent-service","container":"container","container_array":"k8s-array","sidecar":"sidecar"},"enabled-plugins":["container","sidecar","k8s-array","agent-service"]}}},"k8s":{"plugins":{"agent-service":{"defaultAgent":{"endpoint":"dns:///flyteagent.flyte.svc.cluster.local:8000","insecure":true},"supportedTaskTypes":["bigquery_query_job_task"]},"k8s":{"default-cpus":"100m","default-env-vars":[{"FLYTE_AWS_ENDPOINT":"http://minio.flyte:9000"},{"FLYTE_AWS_ACCESS_KEY_ID":"minio"},{"FLYTE_AWS_SECRET_ACCESS_KEY":"miniostorage"}],"default-memory":"200Mi"}}},"logger":{"logger":{"level":5,"show-source":true}},"remoteData":{"remoteData":{"region":"us-east-1","scheme":"local","signedUrls":{"durationMinutes":3}}},"resource_manager":{"propeller":{"resourcemanager":{"redis":null,"type":"noop"}}},"task_logs":{"plugins":{"logs":{"cloudwatch-enabled":false,"kubernetes-
enabled":true,"kubernetes-template-uri":"http://localhost:30082/#/log/{{ \"{{\" }} .namespace {{ \"}}\" }}/{{ \"{{\" }} .podName {{ \"}}\" }}/pod?namespace={{ \"{{\" }} .namespace {{ \"}}\" }}"}}},"task_resource_defaults":{"task_resources":{"defaults":{"cpu":"100m","memory":"200Mi","storage":"5Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi","storage":"20Mi"}}}}` | ----------------------------------------------------------------- CONFIGMAPS SETTINGS | +| flyte.configmap | object | `{"adminServer":{"auth":{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}},"flyteadmin":{"eventVersion":2,"metadataStoragePrefix":["metadata","admin"],"metricsScope":"flyte:","profilerPort":10254,"roleNameKey":"iam.amazonaws.com/role","testing":{"host":"http://flyteadmin"}},"server":{"grpcPort":8089,"httpPort":8088,"security":{"allowCors":true,"allowedHeaders":["Content-Type","flyte-authorization"],"allowedOrigins":["*"],"secure":false,"useAuth":false}}},"catalog":{"catalog-cache":{"endpoint":"datacatalog:89","insecure":true,"type":"datacatalog"}},"console":{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"},"copilot":{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2","name":"flyte-copilot-","start-timeout":"30s"}}}},"core":{"propeller":{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"
flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}},"datacatalogServer":{"application":{"grpcPort":8089,"grpcServerReflection":true,"httpPort":8080},"datacatalog":{"metrics-scope":"datacatalog","profiler-port":10254,"storage-prefix":"metadata/datacatalog"}},"domain":{"domains":[{"id":"development","name":"development"},{"id":"staging","name":"staging"},{"id":"production","name":"production"}]},"enabled_plugins":{"tasks":{"task-plugins":{"default-for-task-types":{"bigquery_query_job_task":"agent-service","container":"container","container_array":"k8s-array","sidecar":"sidecar"},"enabled-plugins":["container","sidecar","k8s-array","agent-service"]}}},"k8s":{"plugins":{"agent-service":{"defaultAgent":{"endpoint":"dns:///flyteagent.flyte.svc.cluster.local:8000","insecure":true},"supportedTaskTypes":["bigquery_query_job_task"]},"k8s":{"default-cpus":"100m","default-env-vars":[{"FLYTE_AWS_ENDPOINT":"http://minio.flyte:9000"},{"FLYTE_AWS_ACCESS_KEY_ID":"minio"},{"FLYTE_AWS_SECRET_ACCESS_KEY":"miniostorage"}],"default-memory":"200Mi"}}},"logger":{"logger":{"level":5,"show-source":true}},"remoteData":{"remoteData":{"region":"us-east-1","scheme":"local","signedUrls":{"durationMinutes":3}}},"resource_manager":{"propeller":{"resourcemanager":{"redis":null,"type":"noop"}}},"task_logs":{"plugins":{"logs":{"cloudwatch-enabled":false,"kubernetes-enabled":true,"kubernetes-template-uri":"http://localhost:30082/#/log/{{ \"{{\" }} .namespace {{ \"}}\" }}/{{ \"{{\" }} .podName {{ \"}}\" }}/pod?namespace={{ \"{{\" }} .namespace {{ \"}}\" 
}}"}}},"task_resource_defaults":{"task_resources":{"defaults":{"cpu":"100m","memory":"200Mi","storage":"5Mi"},"limits":{"cpu":2,"gpu":1,"memory":"1Gi","storage":"20Mi"}}}}` | ----------------------------------------------------------------- CONFIGMAPS SETTINGS | | flyte.configmap.adminServer | object | `{"auth":{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}},"flyteadmin":{"eventVersion":2,"metadataStoragePrefix":["metadata","admin"],"metricsScope":"flyte:","profilerPort":10254,"roleNameKey":"iam.amazonaws.com/role","testing":{"host":"http://flyteadmin"}},"server":{"grpcPort":8089,"httpPort":8088,"security":{"allowCors":true,"allowedHeaders":["Content-Type","flyte-authorization"],"allowedOrigins":["*"],"secure":false,"useAuth":false}}}` | FlyteAdmin server configuration | | flyte.configmap.adminServer.auth | object | `{"appAuth":{"thirdPartyConfig":{"flyteClient":{"clientId":"flytectl","redirectUri":"http://localhost:53593/callback","scopes":["offline","all"]}}},"authorizedUris":["https://localhost:30081","http://flyteadmin:80","http://flyteadmin.flyte.svc.cluster.local:80"],"userAuth":{"openId":{"baseUrl":"https://accounts.google.com","clientId":"657465813211-6eog7ek7li5k7i7fvgv2921075063hpe.apps.googleusercontent.com","scopes":["profile","openid"]}}}` | Authentication configuration | | flyte.configmap.adminServer.server.security.secure | bool | `false` | Controls whether to serve requests over SSL/TLS. | | flyte.configmap.adminServer.server.security.useAuth | bool | `false` | Controls whether to enforce authentication. 
Follow the guide in https://docs.flyte.org/ on how to setup authentication. | | flyte.configmap.catalog | object | `{"catalog-cache":{"endpoint":"datacatalog:89","insecure":true,"type":"datacatalog"}}` | Catalog Client configuration [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/task/catalog#Config) Additional advanced Catalog configuration [here](https://pkg.go.dev/github.com/lyft/flyteplugins/go/tasks/pluginmachinery/catalog#Config) | | flyte.configmap.console | object | `{"BASE_URL":"/console","CONFIG_DIR":"/etc/flyte/config"}` | Configuration for Flyte console UI | -| flyte.configmap.copilot | object | `{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.6","name":"flyte-copilot-","start-timeout":"30s"}}}}` | Copilot configuration | -| flyte.configmap.copilot.plugins.k8s.co-pilot | object | `{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.6","name":"flyte-copilot-","start-timeout":"30s"}` | Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) | +| flyte.configmap.copilot | object | `{"plugins":{"k8s":{"co-pilot":{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2","name":"flyte-copilot-","start-timeout":"30s"}}}}` | Copilot configuration | +| flyte.configmap.copilot.plugins.k8s.co-pilot | object | `{"image":"cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2","name":"flyte-copilot-","start-timeout":"30s"}` | Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) | | flyte.configmap.core | object | 
`{"propeller":{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"},"webhook":{"certDir":"/etc/webhook/certs","serviceName":"flyte-pod-webhook"}}` | Core propeller configuration | | flyte.configmap.core.propeller | object | `{"downstream-eval-duration":"30s","enable-admin-launcher":true,"leader-election":{"enabled":true,"lease-duration":"15s","lock-config-map":{"name":"propeller-leader","namespace":"flyte"},"renew-deadline":"10s","retry-period":"2s"},"limit-namespace":"all","max-workflow-retries":30,"metadata-prefix":"metadata/propeller","metrics-prefix":"flyte","prof-port":10254,"queue":{"batch-size":-1,"batching-interval":"2s","queue":{"base-delay":"5s","capacity":1000,"max-delay":"120s","rate":100,"type":"maxof"},"sub-queue":{"capacity":100,"rate":10,"type":"bucket"},"type":"batch"},"rawoutput-prefix":"s3://my-s3-bucket/","workers":4,"workflow-reeval-duration":"30s"}` | follows the structure specified [here](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/config). | | flyte.configmap.datacatalogServer | object | `{"application":{"grpcPort":8089,"grpcServerReflection":true,"httpPort":8080},"datacatalog":{"metrics-scope":"datacatalog","profiler-port":10254,"storage-prefix":"metadata/datacatalog"}}` | Datacatalog server config | @@ -120,7 +120,7 @@ helm upgrade -f values-sandbox.yaml flyte . 
| flyte.datacatalog.configPath | string | `"/etc/datacatalog/config/*.yaml"` | Default regex string for searching configuration files | | flyte.datacatalog.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | flyte.datacatalog.image.repository | string | `"cr.flyte.org/flyteorg/datacatalog"` | Docker image for Datacatalog deployment | -| flyte.datacatalog.image.tag | string | `"v1.10.6"` | Docker image tag | +| flyte.datacatalog.image.tag | string | `"v1.10.7-b2"` | Docker image tag | | flyte.datacatalog.nodeSelector | object | `{}` | nodeSelector for Datacatalog deployment | | flyte.datacatalog.podAnnotations | object | `{}` | Annotations for Datacatalog pods | | flyte.datacatalog.replicaCount | int | `1` | Replicas count for Datacatalog deployment | @@ -136,7 +136,7 @@ helm upgrade -f values-sandbox.yaml flyte . | flyte.flyteadmin.env | list | `[]` | Additional flyteadmin container environment variables e.g. SendGrid's API key - name: SENDGRID_API_KEY value: "" e.g. secret environment variable (you can combine it with .additionalVolumes): - name: SENDGRID_API_KEY valueFrom: secretKeyRef: name: sendgrid-secret key: api_key | | flyte.flyteadmin.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | flyte.flyteadmin.image.repository | string | `"cr.flyte.org/flyteorg/flyteadmin"` | Docker image for Flyteadmin deployment | -| flyte.flyteadmin.image.tag | string | `"v1.10.6"` | Docker image tag | +| flyte.flyteadmin.image.tag | string | `"v1.10.7-b2"` | Docker image tag | | flyte.flyteadmin.initialProjects | list | `["flytesnacks","flytetester","flyteexamples"]` | Initial projects to create | | flyte.flyteadmin.nodeSelector | object | `{}` | nodeSelector for Flyteadmin deployment | | flyte.flyteadmin.podAnnotations | object | `{}` | Annotations for Flyteadmin pods | @@ -162,7 +162,7 @@ helm upgrade -f values-sandbox.yaml flyte . 
| flyte.flytepropeller.configPath | string | `"/etc/flyte/config/*.yaml"` | Default regex string for searching configuration files | | flyte.flytepropeller.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | flyte.flytepropeller.image.repository | string | `"cr.flyte.org/flyteorg/flytepropeller"` | Docker image for Flytepropeller deployment | -| flyte.flytepropeller.image.tag | string | `"v1.10.6"` | Docker image tag | +| flyte.flytepropeller.image.tag | string | `"v1.10.7-b2"` | Docker image tag | | flyte.flytepropeller.nodeSelector | object | `{}` | nodeSelector for Flytepropeller deployment | | flyte.flytepropeller.podAnnotations | object | `{}` | Annotations for Flytepropeller pods | | flyte.flytepropeller.replicaCount | int | `1` | Replicas count for Flytepropeller deployment | @@ -176,7 +176,7 @@ helm upgrade -f values-sandbox.yaml flyte . | flyte.flytescheduler.configPath | string | `"/etc/flyte/config/*.yaml"` | Default regex string for searching configuration files | | flyte.flytescheduler.image.pullPolicy | string | `"IfNotPresent"` | Docker image pull policy | | flyte.flytescheduler.image.repository | string | `"cr.flyte.org/flyteorg/flytescheduler"` | Docker image for Flytescheduler deployment | -| flyte.flytescheduler.image.tag | string | `"v1.10.6"` | Docker image tag | +| flyte.flytescheduler.image.tag | string | `"v1.10.7-b2"` | Docker image tag | | flyte.flytescheduler.nodeSelector | object | `{}` | nodeSelector for Flytescheduler deployment | | flyte.flytescheduler.podAnnotations | object | `{}` | Annotations for Flytescheduler pods | | flyte.flytescheduler.resources | object | `{"limits":{"cpu":"250m","ephemeral-storage":"100Mi","memory":"500Mi"},"requests":{"cpu":"10m","ephemeral-storage":"50Mi","memory":"50Mi"}}` | Default resources requests and limits for Flytescheduler deployment | diff --git a/charts/flyte/values.yaml b/charts/flyte/values.yaml index 36d2a440db..fc53ada74f 100755 --- a/charts/flyte/values.yaml +++ 
b/charts/flyte/values.yaml @@ -16,7 +16,7 @@ flyte: # -- Docker image for Flyteadmin deployment repository: cr.flyte.org/flyteorg/flyteadmin # FLYTEADMIN_IMAGE # -- Docker image tag - tag: v1.10.6 # FLYTEADMIN_TAG + tag: v1.10.7-b2 # FLYTEADMIN_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Additional flyteadmin container environment variables @@ -84,7 +84,7 @@ flyte: # -- Docker image for Flytescheduler deployment repository: cr.flyte.org/flyteorg/flytescheduler # FLYTESCHEDULER_IMAGE # -- Docker image tag - tag: v1.10.6 # FLYTESCHEDULER_TAG + tag: v1.10.7-b2 # FLYTESCHEDULER_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Default resources requests and limits for Flytescheduler deployment @@ -129,7 +129,7 @@ flyte: # -- Docker image for Datacatalog deployment repository: cr.flyte.org/flyteorg/datacatalog # DATACATALOG_IMAGE # -- Docker image tag - tag: v1.10.6 # DATACATALOG_TAG + tag: v1.10.7-b2 # DATACATALOG_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Default resources requests and limits for Datacatalog deployment @@ -178,7 +178,7 @@ flyte: # -- Docker image for Flytepropeller deployment repository: cr.flyte.org/flyteorg/flytepropeller # FLYTEPROPELLER_IMAGE # -- Docker image tag - tag: v1.10.6 # FLYTEPROPELLER_TAG + tag: v1.10.7-b2 # FLYTEPROPELLER_TAG # -- Docker image pull policy pullPolicy: IfNotPresent # -- Default resources requests and limits for Flytepropeller deployment @@ -471,7 +471,7 @@ flyte: # -- Structure documented [here](https://pkg.go.dev/github.com/lyft/flyteplugins@v0.5.28/go/tasks/pluginmachinery/flytek8s/config#FlyteCoPilotConfig) co-pilot: name: flyte-copilot- - image: cr.flyte.org/flyteorg/flytecopilot:v1.10.6 # FLYTECOPILOT_IMAGE + image: cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2 # FLYTECOPILOT_IMAGE start-timeout: 30s # -- Core propeller configuration diff --git a/charts/flyteagent/README.md b/charts/flyteagent/README.md index 80107d8e2b..9db05060d0 100644 --- 
a/charts/flyteagent/README.md +++ b/charts/flyteagent/README.md @@ -24,6 +24,8 @@ A Helm chart for Flyte agent | nameOverride | string | `""` | | | nodeSelector | object | `{}` | nodeSelector for flyteagent deployment | | podAnnotations | object | `{}` | Annotations for flyteagent pods | +| podEnv | object | `{}` | Additional flyteagent pod container environment variables | +| podLabels | object | `{}` | Labels for flyteagent pods | | ports.containerPort | int | `8000` | | | ports.name | string | `"agent-grpc"` | | | priorityClassName | string | `""` | Sets priorityClassName for datacatalog pod(s). | diff --git a/charts/flyteagent/templates/_helpers.tpl b/charts/flyteagent/templates/_helpers.tpl index fffa18304a..40411a61b9 100755 --- a/charts/flyteagent/templates/_helpers.tpl +++ b/charts/flyteagent/templates/_helpers.tpl @@ -28,6 +28,13 @@ helm.sh/chart: {{ include "flyte.chart" . }} app.kubernetes.io/managed-by: {{ .Release.Service }} {{- end -}} +{{- define "flyteagent.podLabels" -}} +{{ include "flyteagent.labels" . }} +{{- with .Values.podLabels }} +{{- toYaml . }} +{{- end }} +{{- end -}} + # Optional blocks for secret mount {{- define "agentSecret.volume" -}} @@ -43,4 +50,4 @@ app.kubernetes.io/managed-by: {{ .Release.Service }} {{- define "flyteagent.servicePort" -}} {{ include .Values.ports.containerPort}} -{{- end }} \ No newline at end of file +{{- end }} diff --git a/charts/flyteagent/templates/agent/deployment.yaml b/charts/flyteagent/templates/agent/deployment.yaml index 90d03c6f02..5dd293bc14 100644 --- a/charts/flyteagent/templates/agent/deployment.yaml +++ b/charts/flyteagent/templates/agent/deployment.yaml @@ -14,7 +14,7 @@ spec: {{- with .Values.podAnnotations }} {{- toYaml . | nindent 8 }} {{- end }} - labels: {{ include "flyteagent.labels" . | nindent 8 }} + labels: {{ include "flyteagent.podLabels" . 
| nindent 8 }} spec: {{- if .Values.priorityClassName }} priorityClassName: {{ .Values.priorityClassName }} @@ -24,6 +24,12 @@ spec: - pyflyte - serve - agent + {{- if .Values.podEnv }} + env: + {{- with .Values.podEnv }} + {{- toYaml . | nindent 8 }} + {{- end }} + {{- end }} image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}" imagePullPolicy: "{{ .Values.image.pullPolicy }}" name: flyteagent diff --git a/charts/flyteagent/values.yaml b/charts/flyteagent/values.yaml index d1d7ac5cbf..b682600d46 100755 --- a/charts/flyteagent/values.yaml +++ b/charts/flyteagent/values.yaml @@ -56,6 +56,10 @@ serviceAccount: imagePullSecrets: [] # -- Annotations for flyteagent pods podAnnotations: {} +# -- Additional flyteagent pod container environment variables +podEnv: {} +# -- Labels for flyteagent pods +podLabels: {} # -- nodeSelector for flyteagent deployment nodeSelector: {} # -- tolerations for flyteagent deployment diff --git a/cmd/single/start.go b/cmd/single/start.go index 3ad8038cd6..d415f82111 100644 --- a/cmd/single/start.go +++ b/cmd/single/start.go @@ -4,9 +4,19 @@ import ( "context" "net/http" "os" + metricsserver "sigs.k8s.io/controller-runtime/pkg/metrics/server" ctrlWebhook "sigs.k8s.io/controller-runtime/pkg/webhook" + _ "github.com/golang/glog" + "github.com/prometheus/client_golang/prometheus/promhttp" + "github.com/spf13/cobra" + "golang.org/x/sync/errgroup" + _ "gorm.io/driver/postgres" // Required to import database driver. 
+ "sigs.k8s.io/controller-runtime/pkg/cache" + "sigs.k8s.io/controller-runtime/pkg/manager" + "sigs.k8s.io/controller-runtime/pkg/metrics" + datacatalogConfig "github.com/flyteorg/flyte/datacatalog/pkg/config" datacatalogRepo "github.com/flyteorg/flyte/datacatalog/pkg/repositories" datacatalog "github.com/flyteorg/flyte/datacatalog/pkg/rpc/datacatalogservice" @@ -18,6 +28,7 @@ import ( adminScheduler "github.com/flyteorg/flyte/flyteadmin/scheduler" propellerEntrypoint "github.com/flyteorg/flyte/flytepropeller/pkg/controller" propellerConfig "github.com/flyteorg/flyte/flytepropeller/pkg/controller/config" + "github.com/flyteorg/flyte/flytepropeller/pkg/controller/executors" "github.com/flyteorg/flyte/flytepropeller/pkg/signals" webhookEntrypoint "github.com/flyteorg/flyte/flytepropeller/pkg/webhook" webhookConfig "github.com/flyteorg/flyte/flytepropeller/pkg/webhook/config" @@ -28,16 +39,6 @@ import ( "github.com/flyteorg/flyte/flytestdlib/promutils" "github.com/flyteorg/flyte/flytestdlib/promutils/labeled" "github.com/flyteorg/flyte/flytestdlib/storage" - _ "github.com/golang/glog" - "github.com/prometheus/client_golang/prometheus/promhttp" - "github.com/spf13/cobra" - "golang.org/x/sync/errgroup" - _ "gorm.io/driver/postgres" // Required to import database driver. 
- "k8s.io/client-go/rest" - "sigs.k8s.io/controller-runtime/pkg/cache" - "sigs.k8s.io/controller-runtime/pkg/client" - "sigs.k8s.io/controller-runtime/pkg/manager" - "sigs.k8s.io/controller-runtime/pkg/metrics" ) const defaultNamespace = "all" @@ -122,22 +123,8 @@ func startPropeller(ctx context.Context, cfg Propeller) error { SyncPeriod: &propellerCfg.DownstreamEval.Duration, DefaultNamespaces: namespaceConfigs, }, - NewCache: func(config *rest.Config, options cache.Options) (cache.Cache, error) { - k8sCache, err := cache.New(config, options) - if err != nil { - return k8sCache, err - } - - return otelutils.WrapK8sCache(k8sCache), nil - }, - NewClient: func(config *rest.Config, options client.Options) (client.Client, error) { - k8sClient, err := client.New(config, options) - if err != nil { - return k8sClient, err - } - - return otelutils.WrapK8sClient(k8sClient), nil - }, + NewCache: executors.NewCache, + NewClient: executors.BuildNewClientFunc(propellerScope), Metrics: metricsserver.Options{ // Disable metrics serving BindAddress: "0", diff --git a/datacatalog/Dockerfile b/datacatalog/Dockerfile deleted file mode 100644 index 6ba53406ed..0000000000 --- a/datacatalog/Dockerfile +++ /dev/null @@ -1,53 +0,0 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. 
-# ONLY EDIT THIS FILE FROM WITHIN THE 'FLYTEORG/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst - -FROM --platform=${BUILDPLATFORM} golang:1.19-alpine3.16 as builder - -ARG TARGETARCH -ENV GOARCH "${TARGETARCH}" -ENV GOOS linux - -RUN apk add git openssh-client make curl - -# Create the artifacts directory -RUN mkdir /artifacts - -# Pull GRPC health probe binary for liveness and readiness checks -RUN GRPC_HEALTH_PROBE_VERSION=v0.4.11 && \ - wget -qO/artifacts/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \ - chmod +x /artifacts/grpc_health_probe && \ - echo 'ded15e598d887ccc47bf2321371950bbf930f5e4856b9f75712ce4b2b5120480 /artifacts/grpc_health_probe' > .grpc_checksum && \ - sha256sum -c .grpc_checksum - -# COPY only the go mod files for efficient caching -COPY go.mod go.sum /go/src/github.com/flyteorg/datacatalog/ -WORKDIR /go/src/github.com/flyteorg/datacatalog - -# Pull dependencies -RUN go mod download - -# COPY the rest of the source code -COPY . /go/src/github.com/flyteorg/datacatalog/ - -# This 'linux_compile' target should compile binaries to the /artifacts directory -# The main entrypoint should be compiled to /artifacts/datacatalog -RUN make linux_compile - -# update the PATH to include the /artifacts directory -ENV PATH="/artifacts:${PATH}" - -# This will eventually move to centurylink/ca-certs:latest for minimum possible image size -FROM alpine:3.16 -LABEL org.opencontainers.image.source=https://github.com/flyteorg/datacatalog - -COPY --from=builder /artifacts /bin - -# Ensure the latest CA certs are present to authenticate SSL connections. 
-RUN apk --update add ca-certificates - -RUN addgroup -S flyte && adduser -S flyte -G flyte -USER flyte - -CMD ["datacatalog"] diff --git a/datacatalog/pull_request_template.md b/datacatalog/pull_request_template.md deleted file mode 100644 index 9cdab99b46..0000000000 --- a/datacatalog/pull_request_template.md +++ /dev/null @@ -1,35 +0,0 @@ -## _Read then delete this section_ - -_- Make sure to use a concise title for the pull-request._ - -_- Use #patch, #minor or #major in the pull-request title to bump the corresponding version. Otherwise, the patch version -will be bumped. [More details](https://github.com/marketplace/actions/github-tag-bump)_ - -# TL;DR -_Please replace this text with a description of what this PR accomplishes._ - -## Type - - [ ] Bug Fix - - [ ] Feature - - [ ] Plugin - -## Are all requirements met? - - - [ ] Code completed - - [ ] Smoke tested - - [ ] Unit tests added - - [ ] Code documentation added - - [ ] Any pending items have an associated Issue - -## Complete description - _How did you fix the bug, make the feature etc. 
Link to any design docs etc_ - -## Tracking Issue -_Remove the '*fixes*' keyword if there will be multiple PRs to fix the linked issue_ - -fixes https://github.com/flyteorg/flyte/issues/ - -## Follow-up issue -_NA_ -OR -_https://github.com/flyteorg/flyte/issues/_ diff --git a/deployment/eks/flyte_aws_scheduler_helm_generated.yaml b/deployment/eks/flyte_aws_scheduler_helm_generated.yaml index 647754edda..c4f557836a 100644 --- a/deployment/eks/flyte_aws_scheduler_helm_generated.yaml +++ b/deployment/eks/flyte_aws_scheduler_helm_generated.yaml @@ -192,10 +192,12 @@ data: task_resources: defaults: cpu: 1000m + ephemeralStorage: 500Mi memory: 1000Mi storage: 1000Mi limits: cpu: 2 + ephemeralStorage: 20Mi gpu: 1 memory: 1Gi storage: 2000Mi @@ -429,7 +431,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.10.6 + image: cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -847,7 +849,7 @@ spec: template: metadata: annotations: - configChecksum: "2b5c85969f2bd85bb51a084f9fd72c20c3aca94be99e53cb4c4e9f78e77ebc5" + configChecksum: "85f2694a4138443026b87878dbbc5f1e9f52aa54eb87ef4c64117d1d91e1a7f" labels: app.kubernetes.io/name: flyteadmin app.kubernetes.io/instance: flyte @@ -865,7 +867,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -882,7 +884,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: seed-projects volumeMounts: @@ -896,7 +898,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources 
volumeMounts: @@ -909,7 +911,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -932,7 +934,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -1033,7 +1035,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -1094,6 +1096,7 @@ spec: name: flyte-console-config ports: - containerPort: 8080 + env: resources: limits: cpu: 250m @@ -1153,7 +1156,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -1167,7 +1170,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -1226,7 +1229,7 @@ spec: template: metadata: annotations: - configChecksum: "fd3294c2987d4c2863e13a84abd65a284f69332d97a698c833c27e5c9f5417a" + configChecksum: "ea2c2cd8898cdabd21a443862894799a49efd108e5118f4605b5e7c94008e0a" labels: app.kubernetes.io/name: flytepropeller app.kubernetes.io/instance: flyte @@ -1252,7 +1255,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -1306,9 +1309,9 @@ spec: labels: 
app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.10.6 + app.kubernetes.io/version: v1.10.7-b2 annotations: - configChecksum: "fd3294c2987d4c2863e13a84abd65a284f69332d97a698c833c27e5c9f5417a" + configChecksum: "ea2c2cd8898cdabd21a443862894799a49efd108e5118f4605b5e7c94008e0a" spec: securityContext: fsGroup: 65534 @@ -1317,7 +1320,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -1340,7 +1343,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/deployment/eks/flyte_helm_controlplane_generated.yaml b/deployment/eks/flyte_helm_controlplane_generated.yaml index 30a03b4498..47a75cae25 100644 --- a/deployment/eks/flyte_helm_controlplane_generated.yaml +++ b/deployment/eks/flyte_helm_controlplane_generated.yaml @@ -173,10 +173,12 @@ data: task_resources: defaults: cpu: 1000m + ephemeralStorage: 500Mi memory: 1000Mi storage: 1000Mi limits: cpu: 2 + ephemeralStorage: 20Mi gpu: 1 memory: 1Gi storage: 2000Mi @@ -553,7 +555,7 @@ spec: template: metadata: annotations: - configChecksum: "053b20ebc40227f6ed8ddc61f5997ee7997c604158f773779f20ec61af11a2f" + configChecksum: "61fa8a4eebe7e96a3e25b0b2c4baaf7d6af84924167f57e569632fdd282b442" labels: app.kubernetes.io/name: flyteadmin app.kubernetes.io/instance: flyte @@ -571,7 +573,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -588,7 +590,7 @@ spec: - flytesnacks - flytetester - flyteexamples - 
image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: seed-projects volumeMounts: @@ -602,7 +604,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -615,7 +617,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -638,7 +640,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -739,7 +741,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -800,6 +802,7 @@ spec: name: flyte-console-config ports: - containerPort: 8080 + env: resources: limits: cpu: 250m @@ -859,7 +862,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -873,7 +876,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -932,7 +935,7 @@ spec: template: metadata: annotations: - configChecksum: "053b20ebc40227f6ed8ddc61f5997ee7997c604158f773779f20ec61af11a2f" + configChecksum: 
"61fa8a4eebe7e96a3e25b0b2c4baaf7d6af84924167f57e569632fdd282b442" labels: app.kubernetes.io/name: flytescheduler app.kubernetes.io/instance: flyte @@ -949,7 +952,7 @@ spec: - precheck - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.6" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytescheduler-check volumeMounts: @@ -965,7 +968,7 @@ spec: - run - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.6" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytescheduler ports: diff --git a/deployment/eks/flyte_helm_dataplane_generated.yaml b/deployment/eks/flyte_helm_dataplane_generated.yaml index e5655a4c53..c1d6b96380 100644 --- a/deployment/eks/flyte_helm_dataplane_generated.yaml +++ b/deployment/eks/flyte_helm_dataplane_generated.yaml @@ -94,7 +94,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.10.6 + image: cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -427,7 +427,7 @@ spec: template: metadata: annotations: - configChecksum: "fd3294c2987d4c2863e13a84abd65a284f69332d97a698c833c27e5c9f5417a" + configChecksum: "ea2c2cd8898cdabd21a443862894799a49efd108e5118f4605b5e7c94008e0a" labels: app.kubernetes.io/name: flytepropeller app.kubernetes.io/instance: flyte @@ -453,7 +453,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -507,9 +507,9 @@ spec: labels: app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.10.6 + app.kubernetes.io/version: v1.10.7-b2 annotations: - configChecksum: "fd3294c2987d4c2863e13a84abd65a284f69332d97a698c833c27e5c9f5417a" + configChecksum: 
"ea2c2cd8898cdabd21a443862894799a49efd108e5118f4605b5e7c94008e0a" spec: securityContext: fsGroup: 65534 @@ -518,7 +518,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -541,7 +541,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/deployment/eks/flyte_helm_generated.yaml b/deployment/eks/flyte_helm_generated.yaml index 45fed80156..910b2746b8 100644 --- a/deployment/eks/flyte_helm_generated.yaml +++ b/deployment/eks/flyte_helm_generated.yaml @@ -204,10 +204,12 @@ data: task_resources: defaults: cpu: 1000m + ephemeralStorage: 500Mi memory: 1000Mi storage: 1000Mi limits: cpu: 2 + ephemeralStorage: 20Mi gpu: 1 memory: 1Gi storage: 2000Mi @@ -460,7 +462,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.10.6 + image: cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -878,7 +880,7 @@ spec: template: metadata: annotations: - configChecksum: "053b20ebc40227f6ed8ddc61f5997ee7997c604158f773779f20ec61af11a2f" + configChecksum: "61fa8a4eebe7e96a3e25b0b2c4baaf7d6af84924167f57e569632fdd282b442" labels: app.kubernetes.io/name: flyteadmin app.kubernetes.io/instance: flyte @@ -896,7 +898,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -913,7 +915,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" 
imagePullPolicy: "IfNotPresent" name: seed-projects volumeMounts: @@ -927,7 +929,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -940,7 +942,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -963,7 +965,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -1064,7 +1066,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -1125,6 +1127,7 @@ spec: name: flyte-console-config ports: - containerPort: 8080 + env: resources: limits: cpu: 250m @@ -1184,7 +1187,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -1198,7 +1201,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -1257,7 +1260,7 @@ spec: template: metadata: annotations: - configChecksum: "053b20ebc40227f6ed8ddc61f5997ee7997c604158f773779f20ec61af11a2f" + configChecksum: "61fa8a4eebe7e96a3e25b0b2c4baaf7d6af84924167f57e569632fdd282b442" labels: app.kubernetes.io/name: flytescheduler 
app.kubernetes.io/instance: flyte @@ -1274,7 +1277,7 @@ spec: - precheck - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.6" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytescheduler-check volumeMounts: @@ -1290,7 +1293,7 @@ spec: - run - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.6" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytescheduler ports: @@ -1345,7 +1348,7 @@ spec: template: metadata: annotations: - configChecksum: "fd3294c2987d4c2863e13a84abd65a284f69332d97a698c833c27e5c9f5417a" + configChecksum: "ea2c2cd8898cdabd21a443862894799a49efd108e5118f4605b5e7c94008e0a" labels: app.kubernetes.io/name: flytepropeller app.kubernetes.io/instance: flyte @@ -1371,7 +1374,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -1425,9 +1428,9 @@ spec: labels: app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.10.6 + app.kubernetes.io/version: v1.10.7-b2 annotations: - configChecksum: "fd3294c2987d4c2863e13a84abd65a284f69332d97a698c833c27e5c9f5417a" + configChecksum: "ea2c2cd8898cdabd21a443862894799a49efd108e5118f4605b5e7c94008e0a" spec: securityContext: fsGroup: 65534 @@ -1436,7 +1439,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -1459,7 +1462,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" 
imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/deployment/gcp/flyte_generated.yaml b/deployment/gcp/flyte_generated.yaml index 14897b6b71..01d708d860 100644 --- a/deployment/gcp/flyte_generated.yaml +++ b/deployment/gcp/flyte_generated.yaml @@ -8682,7 +8682,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: cr.flyte.org/flyteorg/datacatalog:v1.9.37 + image: cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2 imagePullPolicy: IfNotPresent name: datacatalog ports: @@ -8705,7 +8705,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: cr.flyte.org/flyteorg/datacatalog:v1.9.37 + image: cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2 imagePullPolicy: IfNotPresent name: run-migrations volumeMounts: @@ -8766,7 +8766,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: cr.flyte.org/flyteorg/flytepropeller:v1.9.37 + image: cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2 imagePullPolicy: IfNotPresent name: webhook volumeMounts: @@ -8793,7 +8793,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: cr.flyte.org/flyteorg/flytepropeller:v1.9.37 + image: cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2 imagePullPolicy: IfNotPresent name: generate-secrets volumeMounts: @@ -8841,7 +8841,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: cr.flyte.org/flyteorg/flyteadmin:v1.9.37 + image: cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2 imagePullPolicy: IfNotPresent name: flyteadmin ports: @@ -8888,7 +8888,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: cr.flyte.org/flyteorg/flyteadmin:v1.9.37 + image: cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2 imagePullPolicy: IfNotPresent name: run-migrations volumeMounts: @@ -8905,7 +8905,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: cr.flyte.org/flyteorg/flyteadmin:v1.9.37 + image: cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2 imagePullPolicy: IfNotPresent name: seed-projects volumeMounts: @@ -8919,7 +8919,7 @@ spec: - 
/etc/flyte/config/*.yaml - clusterresource - sync - image: cr.flyte.org/flyteorg/flyteadmin:v1.9.37 + image: cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2 imagePullPolicy: IfNotPresent name: sync-cluster-resources volumeMounts: @@ -8939,7 +8939,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: cr.flyte.org/flyteorg/flyteadmin:v1.9.37 + image: cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2 imagePullPolicy: IfNotPresent name: generate-secrets volumeMounts: @@ -9044,7 +9044,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: cr.flyte.org/flyteorg/flytepropeller:v1.9.37 + image: cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2 imagePullPolicy: IfNotPresent name: flytepropeller ports: @@ -9312,7 +9312,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: cr.flyte.org/flyteorg/flyteadmin:v1.9.37 + image: cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2 imagePullPolicy: IfNotPresent name: sync-cluster-resources volumeMounts: diff --git a/deployment/gcp/flyte_helm_controlplane_generated.yaml b/deployment/gcp/flyte_helm_controlplane_generated.yaml index 29be77bd52..a31446875c 100644 --- a/deployment/gcp/flyte_helm_controlplane_generated.yaml +++ b/deployment/gcp/flyte_helm_controlplane_generated.yaml @@ -178,10 +178,12 @@ data: task_resources: defaults: cpu: 500m + ephemeralStorage: 500Mi memory: 500Mi storage: 500Mi limits: cpu: 2 + ephemeralStorage: 20Mi gpu: 1 memory: 1Gi storage: 2000Mi @@ -568,7 +570,7 @@ spec: template: metadata: annotations: - configChecksum: "2e169a911a8234dd42d06ca0887279093f4ed36033d0543749ce126b26b50f3" + configChecksum: "c7d43aa7ff4bf67124616d00a83d3c45926ea5ca36bdebdfac1cbcd0e465270" labels: app.kubernetes.io/name: flyteadmin app.kubernetes.io/instance: flyte @@ -586,7 +588,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: 
@@ -603,7 +605,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: seed-projects volumeMounts: @@ -617,7 +619,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -630,7 +632,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -653,7 +655,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -754,7 +756,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -815,6 +817,7 @@ spec: name: flyte-console-config ports: - containerPort: 8080 + env: resources: limits: cpu: 250m @@ -874,7 +877,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -888,7 +891,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -947,7 +950,7 @@ spec: template: metadata: annotations: - configChecksum: 
"2e169a911a8234dd42d06ca0887279093f4ed36033d0543749ce126b26b50f3" + configChecksum: "c7d43aa7ff4bf67124616d00a83d3c45926ea5ca36bdebdfac1cbcd0e465270" labels: app.kubernetes.io/name: flytescheduler app.kubernetes.io/instance: flyte @@ -964,7 +967,7 @@ spec: - precheck - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.6" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytescheduler-check volumeMounts: @@ -980,7 +983,7 @@ spec: - run - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.6" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytescheduler ports: diff --git a/deployment/gcp/flyte_helm_dataplane_generated.yaml b/deployment/gcp/flyte_helm_dataplane_generated.yaml index bfe7b45516..c0cb5c5a6d 100644 --- a/deployment/gcp/flyte_helm_dataplane_generated.yaml +++ b/deployment/gcp/flyte_helm_dataplane_generated.yaml @@ -94,7 +94,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.10.6 + image: cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -435,7 +435,7 @@ spec: template: metadata: annotations: - configChecksum: "5cfa27fed34037ee9e992af31d950a82a9abe1e77f10b7bd7f37d2fb6b935e1" + configChecksum: "063bb0277c649fa12017e257aa4bb51aae517b3563e0593925680cb89bf2d55" labels: app.kubernetes.io/name: flytepropeller app.kubernetes.io/instance: flyte @@ -460,7 +460,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -514,9 +514,9 @@ spec: labels: app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.10.6 + app.kubernetes.io/version: v1.10.7-b2 annotations: - configChecksum: 
"5cfa27fed34037ee9e992af31d950a82a9abe1e77f10b7bd7f37d2fb6b935e1" + configChecksum: "063bb0277c649fa12017e257aa4bb51aae517b3563e0593925680cb89bf2d55" spec: securityContext: fsGroup: 65534 @@ -525,7 +525,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -548,7 +548,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/deployment/gcp/flyte_helm_generated.yaml b/deployment/gcp/flyte_helm_generated.yaml index 9c81b402c5..0627ba986c 100644 --- a/deployment/gcp/flyte_helm_generated.yaml +++ b/deployment/gcp/flyte_helm_generated.yaml @@ -209,10 +209,12 @@ data: task_resources: defaults: cpu: 500m + ephemeralStorage: 500Mi memory: 500Mi storage: 500Mi limits: cpu: 2 + ephemeralStorage: 20Mi gpu: 1 memory: 1Gi storage: 2000Mi @@ -473,7 +475,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.10.6 + image: cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -901,7 +903,7 @@ spec: template: metadata: annotations: - configChecksum: "2e169a911a8234dd42d06ca0887279093f4ed36033d0543749ce126b26b50f3" + configChecksum: "c7d43aa7ff4bf67124616d00a83d3c45926ea5ca36bdebdfac1cbcd0e465270" labels: app.kubernetes.io/name: flyteadmin app.kubernetes.io/instance: flyte @@ -919,7 +921,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -936,7 +938,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: 
"cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: seed-projects volumeMounts: @@ -950,7 +952,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -963,7 +965,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -986,7 +988,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -1087,7 +1089,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -1148,6 +1150,7 @@ spec: name: flyte-console-config ports: - containerPort: 8080 + env: resources: limits: cpu: 250m @@ -1207,7 +1210,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -1221,7 +1224,7 @@ spec: - --config - /etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -1280,7 +1283,7 @@ spec: template: metadata: annotations: - configChecksum: "2e169a911a8234dd42d06ca0887279093f4ed36033d0543749ce126b26b50f3" + configChecksum: 
"c7d43aa7ff4bf67124616d00a83d3c45926ea5ca36bdebdfac1cbcd0e465270" labels: app.kubernetes.io/name: flytescheduler app.kubernetes.io/instance: flyte @@ -1297,7 +1300,7 @@ spec: - precheck - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.6" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytescheduler-check volumeMounts: @@ -1313,7 +1316,7 @@ spec: - run - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.6" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytescheduler ports: @@ -1368,7 +1371,7 @@ spec: template: metadata: annotations: - configChecksum: "5cfa27fed34037ee9e992af31d950a82a9abe1e77f10b7bd7f37d2fb6b935e1" + configChecksum: "063bb0277c649fa12017e257aa4bb51aae517b3563e0593925680cb89bf2d55" labels: app.kubernetes.io/name: flytepropeller app.kubernetes.io/instance: flyte @@ -1393,7 +1396,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -1447,9 +1450,9 @@ spec: labels: app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.10.6 + app.kubernetes.io/version: v1.10.7-b2 annotations: - configChecksum: "5cfa27fed34037ee9e992af31d950a82a9abe1e77f10b7bd7f37d2fb6b935e1" + configChecksum: "063bb0277c649fa12017e257aa4bb51aae517b3563e0593925680cb89bf2d55" spec: securityContext: fsGroup: 65534 @@ -1458,7 +1461,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -1481,7 +1484,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: 
"cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/deployment/sandbox-binary/flyte_sandbox_binary_helm_generated.yaml b/deployment/sandbox-binary/flyte_sandbox_binary_helm_generated.yaml index 2b0d197af3..039b6628e4 100644 --- a/deployment/sandbox-binary/flyte_sandbox_binary_helm_generated.yaml +++ b/deployment/sandbox-binary/flyte_sandbox_binary_helm_generated.yaml @@ -116,7 +116,7 @@ data: stackdriver-enabled: false k8s: co-pilot: - image: "cr.flyte.org/flyteorg/flytecopilot:v1.10.6" + image: "cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2" k8s-array: logs: config: @@ -358,7 +358,7 @@ spec: app.kubernetes.io/instance: flyte app.kubernetes.io/component: flyte-binary annotations: - checksum/configuration: 94a584c983cfe1077f02569ae00f81c4fddae4505901006eda76d9338bd3dda2 + checksum/configuration: 4566d9bb7575f535ccf8481c719474e72b926b8029d83e3b6318549e3cdd2f14 checksum/configuration-secret: d5d93f4e67780b21593dc3799f0f6682aab0765e708e4020939975d14d44f929 checksum/cluster-resource-templates: 7dfa59f3d447e9c099b8f8ffad3af466fecbc9cf9f8c97295d9634254a55d4ae spec: diff --git a/deployment/sandbox/flyte_helm_generated.yaml b/deployment/sandbox/flyte_helm_generated.yaml index 26a61908ed..cade17e8d6 100644 --- a/deployment/sandbox/flyte_helm_generated.yaml +++ b/deployment/sandbox/flyte_helm_generated.yaml @@ -334,10 +334,12 @@ data: task_resources: defaults: cpu: 100m + ephemeralStorage: 500Mi memory: 200Mi storage: 5Mi limits: cpu: 2 + ephemeralStorage: 20Mi gpu: 1 memory: 1Gi storage: 20Mi @@ -585,7 +587,7 @@ data: plugins: k8s: co-pilot: - image: cr.flyte.org/flyteorg/flytecopilot:v1.10.6 + image: cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2 name: flyte-copilot- start-timeout: 30s core.yaml: | @@ -6690,7 +6692,7 @@ spec: template: metadata: annotations: - configChecksum: "45f0232531c0d1494809cf83387a95b2fc802019ea095de7a24ccd4f8de86ec" + 
configChecksum: "82d6ffa2a2dd83eb11c491a95af43fdede659d6b5b400b6edcd88291a28c4f4" labels: app.kubernetes.io/name: flyteadmin app.kubernetes.io/instance: flyte @@ -6708,7 +6710,7 @@ spec: - /etc/flyte/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -6724,7 +6726,7 @@ spec: - flytesnacks - flytetester - flyteexamples - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: seed-projects volumeMounts: @@ -6737,7 +6739,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - sync - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -6749,7 +6751,7 @@ spec: - mountPath: /etc/secrets/ name: admin-secrets - name: generate-secrets - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: ["/bin/sh", "-c"] args: @@ -6772,7 +6774,7 @@ spec: - --config - /etc/flyte/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flyteadmin ports: @@ -6863,7 +6865,7 @@ spec: - /etc/flyte/config/*.yaml - clusterresource - run - image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.6" + image: "cr.flyte.org/flyteorg/flyteadmin:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: sync-cluster-resources volumeMounts: @@ -6978,7 +6980,7 @@ spec: - /etc/datacatalog/config/*.yaml - migrate - run - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: run-migrations volumeMounts: @@ -6991,7 +6993,7 @@ spec: - --config - 
/etc/datacatalog/config/*.yaml - serve - image: "cr.flyte.org/flyteorg/datacatalog:v1.10.6" + image: "cr.flyte.org/flyteorg/datacatalog:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: datacatalog ports: @@ -7040,7 +7042,7 @@ spec: template: metadata: annotations: - configChecksum: "45f0232531c0d1494809cf83387a95b2fc802019ea095de7a24ccd4f8de86ec" + configChecksum: "82d6ffa2a2dd83eb11c491a95af43fdede659d6b5b400b6edcd88291a28c4f4" labels: app.kubernetes.io/name: flytescheduler app.kubernetes.io/instance: flyte @@ -7057,7 +7059,7 @@ spec: - precheck - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.6" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytescheduler-check volumeMounts: @@ -7072,7 +7074,7 @@ spec: - run - --config - /etc/flyte/config/*.yaml - image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.6" + image: "cr.flyte.org/flyteorg/flytescheduler:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytescheduler ports: @@ -7124,7 +7126,7 @@ spec: template: metadata: annotations: - configChecksum: "3d2a38f6286a81fa144d0690a97cf10282a73e862a2a3b5a3e0bb2911aac9ca" + configChecksum: "809822d98423c61b882d80deb62e9a8e59acd78ad56f875a4542a8154162ed7" labels: app.kubernetes.io/name: flytepropeller app.kubernetes.io/instance: flyte @@ -7149,7 +7151,7 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" name: flytepropeller ports: @@ -7196,9 +7198,9 @@ spec: labels: app: flyte-pod-webhook app.kubernetes.io/name: flyte-pod-webhook - app.kubernetes.io/version: v1.10.6 + app.kubernetes.io/version: v1.10.7-b2 annotations: - configChecksum: "3d2a38f6286a81fa144d0690a97cf10282a73e862a2a3b5a3e0bb2911aac9ca" + configChecksum: "809822d98423c61b882d80deb62e9a8e59acd78ad56f875a4542a8154162ed7" spec: securityContext: fsGroup: 65534 @@ -7207,7 
+7209,7 @@ spec: serviceAccountName: flyte-pod-webhook initContainers: - name: generate-secrets - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: - flytepropeller @@ -7230,7 +7232,7 @@ spec: mountPath: /etc/flyte/config containers: - name: webhook - image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.6" + image: "cr.flyte.org/flyteorg/flytepropeller:v1.10.7-b2" imagePullPolicy: "IfNotPresent" command: - flytepropeller diff --git a/doc-requirements.in b/doc-requirements.in deleted file mode 100644 index 49f7439659..0000000000 --- a/doc-requirements.in +++ /dev/null @@ -1,16 +0,0 @@ -codespell -git+https://github.com/flyteorg/furo@main -sphinx -sphinx-prompt -sphinx-code-include -sphinx-autoapi -sphinx-copybutton -sphinxext-remoteliteralinclude -sphinx-issues -sphinx_fontawesome -sphinx-panels -sphinxcontrib-mermaid -sphinxcontrib-video -sphinxcontrib-youtube==1.2.0 -sphinx-tabs -sphinx-tags diff --git a/doc-requirements.txt b/doc-requirements.txt deleted file mode 100644 index 2bde883f6a..0000000000 --- a/doc-requirements.txt +++ /dev/null @@ -1,61 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --annotation-style=line doc-requirements.in -# -alabaster==0.7.13 # via sphinx -astroid==3.0.1 # via sphinx-autoapi -babel==2.13.1 # via sphinx -beautifulsoup4==4.12.2 # via furo, sphinx-code-include -certifi==2023.11.17 # via requests -cfgv==3.4.0 # via pre-commit -charset-normalizer==3.3.2 # via requests -codespell==2.2.6 # via -r doc-requirements.in -distlib==0.3.7 # via virtualenv -docutils==0.17.1 # via sphinx, sphinx-panels, sphinx-tabs -filelock==3.13.1 # via virtualenv -furo @ git+https://github.com/flyteorg/furo@main # via -r doc-requirements.in -identify==2.5.32 # via pre-commit -idna==3.6 # via requests -imagesize==1.4.1 # via sphinx -jinja2==3.0.3 # via sphinx, sphinx-autoapi, sphinx-tabs 
-markupsafe==2.1.3 # via jinja2 -nodeenv==1.8.0 # via pre-commit -packaging==23.2 # via sphinx -platformdirs==4.1.0 # via virtualenv -pre-commit==3.5.0 # via sphinx-tags -pygments==2.17.2 # via furo, sphinx, sphinx-prompt, sphinx-tabs -pyyaml==6.0.1 # via pre-commit, sphinx-autoapi -requests==2.31.0 # via sphinx, sphinxcontrib-youtube -six==1.16.0 # via sphinx-code-include, sphinxext-remoteliteralinclude -snowballstemmer==2.2.0 # via sphinx -soupsieve==2.5 # via beautifulsoup4 -sphinx==4.5.0 # via -r doc-requirements.in, furo, sphinx-autoapi, sphinx-basic-ng, sphinx-code-include, sphinx-copybutton, sphinx-fontawesome, sphinx-issues, sphinx-panels, sphinx-prompt, sphinx-tabs, sphinx-tags, sphinxcontrib-video, sphinxcontrib-youtube, sphinxext-remoteliteralinclude -sphinx-autoapi==2.0.1 # via -r doc-requirements.in -sphinx-basic-ng==1.0.0b2 # via furo -sphinx-code-include==1.1.1 # via -r doc-requirements.in -sphinx-copybutton==0.5.2 # via -r doc-requirements.in -sphinx-fontawesome==0.0.6 # via -r doc-requirements.in -sphinx-issues==3.0.1 # via -r doc-requirements.in -sphinx-panels==0.6.0 # via -r doc-requirements.in -sphinx-prompt==1.5.0 # via -r doc-requirements.in -sphinx-tabs==3.4.0 # via -r doc-requirements.in -sphinx-tags==0.2.1 # via -r doc-requirements.in -sphinxcontrib-applehelp==1.0.4 # via sphinx -sphinxcontrib-devhelp==1.0.2 # via sphinx -sphinxcontrib-htmlhelp==2.0.1 # via sphinx -sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-mermaid==0.9.2 # via -r doc-requirements.in -sphinxcontrib-qthelp==1.0.3 # via sphinx -sphinxcontrib-serializinghtml==1.1.5 # via sphinx -sphinxcontrib-video==0.2.0 # via -r doc-requirements.in -sphinxcontrib-youtube==1.2.0 # via -r doc-requirements.in -sphinxext-remoteliteralinclude==0.4.0 # via -r doc-requirements.in -typing-extensions==4.8.0 # via astroid -unidecode==1.3.7 # via sphinx-autoapi -urllib3==2.1.0 # via requests -virtualenv==20.25.0 # via pre-commit - -# The following packages are considered to be unsafe in a 
requirements file: -# setuptools diff --git a/docker/sandbox-bundled/manifests/complete-agent.yaml b/docker/sandbox-bundled/manifests/complete-agent.yaml index da57ef92f5..44510f5efc 100644 --- a/docker/sandbox-bundled/manifests/complete-agent.yaml +++ b/docker/sandbox-bundled/manifests/complete-agent.yaml @@ -468,7 +468,7 @@ data: stackdriver-enabled: false k8s: co-pilot: - image: "cr.flyte.org/flyteorg/flytecopilot:v1.10.6" + image: "cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2" k8s-array: logs: config: @@ -816,7 +816,7 @@ type: Opaque --- apiVersion: v1 data: - haSharedSecret: RkZLd2xsVEZSNXNHMExjeg== + haSharedSecret: R3dGeldWcjRZb1d5dlVMbA== proxyPassword: "" proxyUsername: "" kind: Secret @@ -1246,7 +1246,7 @@ spec: metadata: annotations: checksum/cluster-resource-templates: 6fd9b172465e3089fcc59f738b92b8dc4d8939360c19de8ee65f68b0e7422035 - checksum/configuration: 9cb50a36963e1d23a3b500b148de3407c1fcc1b65da0f7da8b038cfd35c97fca + checksum/configuration: 267295f401ead24cac5d0e20fd495f2373d31774f0abaa3279d4a22f94dc954f checksum/configuration-secret: 09216ffaa3d29e14f88b1f30af580d02a2a5e014de4d750b7f275cc07ed4e914 labels: app.kubernetes.io/component: flyte-binary @@ -1412,7 +1412,7 @@ spec: metadata: annotations: checksum/config: 8f50e768255a87f078ba8b9879a0c174c3e045ffb46ac8723d2eedbe293c8d81 - checksum/secret: 54b8e8ed67f9cf2f4b8c83b917ba11b0ed9f81f453df70376c332e202522643e + checksum/secret: 11c2215901062e6c31e3f73ad634f47f23e3ce1c1b5b774e500e6592969a907c labels: app: docker-registry release: flyte-sandbox diff --git a/docker/sandbox-bundled/manifests/complete.yaml b/docker/sandbox-bundled/manifests/complete.yaml index 2a4bd6bbdc..4abe5dad66 100644 --- a/docker/sandbox-bundled/manifests/complete.yaml +++ b/docker/sandbox-bundled/manifests/complete.yaml @@ -457,7 +457,7 @@ data: stackdriver-enabled: false k8s: co-pilot: - image: "cr.flyte.org/flyteorg/flytecopilot:v1.10.6" + image: "cr.flyte.org/flyteorg/flytecopilot:v1.10.7-b2" k8s-array: logs: config: @@ 
-796,7 +796,7 @@ type: Opaque --- apiVersion: v1 data: - haSharedSecret: SENrVEZGc09ob3RKdFJRSA== + haSharedSecret: dWZVZnFyN055a2NqT1lTTw== proxyPassword: "" proxyUsername: "" kind: Secret @@ -1194,7 +1194,7 @@ spec: metadata: annotations: checksum/cluster-resource-templates: 6fd9b172465e3089fcc59f738b92b8dc4d8939360c19de8ee65f68b0e7422035 - checksum/configuration: 52331d07774723e1045269ebc8e4cdf95e1bdd85ab104699149cae45f1eb0327 + checksum/configuration: 98f436232001bb629ccb144b80f87538ab271997bfc494a3f51c33a99b826d70 checksum/configuration-secret: 09216ffaa3d29e14f88b1f30af580d02a2a5e014de4d750b7f275cc07ed4e914 labels: app.kubernetes.io/component: flyte-binary @@ -1360,7 +1360,7 @@ spec: metadata: annotations: checksum/config: 8f50e768255a87f078ba8b9879a0c174c3e045ffb46ac8723d2eedbe293c8d81 - checksum/secret: 7c61351bdfcc86e008feb9f8d023e02d3aa7e570e1759790f8d7c3f6a4fd9c86 + checksum/secret: c07763867933245f95f13aa477630c8c337c62efee4cba504729dc8a4f9e5ce2 labels: app: docker-registry release: flyte-sandbox diff --git a/docker/sandbox-bundled/manifests/dev.yaml b/docker/sandbox-bundled/manifests/dev.yaml index 050efccd96..a8b9244f22 100644 --- a/docker/sandbox-bundled/manifests/dev.yaml +++ b/docker/sandbox-bundled/manifests/dev.yaml @@ -499,7 +499,7 @@ metadata: --- apiVersion: v1 data: - haSharedSecret: RjVjd3lFdXVJRllyTzlTNA== + haSharedSecret: cDQ2bUZZQkg4bjg2Y2hGRQ== proxyPassword: "" proxyUsername: "" kind: Secret @@ -934,7 +934,7 @@ spec: metadata: annotations: checksum/config: 8f50e768255a87f078ba8b9879a0c174c3e045ffb46ac8723d2eedbe293c8d81 - checksum/secret: 048f8e7e067f646dce51250b72fe31919755c8fa62f7f4f4ccc054e20a719d9e + checksum/secret: e65c1b8ed75e538fd09b9d94538d1a99a986534b18625406a2bc9cce8aeffe1a labels: app: docker-registry release: flyte-sandbox diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000000..322c7fe3b5 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,29 @@ +# Building the Flyte docs locally + +## Prerequisites + 
+* `conda` (We recommend Miniconda installed with an [official installer](https://docs.conda.io/projects/miniconda/en/latest/index.html#latest-miniconda-installer-links)) + +* [`conda-lock`](https://github.com/conda/conda-lock) + + +## Set up the build environment + +In the `flyteorg/flyte` root directory do: + +``` +$ conda-lock install --name monodocs-env monodocs-environment.lock.yaml +``` + +This creates a new environment called `monodocs-env` with all the dependencies needed to build the docs. You can choose a different environment name if you like. + + +## Building the docs + +In the `flyteorg/flyte` root directory make sure you have activated the `monodocs-env` (or whatever you called it) environment and do: + +``` +$ make docs +``` + +The resulting `html` files will be in `docs/_build/html`. diff --git a/docs/_ext/import_projects.py b/docs/_ext/import_projects.py index b4fa6310d7..1e32aa0d8c 100644 --- a/docs/_ext/import_projects.py +++ b/docs/_ext/import_projects.py @@ -26,6 +26,7 @@ class ImportProjectsConfig: flytekit_api_dir: str source_regex_mapping: dict = field(default_factory=dict) list_table_toc: List[str] = field(default_factory=list) + dev_build: bool = False @dataclass @@ -53,6 +54,8 @@ class Project: ``` """ +VERSION_PATTERN = r"^v[0-9]+.[0-9]+.[0-9]+$" + class ListTableToc(SphinxDirective): """Custom directive to convert list-table into both list-table and toctree.""" @@ -102,7 +105,7 @@ def update_sys_path_for_flytekit(import_project_config: ImportProjectsConfig): def update_html_context(project: Project, tag: str, commit: str, config: Config): - tag_url = f"{project.source}/releases/tag/{tag}" + tag_url = "#" if tag == "dev" else f"{project.source}/releases/tag/{tag}" commit_url = f"{project.source}/tree/{commit}" config.html_context[f"{project.name}_tag"] = tag @@ -146,10 +149,14 @@ def import_projects(app: Sphinx, config: Config): dest_docs_dir = srcdir / project.dest if repo: - tags = sorted(repo.tags, key=lambda t: 
t.commit.committed_datetime) - if not tags: - # If tags don't exist just use the current commit. This occurs - # when the git repo is a shallow clone. + tags = sorted( + [t for t in repo.tags if re.match(VERSION_PATTERN, t.name)], + key=lambda t: t.commit.committed_datetime + ) + if not tags or import_projects_config.dev_build: + # If dev_build is specified or the tags don't exist just use the + # current commit. This occurs when the git repo is a shallow + # clone. tag_str = "dev" commit = str(repo.head.commit)[:7] else: diff --git a/docs/_static/algolia-search.js b/docs/_static/algolia-search.js new file mode 100644 index 0000000000..34789e97e8 --- /dev/null +++ b/docs/_static/algolia-search.js @@ -0,0 +1,12 @@ +/*global docsearch*/ + +docsearch({ + container: ".docsearch", + appId: "WLG0MZB58Q", + apiKey: "28bf9bfd4a77a7d6b3ab7e98c671e781", + indexName: "flyte", + searchParameters: { + hitsPerPage: 10, + // See https://www.algolia.com/doc/api-reference/api-parameters/ + }, +}); diff --git a/docs/_static/algolia.css b/docs/_static/algolia.css new file mode 100644 index 0000000000..c051ed6197 --- /dev/null +++ b/docs/_static/algolia.css @@ -0,0 +1,23 @@ +/* Algolia Docs Search Style */ +.docsearch { + width: 100% !important; +} + +.DocSearch-Button { + height: 60px !important; + width: 100% !important; + margin: 0px !important; + border-radius: 0 !important; + border-bottom: 1px solid var(--color-header-border) !important; + background: var(--color-sidebar-background) !important; + padding: 0 15px !important; +} + +.DocSearch-Button:hover, .DocSearch-Button:active { + box-shadow: none !important; + background: var(--docsearch-searchbox-background) !important; +} + +.sidebar-search-container::before { + content: none; +} diff --git a/docs/_static/flyte.css b/docs/_static/flyte.css index a0e5225c25..6071b8ab51 100644 --- a/docs/_static/flyte.css +++ b/docs/_static/flyte.css @@ -43,6 +43,7 @@ h6 { width: 100% !important; margin: 0px !important; border-radius: 0 
!important; + border-top: 1px solid var(--color-header-border) !important; border-bottom: 1px solid var(--color-header-border) !important; background: var(--color-sidebar-background) !important; padding: 0 15px !important; @@ -53,6 +54,16 @@ h6 { background: var(--docsearch-searchbox-background) !important; } +.DocSearch-Button-Key { + border-radius: 3px !important; + box-shadow: var(--docsearch-key-shadow) !important; + color: var(--docsearch-muted-color) !important; + display: flex !important; + margin-right: 0.4em !important; + padding: 0 0 2px !important; + border: 0 !important; +} + .sidebar-search-container::before { content: none; } diff --git a/docs/_templates/base.html b/docs/_templates/base.html new file mode 100644 index 0000000000..f3c5ef1a49 --- /dev/null +++ b/docs/_templates/base.html @@ -0,0 +1,125 @@ + + + + {%- block site_meta -%} + + + + + {%- if metatags %}{{ metatags }}{% endif -%} + + {%- block linktags %} + {%- if hasdoc('about') -%} + + {%- endif -%} + {%- if hasdoc('genindex') -%} + + {%- endif -%} + {%- if hasdoc('search') -%} + + {%- endif -%} + {%- if hasdoc('copyright') -%} + + {%- endif -%} + {%- if next -%} + + {%- endif -%} + {%- if prev -%} + + {%- endif -%} + {#- rel="canonical" (set by html_baseurl) -#} + {%- if pageurl %} + + {%- endif %} + {%- endblock linktags %} + + {# Favicon #} + {%- if favicon_url -%} + + {%- endif -%} + + {#- Generator banner -#} + + + {%- endblock site_meta -%} + + {#- Site title -#} + {%- block htmltitle -%} + {% if not docstitle %} + {{ title|striptags|e }} + {% elif pagename == master_doc %} + {{ docstitle|striptags|e }} + {% else %} + {{ title|striptags|e }} - {{ docstitle|striptags|e }} + {% endif %} + {%- endblock -%} + + {%- block styles -%} + + {# Custom stylesheets #} + {%- block regular_styles -%} + {%- for css in css_files -%} + {% if css|attr("filename") -%} + {{ css_tag(css) }} + {%- else -%} + + {%- endif %} + {% endfor -%} + {%- endblock regular_styles -%} + + {#- Theme-related stylesheets 
-#} + {%- block theme_styles %} + {% include "partials/_head_css_variables.html" with context %} + {%- endblock theme_styles -%} + + {%- block extra_styles -%} + + + {%- endblock extra_styles -%} + + {%- endblock styles -%} + + {#- Custom front matter #} + {%- block extrahead -%}{%- endblock -%} + + + + + + {% block body %} + + {% endblock %} + + {%- block scripts -%} + + {# Custom JS #} + {%- block regular_scripts -%} + {% for path in script_files -%} + {{ js_tag(path) }} + {% endfor -%} + {%- endblock regular_scripts -%} + + {# Theme-related JavaScript code #} + {%- block theme_scripts -%} + + + {%- endblock -%} + {%- endblock scripts -%} + + diff --git a/docs/_templates/page.html b/docs/_templates/page.html index 582062c95e..d078e9dd5e 100644 --- a/docs/_templates/page.html +++ b/docs/_templates/page.html @@ -61,6 +61,8 @@
+ + diff --git a/docs/_templates/sidebar/search.html b/docs/_templates/sidebar/search.html new file mode 100644 index 0000000000..a6bfc08dd9 --- /dev/null +++ b/docs/_templates/sidebar/search.html @@ -0,0 +1,5 @@ +
+ + + diff --git a/docs/concepts/component_architecture/flytepropeller_architecture.rst b/docs/concepts/component_architecture/flytepropeller_architecture.rst index a04f6dbe4d..c04edbf617 100644 --- a/docs/concepts/component_architecture/flytepropeller_architecture.rst +++ b/docs/concepts/component_architecture/flytepropeller_architecture.rst @@ -19,7 +19,7 @@ A Flyte :ref:`workflow ` is represented as a Directed Acycli - ``DynamicNodes`` add nodes to the DAG. - ``WorkflowNodes`` allow embedding workflows within each other. -FlytePropeller is responsible for scheduling and tracking execution of Flyte workflows. It is implemented using a K8s controller and adheres to the established K8s design principles. In this scheme, resources are periodically evaluated and the goal is to transition from the observed state to a requested state. +FlytePropeller is responsible for scheduling and tracking execution of Flyte workflows. It is implemented using a K8s controller and adheres to the established K8s design principles. In this scheme, resources are periodically evaluated and the goal is to transition from the observed state to a requested state. In our case, workflows are the resources and they are iteratively evaluated to transition from the current state to success. During each loop, the current workflow state is established as the phase of workflow nodes and subsequent tasks, and FlytePropeller performs operations to transition this state to success. The operations may include scheduling (or rescheduling) node executions, evaluating dynamic or branch nodes, etc. These design decisions ensure that FlytePropeller can scale to manage a large number of concurrent workflows without performance degradation. @@ -33,7 +33,7 @@ Components FlyteWorkflow CRD / K8s Integration ----------------------------------- -Workflows in Flyte are maintained as Custom Resource Definitions (CRDs) in Kubernetes, which are stored in the backing etcd cluster. 
Each execution of a workflow definition results in the creation of a new FlyteWorkflow CR (Custom Resource) which maintains a state for the entirety of processing. CRDs provide variable definitions to describe both resource specifications (spec) and status' (status). The FlyteWorkflow CRD uses the spec subsection to detail the workflow DAG, embodying node dependencies, etc. The status subsection tracks workflow metadata including overall workflow status, node/task phases, status/phase transition timestamps, etc. +Workflows in Flyte are maintained as Custom Resource Definitions (CRDs) in Kubernetes, which are stored in the backing etcd cluster. Each execution of a workflow definition results in the creation of a new FlyteWorkflow CR (Custom Resource) which maintains a state for the entirety of processing. CRDs provide variable definitions to describe both resource specifications (spec) and status (status). The FlyteWorkflow CRD uses the spec subsection to detail the workflow DAG, embodying node dependencies, etc. The status subsection tracks workflow metadata including overall workflow status, node/task phases, status/phase transition timestamps, etc. K8s exposes a powerful controller/operator API that enables entities to track creation/updates over a specific resource type. FlytePropeller uses this API to track FlyteWorkflows, meaning every time an instance of the FlyteWorkflow CR is created/updated, the FlytePropeller instance is notified. FlyteAdmin is the common entry point, where initialization of FlyteWorkflow CRs may be triggered by user workflow definition executions, automatic relaunches, or periodically scheduled workflow definition executions. However, it is conceivable to manually create FlyteWorkflow CRs, but this will have limited visibility and usability. 
@@ -69,7 +69,15 @@ NodeHandlers FlytePropeller includes a robust collection of NodeHandlers to support diverse evaluation of the workflow DAG: -* **TaskHandler (Plugins)**: These are responsible for executing plugin specific tasks. This may include contacting FlyteAdmin to schedule K8s pod to perform work, calling a web API to begin/track evaluation, and much more. The plugin paradigm exposes an extensible interface for adding functionality to Flyte workflows. +* **TaskHandler (Plugins)**: These are responsible for executing tasks in the Flyte cluster. There are mainly 3 kinds of tasks for the task handler: + + 1. **Pod Task**: Create a pod in the Kubernetes cluster, execute the task, and then delete the pod. + + 2. **K8s Operator Backend Plugin**: Install a specific Kubernetes Operator (e.g., Spark, Ray, and Kubeflow) in the cluster, create pods by the Kubernetes Operator, execute the task, and then delete the pods. + + 3. **Web API Task**: Send REST/gRPC requests to a server and return the response. + Note: The Web API Task will not start a pod. + * **DynamicHandler**: Flyte workflow CRs are initialized using a DAG compiled during the registration process. The numerous benefits of this approach are beyond the scope of this document. However, there are situations where the complete DAG is unknown at compile time. For example, when executing a task on each value of an input list. Using Dynamic nodes, a new DAG subgraph may be dynamically compiled during runtime and linked to the existing FlyteWorkflow CR. * **WorkflowHandler**: This handler allows embedding workflows within another workflow definition. The API exposes this functionality using either (1) an inline execution, where the workflow function is invoked directly resulting in a single FlyteWorkflow CR with an appended sub-workflow, or (2) a launch plan, which uses a TODO to create a separate sub-FlyteWorkflow CR whose execution state is linked to the parent FlyteWorkflow CR. 
* **BranchHandler**: The branch handler allows the DAG to follow a specific control path based on input (or computed) values. @@ -79,3 +87,10 @@ FlyteAdmin Events ----------------- It should be noted that the WorkflowExecutor, NodeExecutor, and TaskHandlers send events to FlyteAdmin, enabling it to track workflows in near real-time. + +FlytePlugins +------------ + +Here is an overview architecture of FlytePlugins: + +.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/concepts/architecture/flytepropeller_plugins_architecture.png diff --git a/docs/concepts/data_management.rst b/docs/concepts/data_management.rst index 33a7f499a1..0d4edbd0a8 100644 --- a/docs/concepts/data_management.rst +++ b/docs/concepts/data_management.rst @@ -168,7 +168,7 @@ Between Tasks Bringing in Your Own Datastores for Raw Data ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Flytekit has a pluggable `data persistence layer `__. +Flytekit has a pluggable data persistence layer. This is driven by PROTOCOL. For example, it is theoretically possible to use S3 ``s3://`` for metadata and GCS ``gcs://`` for raw data. It is also possible to create your own protocol ``my_fs://``, to change how data is stored and accessed. But for Metadata, the data should be accessible to Flyte control plane. diff --git a/docs/conf.py b/docs/conf.py index 89692d1f78..0bb049a3bb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -35,7 +35,7 @@ # The short X.Y version version = "" # The full version, including alpha/beta/rc tags -release = "1.8.0" +release = "1.10.7-b2" # -- General configuration --------------------------------------------------- @@ -186,7 +186,7 @@ # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
html_static_path = ["_static"] -html_css_files = ["custom.css", "flyte.css"] +html_css_files = ["custom.css", "flyte.css", "algolia.css"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -311,7 +311,7 @@ # from other repos. REPLACE_PATTERNS = { r"": r"", - r"": r"", + r"": r"", INTERSPHINX_REFS_PATTERN: INTERSPHINX_REFS_REPLACE, r"": r"", @@ -337,6 +337,7 @@ "flytesnacks/tutorials", "flytesnacks/integrations", ], + "dev_build": bool(int(os.environ.get("MONODOCS_DEV_BUILD", 1))), } # Define these environment variables to use local copies of the projects. This @@ -444,6 +445,7 @@ def filter(self, record: logging.LogRecord) -> bool: 'Error with CSV data in "csv-table" directive', "Definition list ends without a blank line", "autodoc: failed to import module 'awssagemaker' from module 'flytekitplugins'", + "Enumerated list ends without a blank line", ) if msg.strip().startswith(filter_out): diff --git a/docs/deployment/agents/bigquery.rst b/docs/deployment/agents/bigquery.rst index ac6ec896bb..9835c3d47a 100644 --- a/docs/deployment/agents/bigquery.rst +++ b/docs/deployment/agents/bigquery.rst @@ -102,3 +102,5 @@ Upgrade the Flyte Helm release Replace ```` with the name of your release (e.g., ``flyte``) and ```` with the name of your namespace (e.g., ``flyte``). + +For BigQuery plugin on the Flyte cluster, please refer to `BigQuery Plugin Example `_ diff --git a/rsts/deployment/agents/databricks.rst b/docs/deployment/agents/databricks.rst similarity index 98% rename from rsts/deployment/agents/databricks.rst rename to docs/deployment/agents/databricks.rst index 7d5e2bb3e6..00a5e97a47 100644 --- a/rsts/deployment/agents/databricks.rst +++ b/docs/deployment/agents/databricks.rst @@ -291,4 +291,4 @@ Wait for the upgrade to complete. 
You can check the status of the deployment pod kubectl get pods -n flyte -For databricks plugin on the Flyte cluster, please refer to `Databricks Plugin Example `_ +For databricks plugin on the Flyte cluster, please refer to `Databricks Plugin Example `_ diff --git a/docs/deployment/agents/index.md b/docs/deployment/agents/index.md index 10a80236b9..e27644570a 100644 --- a/docs/deployment/agents/index.md +++ b/docs/deployment/agents/index.md @@ -16,6 +16,8 @@ Discover the process of setting up Agents for Flyte. - Guide to setting up the MMCloud agent. * - {ref}`Sensor Agent ` - Guide to setting up the Sensor agent. +* - {ref}`Databricks Agent ` + - Guide to setting up the Databricks agent. ``` ```{toctree} @@ -25,5 +27,6 @@ Discover the process of setting up Agents for Flyte. bigquery mmcloud +databricks sensor ``` diff --git a/docs/deployment/agents/mmcloud.rst b/docs/deployment/agents/mmcloud.rst index 1e36ff7cfc..217beab8ed 100644 --- a/docs/deployment/agents/mmcloud.rst +++ b/docs/deployment/agents/mmcloud.rst @@ -57,15 +57,15 @@ Enable the MMCloud agent by adding the following config to the relevant YAML fil .. code-block:: yaml tasks: - task-agents: - enabled-agents: + task-plugins: + enabled-plugins: - agent-service default-for-task-types: - mmcloud_task: agent-service .. code-block:: yaml - agents: + plugins: agent-service: agents: mmcloud-agent: @@ -117,3 +117,5 @@ Wait for the upgrade to complete. You can check the status of the deployment pod .. 
code-block:: kubectl get pods -n flyte + +For MMCloud plugin on the Flyte cluster, please refer to `Memory Machine Cloud Plugin Example `_ diff --git a/docs/deployment/configuration/generated/datacatalog_config.rst b/docs/deployment/configuration/generated/datacatalog_config.rst index b20aa7fe59..fe79705c20 100644 --- a/docs/deployment/configuration/generated/datacatalog_config.rst +++ b/docs/deployment/configuration/generated/datacatalog_config.rst @@ -12,6 +12,8 @@ Flyte Datacatalog Configuration - `logger <#section-logger>`_ +- `otel <#section-otel>`_ + - `storage <#section-storage>`_ Section: application @@ -217,11 +219,11 @@ postgres (`database.PostgresConfig`_) dbname: postgres debug: false - host: postgres + host: localhost options: sslmode=disable - password: "" + password: postgres passwordPath: "" - port: 5432 + port: 30001 username: postgres @@ -260,7 +262,7 @@ The host name of the database server .. code-block:: yaml - postgres + localhost port (int) @@ -272,7 +274,7 @@ The port name of the database server .. code-block:: yaml - "5432" + "30001" dbname (string) @@ -308,7 +310,7 @@ The database password. .. code-block:: yaml - "" + postgres passwordPath (string) @@ -491,6 +493,75 @@ Sets logging format type. json +Section: otel +======================================================================================================================== + +type (string) +------------------------------------------------------------------------------------------------------------------------ + +Sets the type of exporter to configure [noop/file/jaeger]. + +**Default Value**: + +.. code-block:: yaml + + noop + + +file (`otelutils.FileConfig`_) +------------------------------------------------------------------------------------------------------------------------ + +Configuration for exporting telemetry traces to a file + +**Default Value**: + +.. 
code-block:: yaml + + filename: /tmp/trace.txt + + +jaeger (`otelutils.JaegerConfig`_) +------------------------------------------------------------------------------------------------------------------------ + +Configuration for exporting telemetry traces to a jaeger + +**Default Value**: + +.. code-block:: yaml + + endpoint: http://localhost:14268/api/traces + + +otelutils.FileConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +filename (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Filename to store exported telemetry traces + +**Default Value**: + +.. code-block:: yaml + + /tmp/trace.txt + + +otelutils.JaegerConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +endpoint (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Endpoint for the jaeger telemtry trace ingestor + +**Default Value**: + +.. 
code-block:: yaml + + http://localhost:14268/api/traces + + Section: storage ======================================================================================================================== diff --git a/docs/deployment/configuration/generated/flyteadmin_config.rst b/docs/deployment/configuration/generated/flyteadmin_config.rst index 8970bf6e73..fe78df979c 100644 --- a/docs/deployment/configuration/generated/flyteadmin_config.rst +++ b/docs/deployment/configuration/generated/flyteadmin_config.rst @@ -30,6 +30,8 @@ Flyte Admin Configuration - `notifications <#section-notifications>`_ +- `otel <#section-otel>`_ + - `plugins <#section-plugins>`_ - `propeller <#section-propeller>`_ @@ -1408,6 +1410,16 @@ reconnectDelaySeconds (int) "0" +cloudEventVersion (uint8) +------------------------------------------------------------------------------------------------------------------------ + +**Default Value**: + +.. code-block:: yaml + + v1 + + interfaces.AWSConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1721,11 +1733,11 @@ postgres (`database.PostgresConfig`_) dbname: postgres debug: false - host: postgres + host: localhost options: sslmode=disable - password: "" + password: postgres passwordPath: "" - port: 5432 + port: 30001 username: postgres @@ -1751,7 +1763,7 @@ The host name of the database server .. code-block:: yaml - postgres + localhost port (int) @@ -1763,7 +1775,7 @@ The port name of the database server .. code-block:: yaml - "5432" + "30001" dbname (string) @@ -1799,7 +1811,7 @@ The database password. .. code-block:: yaml - "" + postgres passwordPath (string) @@ -2132,6 +2144,33 @@ envs (map[string]string) null +featureGates (`interfaces.FeatureGates`_) +------------------------------------------------------------------------------------------------------------------------ + +Enable experimental features. + +**Default Value**: + +.. 
code-block:: yaml + + enableArtifacts: false + + +interfaces.FeatureGates +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +enableArtifacts (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Enable artifacts feature. + +**Default Value**: + +.. code-block:: yaml + + "false" + + Section: logger ======================================================================================================================== @@ -2445,6 +2484,75 @@ topicName (string) "" +Section: otel +======================================================================================================================== + +type (string) +------------------------------------------------------------------------------------------------------------------------ + +Sets the type of exporter to configure [noop/file/jaeger]. + +**Default Value**: + +.. code-block:: yaml + + noop + + +file (`otelutils.FileConfig`_) +------------------------------------------------------------------------------------------------------------------------ + +Configuration for exporting telemetry traces to a file + +**Default Value**: + +.. code-block:: yaml + + filename: /tmp/trace.txt + + +jaeger (`otelutils.JaegerConfig`_) +------------------------------------------------------------------------------------------------------------------------ + +Configuration for exporting telemetry traces to a jaeger + +**Default Value**: + +.. code-block:: yaml + + endpoint: http://localhost:14268/api/traces + + +otelutils.FileConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +filename (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Filename to store exported telemetry traces + +**Default Value**: + +.. 
code-block:: yaml + + /tmp/trace.txt + + +otelutils.JaegerConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +endpoint (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Endpoint for the jaeger telemtry trace ingestor + +**Default Value**: + +.. code-block:: yaml + + http://localhost:14268/api/traces + + Section: plugins ======================================================================================================================== @@ -2483,6 +2591,7 @@ k8s (`config.K8sPluginConfig`_) output-vol-name: flyte-outputs start-timeout: 1m40s storage: "" + create-container-config-error-grace-period: 0s create-container-error-grace-period: 3m0s default-annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "false" @@ -2511,6 +2620,7 @@ k8s (`config.K8sPluginConfig`_) interruptible-node-selector-requirement: null interruptible-tolerations: null non-interruptible-node-selector-requirement: null + pod-pending-timeout: 0s resource-tolerations: null scheduler-name: "" send-object-events: false @@ -2800,6 +2910,16 @@ create-container-error-grace-period (`config.Duration`_) 3m0s +create-container-config-error-grace-period (`config.Duration`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + 0s + + image-pull-backoff-grace-period (`config.Duration`_) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -2810,6 +2930,16 @@ image-pull-backoff-grace-period (`config.Duration`_) 3m0s +pod-pending-timeout (`config.Duration`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. 
code-block:: yaml + + 0s + + gpu-device-node-label (string) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -3428,6 +3558,7 @@ config for a workflow node node-execution-deadline: 0s workflow-active-deadline: 0s default-max-attempts: 1 + enable-cr-debug-metadata: false ignore-retry-cause: false interruptible-failure-threshold: -1 max-node-retries-system-failures: 3 @@ -3566,6 +3697,18 @@ ArrayNode eventing version. 0 => legacy (drop-in replacement for maptask), 1 => "0" +node-execution-worker-count (int) +------------------------------------------------------------------------------------------------------------------------ + +Number of workers to evaluate node executions, currently only used for array nodes + +**Default Value**: + +.. code-block:: yaml + + "8" + + config.CompositeQueueConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -3914,6 +4057,18 @@ Ignore retry cause and count all attempts toward a node's max attempts "false" +enable-cr-debug-metadata (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Collapse node on any terminal state, not just successful terminations. This is useful to reduce the size of workflow state in etcd. + +**Default Value**: + +.. code-block:: yaml + + "false" + + config.DefaultDeadlines ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -4522,7 +4677,7 @@ grpc (`config.GrpcConfig`_) .. 
code-block:: yaml - enableGrpcHistograms: false + enableGrpcLatencyMetrics: false maxMessageSizeBytes: 0 port: 8089 serverReflection: true @@ -4723,10 +4878,10 @@ The max size in bytes for incoming gRPC messages "0" -enableGrpcHistograms (bool) +enableGrpcLatencyMetrics (bool) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -Enable grpc histograms +Enable grpc latency metrics. Note Histograms metrics can be expensive on Prometheus servers. **Default Value**: diff --git a/docs/deployment/configuration/generated/flytepropeller_config.rst b/docs/deployment/configuration/generated/flytepropeller_config.rst index 3a85329ab3..85c318e082 100644 --- a/docs/deployment/configuration/generated/flytepropeller_config.rst +++ b/docs/deployment/configuration/generated/flytepropeller_config.rst @@ -12,6 +12,8 @@ Flyte Propeller Configuration - `logger <#section-logger>`_ +- `otel <#section-otel>`_ + - `plugins <#section-plugins>`_ - `propeller <#section-propeller>`_ @@ -735,6 +737,75 @@ Sets logging format type. json +Section: otel +======================================================================================================================== + +type (string) +------------------------------------------------------------------------------------------------------------------------ + +Sets the type of exporter to configure [noop/file/jaeger]. + +**Default Value**: + +.. code-block:: yaml + + noop + + +file (`otelutils.FileConfig`_) +------------------------------------------------------------------------------------------------------------------------ + +Configuration for exporting telemetry traces to a file + +**Default Value**: + +.. 
code-block:: yaml + + filename: /tmp/trace.txt + + +jaeger (`otelutils.JaegerConfig`_) +------------------------------------------------------------------------------------------------------------------------ + +Configuration for exporting telemetry traces to a jaeger + +**Default Value**: + +.. code-block:: yaml + + endpoint: http://localhost:14268/api/traces + + +otelutils.FileConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +filename (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Filename to store exported telemetry traces + +**Default Value**: + +.. code-block:: yaml + + /tmp/trace.txt + + +otelutils.JaegerConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +endpoint (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Endpoint for the jaeger telemtry trace ingestor + +**Default Value**: + +.. code-block:: yaml + + http://localhost:14268/api/traces + + Section: plugins ======================================================================================================================== @@ -750,7 +821,7 @@ agent-service (`agent.Config`_) defaultAgent: defaultServiceConfig: "" defaultTimeout: 10s - endpoint: dns:///flyteagent.flyte.svc.cluster.local:80 + endpoint: "" insecure: true timeouts: null resourceConstraints: @@ -911,6 +982,16 @@ databricks (`databricks.Config`_) qps: 10 +echo (`testing.Config`_) +------------------------------------------------------------------------------------------------------------------------ + +**Default Value**: + +.. 
code-block:: yaml + + sleep-duration: 0s + + k8s (`config.K8sPluginConfig`_) ------------------------------------------------------------------------------------------------------------------------ @@ -929,6 +1010,7 @@ k8s (`config.K8sPluginConfig`_) output-vol-name: flyte-outputs start-timeout: 1m40s storage: "" + create-container-config-error-grace-period: 0s create-container-error-grace-period: 3m0s default-annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "false" @@ -957,6 +1039,7 @@ k8s (`config.K8sPluginConfig`_) interruptible-node-selector-requirement: null interruptible-tolerations: null non-interruptible-node-selector-requirement: null + pod-pending-timeout: 0s resource-tolerations: null scheduler-name: "" send-object-events: false @@ -983,6 +1066,8 @@ k8s-array (`k8s.Config`_) cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: true kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName @@ -1032,6 +1117,8 @@ logs (`logs.LogConfig`_) cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: true kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName @@ -1075,6 +1162,7 @@ ray (`ray.Config`_) .. 
code-block:: yaml dashboardHost: 0.0.0.0 + dashboardURLTemplate: null defaults: headNode: ipAddress: $MY_POD_IP @@ -1091,6 +1179,8 @@ ray (`ray.Config`_) cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: false kubernetes-template-uri: "" @@ -1099,6 +1189,7 @@ ray (`ray.Config`_) stackdriver-logresourcename: "" stackdriver-template-uri: "" templates: null + logsSidecar: null remoteClusterConfig: auth: caCertPath: "" @@ -1111,25 +1202,6 @@ ray (`ray.Config`_) ttlSecondsAfterFinished: 3600 -sagemaker (`config.Config (sagemaker)`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - prebuiltAlgorithms: - - name: xgboost - regionalConfigs: - - region: us-east-1 - versionConfigs: - - image: 683313688378.dkr.ecr.us-east-1.amazonaws.com/sagemaker-xgboost:0.90-2-cpu-py3 - version: "0.90" - region: us-east-1 - roleAnnotationKey: "" - roleArn: default_role - - snowflake (`snowflake.Config`_) ------------------------------------------------------------------------------------------------------------------------ @@ -1175,6 +1247,8 @@ spark (`spark.Config`_) cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: false kubernetes-template-uri: "" @@ -1188,6 +1262,8 @@ spark (`spark.Config`_) cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: true kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName @@ -1202,6 +1278,8 @@ spark (`spark.Config`_) cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: false 
kubernetes-template-uri: "" @@ -1215,6 +1293,8 @@ spark (`spark.Config`_) cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: false kubernetes-template-uri: "" @@ -1279,7 +1359,7 @@ The default agent. defaultServiceConfig: "" defaultTimeout: 10s - endpoint: dns:///flyteagent.flyte.svc.cluster.local:80 + endpoint: "" insecure: true timeouts: null @@ -1327,7 +1407,7 @@ endpoint (string) .. code-block:: yaml - dns:///flyteagent.flyte.svc.cluster.local:80 + "" insecure (bool) @@ -2058,60 +2138,6 @@ destinationClusterConfigs ([]config.DestinationClusterConfig) [] -config.Config (sagemaker) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -roleArn (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The role the SageMaker plugin uses to communicate with the SageMaker service - -**Default Value**: - -.. code-block:: yaml - - default_role - - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The AWS region the SageMaker plugin communicates to - -**Default Value**: - -.. code-block:: yaml - - us-east-1 - - -roleAnnotationKey (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Map key to use to lookup role from task annotations. - -**Default Value**: - -.. code-block:: yaml - - "" - - -prebuiltAlgorithms ([]config.PrebuiltAlgorithmConfig) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - - name: xgboost - regionalConfigs: - - region: us-east-1 - versionConfigs: - - image: 683313688378.dkr.ecr.us-east-1.amazonaws.com/sagemaker-xgboost:0.90-2-cpu-py3 - version: "0.90" - - config.K8sPluginConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -2326,6 +2352,16 @@ create-container-error-grace-period (`config.Duration`_) 3m0s +create-container-config-error-grace-period (`config.Duration`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + 0s + + image-pull-backoff-grace-period (`config.Duration`_) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -2336,6 +2372,16 @@ image-pull-backoff-grace-period (`config.Duration`_) 3m0s +pod-pending-timeout (`config.Duration`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + 0s + + gpu-device-node-label (string) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -2895,6 +2941,8 @@ Config for log links for k8s array jobs. cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: true kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName @@ -3012,6 +3060,8 @@ Defines the log config for k8s logs. 
cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: true kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName @@ -3159,7 +3209,31 @@ Template Uri to use when building stackdriver log links "" -templates ([]logs.TemplateLogPluginConfig) +flyin-enabled (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Enable Log-links to flyin logs + +**Default Value**: + +.. code-block:: yaml + + "false" + + +flyin-template-uri (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Template Uri to use when building flyin log links + +**Default Value**: + +.. code-block:: yaml + + "" + + +templates ([]tasklog.TemplateLogPlugin) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" **Default Value**: @@ -3332,7 +3406,31 @@ Template Uri to use when building stackdriver log links "" -templates ([]logs.TemplateLogPluginConfig) +flyin-enabled (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Enable Log-links to flyin logs + +**Default Value**: + +.. code-block:: yaml + + "false" + + +flyin-template-uri (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Template Uri to use when building flyin log links + +**Default Value**: + +.. 
code-block:: yaml + + "" + + +templates ([]tasklog.TemplateLogPlugin) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" **Default Value**: @@ -3433,6 +3531,8 @@ logs (`logs.LogConfig`_) cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: false kubernetes-template-uri: "" @@ -3443,6 +3543,28 @@ logs (`logs.LogConfig`_) templates: null +logsSidecar (v1.Container) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + null + + +dashboardURLTemplate (tasklog.TemplateLogPlugin) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Template for URL of Ray dashboard running on a head node. + +**Default Value**: + +.. code-block:: yaml + + null + + defaults (`ray.DefaultConfig`_) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -3646,6 +3768,8 @@ Config for log links for spark applications. cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: false kubernetes-template-uri: "" @@ -3659,6 +3783,8 @@ Config for log links for spark applications. cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: true kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName @@ -3673,6 +3799,8 @@ Config for log links for spark applications. 
cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: false kubernetes-template-uri: "" @@ -3686,6 +3814,8 @@ Config for log links for spark applications. cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: false kubernetes-template-uri: "" @@ -3712,6 +3842,8 @@ Defines the log config that's not split into user/system. cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: true kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName @@ -3736,6 +3868,8 @@ Defines the log config for user logs. cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: false kubernetes-template-uri: "" @@ -3759,6 +3893,8 @@ Defines the log config for system logs. cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: false kubernetes-template-uri: "" @@ -3782,6 +3918,8 @@ All user logs across driver and executors. cloudwatch-log-group: "" cloudwatch-region: "" cloudwatch-template-uri: "" + flyin-enabled: false + flyin-template-uri: "" gcp-project: "" kubernetes-enabled: false kubernetes-template-uri: "" @@ -3792,6 +3930,21 @@ All user logs across driver and executors. 
templates: null +testing.Config +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +sleep-duration (`config.Duration`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Indicates the amount of time before transitioning to success + +**Default Value**: + +.. code-block:: yaml + + 0s + + Section: propeller ======================================================================================================================== @@ -4084,6 +4237,7 @@ config for a workflow node node-execution-deadline: 0s workflow-active-deadline: 0s default-max-attempts: 1 + enable-cr-debug-metadata: false ignore-retry-cause: false interruptible-failure-threshold: -1 max-node-retries-system-failures: 3 @@ -4222,6 +4376,18 @@ ArrayNode eventing version. 0 => legacy (drop-in replacement for maptask), 1 => "0" +node-execution-worker-count (int) +------------------------------------------------------------------------------------------------------------------------ + +Number of workers to evaluate node executions, currently only used for array nodes + +**Default Value**: + +.. code-block:: yaml + + "8" + + admin-launcher (`launchplan.AdminConfig`_) ------------------------------------------------------------------------------------------------------------------------ @@ -4716,6 +4882,18 @@ Ignore retry cause and count all attempts toward a node's max attempts "false" +enable-cr-debug-metadata (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Collapse node on any terminal state, not just successful terminations. This is useful to reduce the size of workflow state in etcd. + +**Default Value**: + +.. 
code-block:: yaml + + "false" + + config.DefaultDeadlines ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -5466,6 +5644,16 @@ requests (v1.ResourceList) memory: 500Mi +claims ([]v1.ResourceClaim) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + null + + config.GCPSecretManagerConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/docs/deployment/configuration/generated/scheduler_config.rst b/docs/deployment/configuration/generated/scheduler_config.rst index c2ed67edd7..c96746dd62 100644 --- a/docs/deployment/configuration/generated/scheduler_config.rst +++ b/docs/deployment/configuration/generated/scheduler_config.rst @@ -30,6 +30,8 @@ Flyte Scheduler Configuration - `notifications <#section-notifications>`_ +- `otel <#section-otel>`_ + - `plugins <#section-plugins>`_ - `propeller <#section-propeller>`_ @@ -1408,6 +1410,16 @@ reconnectDelaySeconds (int) "0" +cloudEventVersion (uint8) +------------------------------------------------------------------------------------------------------------------------ + +**Default Value**: + +.. code-block:: yaml + + v1 + + interfaces.AWSConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1721,11 +1733,11 @@ postgres (`database.PostgresConfig`_) dbname: postgres debug: false - host: postgres + host: localhost options: sslmode=disable - password: "" + password: postgres passwordPath: "" - port: 5432 + port: 30001 username: postgres @@ -1751,7 +1763,7 @@ The host name of the database server .. code-block:: yaml - postgres + localhost port (int) @@ -1763,7 +1775,7 @@ The port name of the database server .. 
code-block:: yaml - "5432" + "30001" dbname (string) @@ -1799,7 +1811,7 @@ The database password. .. code-block:: yaml - "" + postgres passwordPath (string) @@ -2132,6 +2144,33 @@ envs (map[string]string) null +featureGates (`interfaces.FeatureGates`_) +------------------------------------------------------------------------------------------------------------------------ + +Enable experimental features. + +**Default Value**: + +.. code-block:: yaml + + enableArtifacts: false + + +interfaces.FeatureGates +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +enableArtifacts (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Enable artifacts feature. + +**Default Value**: + +.. code-block:: yaml + + "false" + + Section: logger ======================================================================================================================== @@ -2445,6 +2484,75 @@ topicName (string) "" +Section: otel +======================================================================================================================== + +type (string) +------------------------------------------------------------------------------------------------------------------------ + +Sets the type of exporter to configure [noop/file/jaeger]. + +**Default Value**: + +.. code-block:: yaml + + noop + + +file (`otelutils.FileConfig`_) +------------------------------------------------------------------------------------------------------------------------ + +Configuration for exporting telemetry traces to a file + +**Default Value**: + +.. code-block:: yaml + + filename: /tmp/trace.txt + + +jaeger (`otelutils.JaegerConfig`_) +------------------------------------------------------------------------------------------------------------------------ + +Configuration for exporting telemetry traces to a jaeger + +**Default Value**: + +.. 
code-block:: yaml + + endpoint: http://localhost:14268/api/traces + + +otelutils.FileConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +filename (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Filename to store exported telemetry traces + +**Default Value**: + +.. code-block:: yaml + + /tmp/trace.txt + + +otelutils.JaegerConfig +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +endpoint (string) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Endpoint for the jaeger telemtry trace ingestor + +**Default Value**: + +.. code-block:: yaml + + http://localhost:14268/api/traces + + Section: plugins ======================================================================================================================== @@ -2483,6 +2591,7 @@ k8s (`config.K8sPluginConfig`_) output-vol-name: flyte-outputs start-timeout: 1m40s storage: "" + create-container-config-error-grace-period: 0s create-container-error-grace-period: 3m0s default-annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "false" @@ -2511,6 +2620,7 @@ k8s (`config.K8sPluginConfig`_) interruptible-node-selector-requirement: null interruptible-tolerations: null non-interruptible-node-selector-requirement: null + pod-pending-timeout: 0s resource-tolerations: null scheduler-name: "" send-object-events: false @@ -2800,6 +2910,16 @@ create-container-error-grace-period (`config.Duration`_) 3m0s +create-container-config-error-grace-period (`config.Duration`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. 
code-block:: yaml + + 0s + + image-pull-backoff-grace-period (`config.Duration`_) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -2810,6 +2930,16 @@ image-pull-backoff-grace-period (`config.Duration`_) 3m0s +pod-pending-timeout (`config.Duration`_) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +**Default Value**: + +.. code-block:: yaml + + 0s + + gpu-device-node-label (string) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" @@ -3428,6 +3558,7 @@ config for a workflow node node-execution-deadline: 0s workflow-active-deadline: 0s default-max-attempts: 1 + enable-cr-debug-metadata: false ignore-retry-cause: false interruptible-failure-threshold: -1 max-node-retries-system-failures: 3 @@ -3566,6 +3697,18 @@ ArrayNode eventing version. 0 => legacy (drop-in replacement for maptask), 1 => "0" +node-execution-worker-count (int) +------------------------------------------------------------------------------------------------------------------------ + +Number of workers to evaluate node executions, currently only used for array nodes + +**Default Value**: + +.. code-block:: yaml + + "8" + + config.CompositeQueueConfig ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -3914,6 +4057,18 @@ Ignore retry cause and count all attempts toward a node's max attempts "false" +enable-cr-debug-metadata (bool) +"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +Collapse node on any terminal state, not just successful terminations. This is useful to reduce the size of workflow state in etcd. + +**Default Value**: + +.. 
code-block:: yaml + + "false" + + config.DefaultDeadlines ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -4522,7 +4677,7 @@ grpc (`config.GrpcConfig`_) .. code-block:: yaml - enableGrpcHistograms: false + enableGrpcLatencyMetrics: false maxMessageSizeBytes: 0 port: 8089 serverReflection: true @@ -4723,10 +4878,10 @@ The max size in bytes for incoming gRPC messages "0" -enableGrpcHistograms (bool) +enableGrpcLatencyMetrics (bool) """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" -Enable grpc histograms +Enable grpc latency metrics. Note Histograms metrics can be expensive on Prometheus servers. **Default Value**: diff --git a/docs/deployment/configuration/performance.rst b/docs/deployment/configuration/performance.rst index ee65cdaa20..f7bb1d2d2d 100644 --- a/docs/deployment/configuration/performance.rst +++ b/docs/deployment/configuration/performance.rst @@ -189,7 +189,7 @@ Manual scale-out ---------------- FlytePropeller can be run manually per namespace. This is not a recommended solution as it is harder to deploy, but if your organization can deploy and maintain multiple copies of FlytePropeller, you can use this. -Automatic scale-out +Sharded scale-out ------------------- FlytePropeller Manager is a new component introduced as part of `this RFC `_ to facilitate horizontal scaling of FlytePropeller through sharding. Effectively, the Manager is responsible for maintaining liveness and proper configuration over a collection of FlytePropeller instances. This scheme uses k8s label selectors to deterministically assign FlyteWorkflow CRD responsibilities to FlytePropeller instances, effectively distributing processing load over the shards. 
@@ -197,7 +197,7 @@ Deployment of FlytePropeller Manager requires k8s configuration updates includin Flyte provides a variety of Shard Strategies to configure how FlyteWorkflows are sharded among managed FlytePropeller instances. These include hash, which uses consistent hashing to load-balance evaluation over shards, and project / domain, which map the respective IDs to specific managed FlytePropeller instances. Below we include examples of helm configurations for each of the existing Shard Strategies. -The Hash Shard Strategy, denoted by "type: hash" in the configuration below, uses consistent hashing to evenly distribute FlyteWorkflows over managed FlytePropeller instances. This configuration requires a "shard-count" variable which defines the number of managed FlytePropeller instances. +The Hash Shard Strategy, denoted by "type: Hash" in the configuration below, uses consistent hashing to evenly distribute FlyteWorkflows over managed FlytePropeller instances. This configuration requires a "shard-count" variable which defines the number of managed FlytePropeller instances. You may change the shard count without impacting existing workflows. Note that changing the shard-count is a manual step, it is not auto-scaling. .. code-block:: yaml @@ -208,7 +208,7 @@ The Hash Shard Strategy, denoted by "type: hash" in the configuration below, use # pod and scanning configuration redacted # ... shard: - type: hash # use the "hash" shard strategy + type: Hash # use the "hash" shard strategy shard-count: 4 # the total number of shards The Project and Domain Shard Strategies, denoted by "type: project" and "type: domain" respectively, use the FlyteWorkflow project and domain metadata to shard FlyteWorkflows. These Shard Strategies are configured using a "per-shard-mapping" option, which is a list of ID lists. Each element in the "per-shard-mapping" list defines a new shard and the ID list assigns responsibility for the specified IDs to that shard. 
A shard configured as a single wildcard ID (i.e. "*") is responsible for all IDs that are not covered by other shards. Only a single shard may be configured with a wildcard ID and on that shard their must be only one ID, namely the wildcard. diff --git a/docs/deployment/plugins/k8s/index.rst b/docs/deployment/plugins/k8s/index.rst index 53b6ebd543..2199f099e8 100644 --- a/docs/deployment/plugins/k8s/index.rst +++ b/docs/deployment/plugins/k8s/index.rst @@ -267,6 +267,7 @@ Spin up a cluster `__, please ensure: * You have the correct kubeconfig and have selected the correct Kubernetes context. + * You have configured the correct flytectl settings in ``~/.flyte/config.yaml``. .. note:: @@ -277,6 +278,88 @@ Spin up a cluster helm repo add flyteorg https://flyteorg.github.io/flyte + .. tabs:: + + If you have installed Flyte using the `flyte-sandbox Helm chart `__, please ensure: + + * You have the correct kubeconfig and have selected the correct Kubernetes context. + + * You have configured the correct flytectl settings in ``~/.flyte/config.yaml``. + + * You have the correct kubeconfig and have selected the correct Kubernetes context. + * You have configured the correct flytectl settings in ``~/.flyte/config.yaml``. + + .. tabs:: + + .. group-tab:: Helm chart + + .. tabs:: + + .. group-tab:: Spark + + create the following four files and apply them using ``kubectl apply -f ``: + + 1. ``serviceaccount.yaml`` + + .. code-block:: yaml + + apiVersion: v1 + kind: ServiceAccount + metadata: + name: default + namespace: "{{ namespace }}" + annotations: + eks.amazonaws.com/role-arn: "{{ defaultIamRole }}" + + 2. ``spark_role.yaml`` + + .. code-block:: yaml + + apiVersion: rbac.authorization.k8s.io/v1 + kind: Role + metadata: + name: spark-role + namespace: "{{ namespace }}" + rules: + - apiGroups: + - "" + resources: + - pods + - services + - configmaps + verbs: + - "*" + + 3. ``spark_service_account.yaml`` + + .. 
code-block:: yaml + + apiVersion: v1 + kind: ServiceAccount + metadata: + name: spark + namespace: "{{ namespace }}" + annotations: + eks.amazonaws.com/role-arn: "{{ defaultIamRole }}" + + 4. ``spark_role_binding.yaml`` + + .. code-block:: yaml + + apiVersion: rbac.authorization.k8s.io/v1 + kind: RoleBinding + metadata: + name: spark-role-binding + namespace: "{{ namespace }}" + roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: spark-role + subjects: + - kind: ServiceAccount + name: spark + namespace: "{{ namespace }}" + Install the Kubernetes operator ------------------------------- @@ -751,7 +834,123 @@ Specify plugin configuration sidecar: sidecar container_array: k8s-array spark: spark - + + .. group-tab:: Flyte sandbox + + Create a file named ``values-override.yaml`` and add the following config to it: + + .. note:: + + Within the flyte-binary block, the value of inline.storage.signedURL.stowConfigOverride.endpoint should be set to the corresponding node Hostname/IP on the MinIO pod if you are deploying on a Kubernetes cluster. + + .. 
code-block:: yaml + + flyte-binary: + nameOverride: flyte-sandbox + enabled: true + configuration: + database: + host: '{{ printf "%s-postgresql" .Release.Name | trunc 63 | trimSuffix "-" }}' + password: postgres + storage: + metadataContainer: my-s3-bucket + userDataContainer: my-s3-bucket + provider: s3 + providerConfig: + s3: + disableSSL: true + v2Signing: true + endpoint: http://{{ printf "%s-minio" .Release.Name | trunc 63 | trimSuffix "-" }}.{{ .Release.Namespace }}:9000 + authType: accesskey + accessKey: minio + secretKey: miniostorage + logging: + level: 5 + plugins: + kubernetes: + enabled: true + templateUri: |- + http://localhost:30080/kubernetes-dashboard/#/log/{{.namespace }}/{{ .podName }}/pod?namespace={{ .namespace }} + inline: + task_resources: + defaults: + cpu: 500m + ephemeralStorage: 0 + gpu: 0 + memory: 1Gi + limits: + cpu: 0 + ephemeralStorage: 0 + gpu: 0 + memory: 0 + storage: + signedURL: + stowConfigOverride: + endpoint: http://localhost:30002 + plugins: + k8s: + default-env-vars: + - FLYTE_AWS_ENDPOINT: http://{{ printf "%s-minio" .Release.Name | trunc 63 | trimSuffix "-" }}.{{ .Release.Namespace }}:9000 + - FLYTE_AWS_ACCESS_KEY_ID: minio + - FLYTE_AWS_SECRET_ACCESS_KEY: miniostorage + spark: + spark-config-default: + - spark.driver.cores: "1" + - spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider" + - spark.hadoop.fs.s3a.endpoint: http://{{ printf "%s-minio" .Release.Name | trunc 63 | trimSuffix "-" }}.{{ .Release.Namespace }}:9000 + - spark.hadoop.fs.s3a.access.key: "minio" + - spark.hadoop.fs.s3a.secret.key: "miniostorage" + - spark.hadoop.fs.s3a.path.style.access: "true" + - spark.kubernetes.allocation.batch.size: "50" + - spark.hadoop.fs.s3a.acl.default: "BucketOwnerFullControl" + - spark.hadoop.fs.s3n.impl: "org.apache.hadoop.fs.s3a.S3AFileSystem" + - spark.hadoop.fs.AbstractFileSystem.s3n.impl: "org.apache.hadoop.fs.s3a.S3A" + - spark.hadoop.fs.s3.impl: 
"org.apache.hadoop.fs.s3a.S3AFileSystem" + - spark.hadoop.fs.AbstractFileSystem.s3.impl: "org.apache.hadoop.fs.s3a.S3A" + - spark.hadoop.fs.s3a.impl: "org.apache.hadoop.fs.s3a.S3AFileSystem" + - spark.hadoop.fs.AbstractFileSystem.s3a.impl: "org.apache.hadoop.fs.s3a.S3A" + inlineConfigMap: '{{ include "flyte-sandbox.configuration.inlineConfigMap" . }}' + clusterResourceTemplates: + inlineConfigMap: '{{ include "flyte-sandbox.clusterResourceTemplates.inlineConfigMap" . }}' + deployment: + image: + repository: flyte-binary + tag: sandbox + pullPolicy: Never + waitForDB: + image: + repository: bitnami/postgresql + tag: sandbox + pullPolicy: Never + rbac: + # This is strictly NOT RECOMMENDED in production clusters, and is only for use + # within local Flyte sandboxes. + # When using cluster resource templates to create additional namespaced roles, + # Flyte is required to have a superset of those permissions. To simplify + # experimenting with new backend plugins that require additional roles be created + # with cluster resource templates (e.g. Spark), we add the following: + extraRules: + - apiGroups: + - '*' + resources: + - '*' + verbs: + - '*' + enabled_plugins: + tasks: + task-plugins: + enabled-plugins: + - container + - sidecar + - k8s-array + - agent-service + - spark + default-for-task-types: + container: container + sidecar: sidecar + container_array: k8s-array + spark: spark + .. group-tab:: Dask .. tabs:: @@ -817,7 +1016,7 @@ Upgrade the deployment ```` with the name of your namespace (e.g., ``flyte``), and ```` with the name of your YAML file. - .. group-tab:: Flyte core + .. group-tab:: Flyte core / sandbox .. code-block:: bash @@ -830,4 +1029,4 @@ Wait for the upgrade to complete. You can check the status of the deployment pod .. 
code-block:: bash - kubectl get pods -n --all-namespaces + kubectl get pods -n flyte diff --git a/docs/deployment/plugins/webapi/databricks.rst b/docs/deployment/plugins/webapi/databricks.rst index 671fdb4e18..69f710677c 100644 --- a/docs/deployment/plugins/webapi/databricks.rst +++ b/docs/deployment/plugins/webapi/databricks.rst @@ -288,7 +288,7 @@ Add the Databricks access token to FlytePropeller: .. group-tab:: Helm chart - Create an external secret as follows: + Create a secret as follows (or add to it if it already exists from other plugins): .. code-block:: bash @@ -296,7 +296,7 @@ Add the Databricks access token to FlytePropeller: apiVersion: v1 kind: Secret metadata: - name: flyte-binary-client-secrets-external-secret + name: flyte-binary-external-services namespace: flyte type: Opaque stringData: @@ -304,16 +304,15 @@ Add the Databricks access token to FlytePropeller: EOF Reference the newly created secret in - ``.Values.configuration.auth.clientSecretsExternalSecretRef`` + ``.Values.configuration.inlineSecretRef`` in your YAML file as follows: .. code-block:: yaml - :emphasize-lines: 3 + :emphasize-lines: 2 configuration: - auth: - clientSecretsExternalSecretRef: flyte-binary-client-secrets-external-secret - + inlineSecretRef: flyte-binary-external-services + Replace ```` with your access token. .. group-tab:: Flyte core diff --git a/docs/deployment/plugins/webapi/snowflake.rst b/docs/deployment/plugins/webapi/snowflake.rst index 85f13fe115..80ef2305d0 100644 --- a/docs/deployment/plugins/webapi/snowflake.rst +++ b/docs/deployment/plugins/webapi/snowflake.rst @@ -154,7 +154,7 @@ Then, add the Snowflake JWT token to FlytePropeller. .. group-tab:: Helm chart - Create an external secret as follows: + Create a secret as follows (or add to it if it already exists from other plugins): .. code-block:: bash @@ -162,7 +162,7 @@ Then, add the Snowflake JWT token to FlytePropeller. 
apiVersion: v1 kind: Secret metadata: - name: flyte-binary-client-secrets-external-secret + name: flyte-binary-external-services namespace: flyte type: Opaque stringData: @@ -170,16 +170,15 @@ Then, add the Snowflake JWT token to FlytePropeller. EOF Reference the newly created secret in - ``.Values.configuration.auth.clientSecretsExternalSecretRef`` + ``.Values.configuration.inlineSecretRef`` in your YAML file as follows: .. code-block:: yaml - :emphasize-lines: 3 + :emphasize-lines: 2 configuration: - auth: - clientSecretsExternalSecretRef: flyte-binary-client-secrets-external-secret - + inlineSecretRef: flyte-binary-external-services + Replace ```` with your JWT token. .. group-tab:: Flyte core @@ -240,4 +239,6 @@ Wait for the upgrade to complete. You can check the status of the deployment pod .. code-block:: - kubectl get pods -n flyte \ No newline at end of file + kubectl get pods -n flyte + + For Snowflake plugin on the Flyte cluster, please refer to `Snowflake Plugin Example `_ diff --git a/docs/flytectl_overview.rst b/docs/flytectl_overview.rst deleted file mode 100644 index a6d104b08f..0000000000 --- a/docs/flytectl_overview.rst +++ /dev/null @@ -1,107 +0,0 @@ -.. flytectl doc - -#################################################### -Flytectl: The Official Flyte Command-line Interface -#################################################### - -Overview -========= -This video will take you on a tour of Flytectl - how to install and configure it, as well as how to use the Verbs and Nouns sections on the left hand side menu. Detailed information can be found in the sections below the video. - -.. youtube:: cV8ezYnBANE - - -Installation -============ - -Flytectl is a Golang binary that can be installed on any platform supported by Golang. - -.. tabbed:: OSX - - .. prompt:: bash $ - - brew install flyteorg/homebrew-tap/flytectl - - *Upgrade* existing installation using the following command: - - .. prompt:: bash $ - - flytectl upgrade - -.. 
tabbed:: Other Operating systems - - .. prompt:: bash $ - - curl -sL https://ctl.flyte.org/install | bash - - *Upgrade* existing installation using the following command: - - .. prompt:: bash $ - - flytectl upgrade - -**Test** if Flytectl is installed correctly (your Flytectl version should be > 0.2.0) using the following command: - -.. prompt:: bash $ - - flytectl version - -Configuration -============= - -Flytectl allows you to communicate with FlyteAdmin using a YAML file or by passing every configuration value -on the command-line. The following configuration can be used for the setup: - -Basic Configuration --------------------- - -The full list of available configurable options can be found by running ``flytectl --help``, or `here `__. - -.. NOTE:: - - Currently, the Project ``-p``, Domain ``-d``, and Output ``-o`` flags cannot be used in the config file. - -.. tabbed:: Local Flyte Sandbox - - Automatically configured for you by ``flytectl sandbox`` command. - - .. code-block:: yaml - - admin: - # For GRPC endpoints you might want to use dns:///flyte.myexample.com - endpoint: dns:///localhost:30081 - insecure: true # Set to false to enable TLS/SSL connection (not recommended except on local sandbox deployment). - authType: Pkce # authType: Pkce # if using authentication or just drop this. - -.. tabbed:: AWS Configuration - - .. code-block:: yaml - - admin: - # For GRPC endpoints you might want to use dns:///flyte.myexample.com - endpoint: dns:/// - authType: Pkce # authType: Pkce # if using authentication or just drop this. - insecure: true # insecure: True # Set to true if the endpoint isn't accessible through TLS/SSL connection (not recommended except on local sandbox deployment) - -.. tabbed:: GCS Configuration - - .. code-block:: yaml - - admin: - # For GRPC endpoints you might want to use dns:///flyte.myexample.com - endpoint: dns:/// - authType: Pkce # authType: Pkce # if using authentication or just drop this. 
- insecure: false # insecure: True # Set to true if the endpoint isn't accessible through TLS/SSL connection (not recommended except on local sandbox deployment) - -.. tabbed:: Others - - For other supported storage backends like Oracle, Azure, etc., refer to the configuration structure `here `__. - - Place the config file in ``$HOME/.flyte`` directory with the name config.yaml. - This file is typically searched in: - - * ``$HOME/.flyte`` - * currDir from where you run flytectl - * ``/etc/flyte/config`` - - You can also pass the file name in the command line using ``--config ``. diff --git a/docs/index.md b/docs/index.md index c2bf0f9087..797b323842 100644 --- a/docs/index.md +++ b/docs/index.md @@ -57,7 +57,7 @@ learning and analytics. Created at [Lyft](https://www.lyft.com/) in collaboration with Spotify, Freenome, and many others, Flyte provides first-class support for -{doc}`Python `, +{doc}`Python `, [Java, and Scala](https://github.com/flyteorg/flytekit-java). Data Scientists and ML Engineers in the industry use Flyte to create: @@ -102,11 +102,11 @@ Below are the API reference to the different components of Flyte: :header-rows: 0 :widths: 20 30 -* - {doc}`Flytekit ` +* - {doc}`Flytekit ` - Flyte's official Python SDK. -* - {doc}`FlyteCTL ` +* - {doc}`FlyteCTL ` - Flyte's command-line interface for interacting with a Flyte cluster. -* - {doc}`FlyteIDL ` +* - {doc}`FlyteIDL ` - Flyte's core specification language. ``` @@ -135,6 +135,8 @@ Have questions or need support? 
The best way to reach us is through Slack: :hidden: Introduction +Quickstart guide +Getting started with workflow development Flyte Fundamentals Core Use Cases ``` @@ -172,8 +174,8 @@ reference/swagger :name: apitoc :hidden: -flytekit -flytectl +flytekit +flytectl flyteidl ``` diff --git a/docs/introduction.md b/docs/introduction.md index b902f32786..8e61c433a4 100644 --- a/docs/introduction.md +++ b/docs/introduction.md @@ -4,375 +4,35 @@ jupytext: text_representation: extension: .md format_name: myst -kernelspec: - display_name: Python 3 - language: python - name: python3 # override the toc-determined page navigation order -next-page: getting_started/flyte_fundamentals -next-page-title: Flyte Fundamentals +next-page: getting_started/quickstart_guide +next-page-title: Quickstart guide --- (getting_started_index)= -# Getting Started +# Introduction to Flyte -## Introduction to Flyte +Flyte is a workflow orchestrator that unifies machine learning, data engineering, and data analytics stacks for building robust and reliable applications. Flyte features: +* Reproducible, repeatable workflows +* Strongly typed interfaces +* Structured datasets to enable easy conversion of dataframes between types, and column-level type checking +* Easy movement of data between local and cloud storage +* Easy tracking of data lineages +* Built-in data and artifact visualization -Flyte is a workflow orchestrator that seamlessly unifies data, -machine learning, and analytics stacks for building robust and reliable -applications. +For a full list of feature, see the [Flyte features page](https://flyte.org/features). -This introduction provides a quick overview of how to get Flyte up and running -on your local machine. +## Basic Flyte components -````{dropdown} Want to try Flyte on the browser? -:title: text-muted -:animate: fade-in-slide-down +Flyte is made up of a user plane, control plane, and data plane. 
+* The **user plane** contains the elements you need to develop the code that will implement your application's directed acyclic graph (DAG). These elements are FlyteKit and Flytectl. Data scientists and machine learning engineers primarily work in the user plane. +* The **control plane** is part of the Flyte backend that is configured by platform engineers or others tasked with setting up computing infrastructure. It consists of FlyteConsole and FlyteAdmin, which serves as the main Flyte API to process requests from clients in the user plane. The control plane sends workflow execution requests to the data plane for execution, and stores information such as current and past running workflows, and provides that information upon request. +* The **data plane** is another part of the Flyte backend that contains FlytePropeller, the core engine of Flyte that executes workflows. FlytePropeller is designed as a [Kubernetes Controller](https://kubernetes.io/docs/concepts/architecture/controller/). The data plane sends status events back to the control plane so that information can be stored and surfaced to end users. -The introduction below is also available on a hosted sandbox environment, where -you can get started with Flyte without installing anything locally. +## Next steps -```{link-button} https://sandbox.union.ai/ ---- -classes: try-hosted-flyte btn-warning btn-block -text: Try Hosted Flyte Sandbox ---- -``` - -```{div} text-muted -*Courtesy of [Union.ai](https://www.union.ai/)* -``` - -```` - -(getting_started_installation)= - -## Installation - -```{admonition} Prerequisites -:class: important - -[Install Docker](https://docs.docker.com/get-docker/) and ensure that you -have the Docker daemon running. 
- -Flyte supports any [OCI-compatible](https://opencontainers.org/) container -technology (like [Podman](https://podman.io/), -[LXD](https://linuxcontainers.org/lxd/introduction/), and -[Containerd](https://containerd.io/)) when running tasks on a Flyte cluster, but -for the purpose of this guide, `flytectl` uses Docker to spin up a local -Kubernetes cluster so that you can interact with it on your machine. -``` - -First install [flytekit](https://pypi.org/project/flytekit/), Flyte's Python SDK and [Scikit-learn](https://scikit-learn.org/stable). - -```{prompt} bash $ -pip install flytekit flytekitplugins-deck-standard scikit-learn -``` - -Then install [flytectl](https://docs.flyte.org/projects/flytectl/en/latest/), -which the command-line interface for interacting with a Flyte backend. - -````{tabbed} Homebrew - -```{prompt} bash $ -brew install flyteorg/homebrew-tap/flytectl -``` - -```` - -````{tabbed} Curl - -```{prompt} bash $ -curl -sL https://ctl.flyte.org/install | sudo bash -s -- -b /usr/local/bin -``` - -```` - -## Creating a Workflow - -The first workflow we'll create is a simple model training workflow that consists -of three steps that will: - -1. 🍷 Get the classic [wine dataset](https://scikit-learn.org/stable/datasets/toy_dataset.html#wine-recognition-dataset) - using [sklearn](https://scikit-learn.org/stable/). -2. 📊 Process the data that simplifies the 3-class prediction problem into a - binary classification problem by consolidating class labels `1` and `2` into - a single class. -3. 🤖 Train a `LogisticRegression` model to learn a binary classifier. - -First, we'll define three tasks for each of these steps. Create a file called -`example.py` and copy the following code into it. 
- -```{code-cell} python -:tags: [remove-output] - -import pandas as pd -from sklearn.datasets import load_wine -from sklearn.linear_model import LogisticRegression - -import flytekit.extras.sklearn -from flytekit import task, workflow - - -@task -def get_data() -> pd.DataFrame: - """Get the wine dataset.""" - return load_wine(as_frame=True).frame - -@task -def process_data(data: pd.DataFrame) -> pd.DataFrame: - """Simplify the task from a 3-class to a binary classification problem.""" - return data.assign(target=lambda x: x["target"].where(x["target"] == 0, 1)) - -@task -def train_model(data: pd.DataFrame, hyperparameters: dict) -> LogisticRegression: - """Train a model on the wine dataset.""" - features = data.drop("target", axis="columns") - target = data["target"] - return LogisticRegression(max_iter=3000, **hyperparameters).fit(features, target) -``` - -As we can see in the code snippet above, we defined three tasks as Python -functions: `get_data`, `process_data`, and `train_model`. - -In Flyte, **tasks** are the most basic unit of compute and serve as the building -blocks 🧱 for more complex applications. A task is a function that takes some -inputs and produces an output. We can use these tasks to define a simple model -training workflow: - -```{code-cell} python -@workflow -def training_workflow(hyperparameters: dict) -> LogisticRegression: - """Put all of the steps together into a single workflow.""" - data = get_data() - processed_data = process_data(data=data) - return train_model( - data=processed_data, - hyperparameters=hyperparameters, - ) -``` - -```{note} -A task can also be an isolated piece of compute that takes no inputs and -produces no output, but for the most part to do something useful a task -is typically written with inputs and outputs. -``` - -A **workflow** is also defined as a Python function, and it specifies the flow -of data between tasks and, more generally, the dependencies between tasks 🔀. 
- -::::{dropdown} {fa}`info-circle` The code above looks like Python, but what do `@task` and `@workflow` do exactly? -:title: text-muted -:animate: fade-in-slide-down - -Flyte `@task` and `@workflow` decorators are designed to work seamlessly with -your code-base, provided that the *decorated function is at the top-level scope -of the module*. - -This means that you can invoke tasks and workflows as regular Python methods and -even import and use them in other Python modules or scripts. - -:::{note} -A {func}`task ` is a pure Python function, while a {func}`workflow ` -is actually a [DSL](https://en.wikipedia.org/wiki/Domain-specific_language) that -only supports a subset of Python's semantics. Learn more in the -{ref}`Flyte Fundamentals ` section. -::: - -:::: - -(intro_running_flyte_workflows)= - -## Running Flyte Workflows in Python - -You can run the workflow in ``example.py`` on a local Python by using `pyflyte`, -the CLI that ships with `flytekit`. - -```{prompt} bash $ -pyflyte run example.py training_workflow \ - --hyperparameters '{"C": 0.1}' -``` - -:::::{dropdown} {fa}`info-circle` Running into shell issues? -:title: text-muted -:animate: fade-in-slide-down - -If you're using Bash, you can ignore this 🙂 -You may need to add .local/bin to your PATH variable if it's not already set, -as that's not automatically added for non-bourne shells like fish or xzsh. - -To use pyflyte, make sure to set the /.local/bin directory in PATH - -:::{code-block} fish -set -gx PATH $PATH ~/.local/bin -::: -::::: - - - -:::::{dropdown} {fa}`info-circle` Why use `pyflyte run` rather than `python example.py`? -:title: text-muted -:animate: fade-in-slide-down - -`pyflyte run` enables you to execute a specific workflow using the syntax -`pyflyte run `. 
- -Keyword arguments can be supplied to ``pyflyte run`` by passing in options in -the format ``--kwarg value``, and in the case of ``snake_case_arg`` argument -names, you can pass in options in the form of ``--snake-case-arg value``. - -::::{note} -If you want to run a workflow with `python example.py`, you would have to write -a `main` module conditional at the end of the script to actually run the -workflow: - -:::{code-block} python -if __name__ == "__main__": - training_workflow(hyperparameters={"C": 0.1}) -::: - -This becomes even more verbose if you want to pass in arguments: - -:::{code-block} python -if __name__ == "__main__": - import json - from argparse import ArgumentParser - - parser = ArgumentParser() - parser.add_argument("--hyperparameters", type=json.loads) - ... # add the other options - - args = parser.parse_args() - training_workflow(hyperparameters=args.hyperparameters) -::: - -:::: - -::::: - -(getting_started_flyte_cluster)= - -## Running Workflows in a Flyte Cluster - -You can also use `pyflyte run` to execute workflows on a Flyte cluster. -To do so, first spin up a local demo cluster. `flytectl` uses Docker to create -a local Kubernetes cluster and minimal Flyte backend that you can use to run -the example above: - -```{important} -Before you start the local cluster, make sure that you allocate a minimum of -`4 CPUs` and `3 GB` of memory in your Docker daemon. If you're using the -[Docker Desktop](https://www.docker.com/products/docker-desktop/), you can -do this easily by going to: - -`Settings > Resources > Advanced` - -Then set the **CPUs** and **Memory** sliders to the appropriate levels. -``` - -```{prompt} bash $ -flytectl demo start -``` - -````{div} shadow p-3 mb-8 rounded -**Expected Output:** - -```{code-block} -👨‍💻 Flyte is ready! 
Flyte UI is available at http://localhost:30080/console 🚀 🚀 🎉 -❇️ Run the following command to export sandbox environment variables for accessing flytectl - export FLYTECTL_CONFIG=~/.flyte/config-sandbox.yaml -🐋 Flyte sandbox ships with a Docker registry. Tag and push custom workflow images to localhost:30000 -📂 The Minio API is hosted on localhost:30002. Use http://localhost:30080/minio/login for Minio console -``` - -```{important} -Make sure to export the `FLYTECTL_CONFIG=~/.flyte/config-sandbox.yaml` environment -variable in your shell. -``` -```` - -Then, run the workflow on the Flyte cluster with `pyflyte run` using the -`--remote` flag: - -```{prompt} bash $ -pyflyte run --remote example.py training_workflow \ - --hyperparameters '{"C": 0.1}' -``` - -````{div} shadow p-3 mb-8 rounded - -**Expected Output:** A URL to the workflow execution on your demo Flyte cluster: - -```{code-block} -Go to http://localhost:30080/console/projects/flytesnacks/domains/development/executions/ to see execution in the console. -``` - -Where ```` is a unique identifier for the workflow execution. - -```` - - -## Inspect the Results - -Navigate to the URL produced by `pyflyte run`. This will take you to -FlyteConsole, the web UI used to manage Flyte entities such as tasks, -workflows, and executions. - -![getting started console](https://github.com/flyteorg/static-resources/raw/main/flytesnacks/getting_started/getting_started_console.gif) - - -```{note} -There are a few features about FlyteConsole worth pointing out in the GIF above: - -- The default execution view shows the list of tasks executing in sequential order. -- The right-hand panel shows metadata about the task execution, including logs, inputs, outputs, and task metadata. -- The **Graph** view shows the execution graph of the workflow, providing visual information about the topology - of the graph and the state of each node as the workflow progresses. 
-- On completion, you can inspect the outputs of each task, and ultimately, the overarching workflow. -``` - -## Summary - -🎉 **Congratulations! In this introductory guide, you:** - -1. 📜 Created a Flyte script, which trains a binary classification model. -2. 🚀 Spun up a demo Flyte cluster on your local system. -3. 👟 Ran a workflow locally and on a demo Flyte cluster. - - -## What's Next? - -Follow the rest of the sections in the documentation to get a better -understanding of the key constructs that make Flyte such a powerful -orchestration tool 💪. - -```{admonition} Recommendation -:class: tip - -If you're new to Flyte we recommend that you go through the -{ref}`Flyte Fundamentals ` and -{ref}`Core Use Cases ` section before diving -into the other sections of the documentation. -``` - -```{list-table} -:header-rows: 0 -:widths: 10 30 - -* - {ref}`🔤 Flyte Fundamentals ` - - A brief tour of the Flyte's main concepts and development lifecycle -* - {ref}`🌟 Core Use Cases ` - - An overview of core uses cases for data, machine learning, and analytics - practitioners. -* - {ref}`📖 User Guide ` - - A comprehensive view of Flyte's functionality for data scientists, - ML engineers, data engineers, and data analysts. -* - {ref}`📚 Tutorials ` - - End-to-end examples of Flyte for data/feature engineering, machine learning, - bioinformatics, and more. -* - {ref}`🚀 Deployment Guide ` - - Guides for platform engineers to deploy a Flyte cluster on your own - infrastructure. -``` +* To quickly try out Flyte on your machine, follow the {ref}`Quickstart guide `. +* To create a Flyte project that can be used to package workflow code for deployment to a Flyte cluster, see {ref}`"Getting started with workflow development" `. +* To set up a Flyte cluster, see the [Deployment documentation](https://docs.flyte.org/en/latest/deployment/index.html). 
diff --git a/docs/reference_flytectl.md b/docs/reference_flytectl.md deleted file mode 100644 index ad1ba6c7a5..0000000000 --- a/docs/reference_flytectl.md +++ /dev/null @@ -1,11 +0,0 @@ -# FlyteCTL API Reference - -```{toctree} -:maxdepth: 2 - -Overview -CLI Entrypoint -flytectl/verbs -flytectl/nouns -flytectl/contribute -``` diff --git a/docs/reference_flytekit.md b/docs/reference_flytekit.md deleted file mode 100644 index 97012a9429..0000000000 --- a/docs/reference_flytekit.md +++ /dev/null @@ -1,20 +0,0 @@ -# Flytekit API Reference - -```{toctree} -:maxdepth: 2 - -api/flytekit/design/index -api/flytekit/flytekit -api/flytekit/configuration -api/flytekit/remote -api/flytekit/clients -api/flytekit/testing -api/flytekit/extend -api/flytekit/deck -api/flytekit/plugins/index -api/flytekit/tasks.extend -api/flytekit/types.extend -api/flytekit/experimental -api/flytekit/pyflyte -api/flytekit/contributing -``` diff --git a/flyte-single-binary-local.yaml b/flyte-single-binary-local.yaml index ca63458b03..c369424795 100644 --- a/flyte-single-binary-local.yaml +++ b/flyte-single-binary-local.yaml @@ -6,9 +6,6 @@ admin: # and _also_, admin to talk to artifacts endpoint: localhost:30080 insecure: true -flyteadmin: - featureGates: - enableArtifacts: true catalog-cache: endpoint: localhost:8081 diff --git a/flyteadmin/Dockerfile b/flyteadmin/Dockerfile deleted file mode 100644 index e7412dfe43..0000000000 --- a/flyteadmin/Dockerfile +++ /dev/null @@ -1,54 +0,0 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. 
-# ONLY EDIT THIS FILE FROM WITHIN THE 'LYFT/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/lyft/boilerplate/blob/master/Readme.rst - -FROM --platform=${BUILDPLATFORM} golang:1.19-alpine3.16 as builder - -ARG TARGETARCH -ENV GOARCH "${TARGETARCH}" -ENV GOOS linux - -RUN apk add git openssh-client make curl - -# Create the artifacts directory -RUN mkdir /artifacts - -# Pull GRPC health probe binary for liveness and readiness checks -RUN GRPC_HEALTH_PROBE_VERSION=v0.4.11 && \ - wget -qO/artifacts/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \ - chmod +x /artifacts/grpc_health_probe && \ - echo 'ded15e598d887ccc47bf2321371950bbf930f5e4856b9f75712ce4b2b5120480 /artifacts/grpc_health_probe' > .grpc_checksum && \ - sha256sum -c .grpc_checksum - -# COPY only the go mod files for efficient caching -COPY go.mod go.sum /go/src/github.com/flyteorg/flyteadmin/ -WORKDIR /go/src/github.com/flyteorg/flyteadmin - -# Pull dependencies -RUN go mod download - - -# COPY the rest of the source code -COPY . /go/src/github.com/flyteorg/flyteadmin/ - -# This 'linux_compile' target should compile binaries to the /artifacts directory -# The main entrypoint should be compiled to /artifacts/flyteadmin -RUN make linux_compile - -# update the PATH to include the /artifacts directory -ENV PATH="/artifacts:${PATH}" - -# This will eventually move to centurylink/ca-certs:latest for minimum possible image size -FROM alpine:3.16 -LABEL org.opencontainers.image.source https://github.com/flyteorg/flyteadmin - -COPY --from=builder /artifacts /bin - -# Ensure the latest CA certs are present to authenticate SSL connections. 
-RUN apk --update add ca-certificates - -RUN addgroup -S flyte && adduser -S flyte -G flyte -USER flyte - -CMD ["flyteadmin"] diff --git a/flyteadmin/auth/handler_utils.go b/flyteadmin/auth/handler_utils.go index e6fd1a7236..a6b4031ca8 100644 --- a/flyteadmin/auth/handler_utils.go +++ b/flyteadmin/auth/handler_utils.go @@ -8,6 +8,7 @@ import ( "github.com/grpc-ecosystem/go-grpc-middleware/util/metautils" "github.com/flyteorg/flyte/flyteadmin/auth/config" + "github.com/flyteorg/flyte/flytestdlib/logger" ) const ( @@ -146,3 +147,32 @@ func GetPublicURL(ctx context.Context, req *http.Request, cfg *config.Config) *u return u } + +func isAuthorizedRedirectURL(url *url.URL, authorizedURL *url.URL) bool { + return url.Hostname() == authorizedURL.Hostname() && url.Port() == authorizedURL.Port() && url.Scheme == authorizedURL.Scheme +} + +func GetRedirectURLAllowed(ctx context.Context, urlRedirectParam string, cfg *config.Config) bool { + if len(urlRedirectParam) == 0 { + logger.Debugf(ctx, "not validating whether empty redirect url is authorized") + return true + } + redirectURL, err := url.Parse(urlRedirectParam) + if err != nil { + logger.Debugf(ctx, "failed to parse user-supplied redirect url: %s with err: %v", urlRedirectParam, err) + return false + } + if redirectURL.Host == "" { + logger.Debugf(ctx, "not validating whether relative redirect url is authorized") + return true + } + logger.Debugf(ctx, "validating whether redirect url: %s is authorized", redirectURL) + for _, authorizedURI := range cfg.AuthorizedURIs { + if isAuthorizedRedirectURL(redirectURL, &authorizedURI.URL) { + logger.Debugf(ctx, "authorizing redirect url: %s against authorized uri: %s", redirectURL.String(), authorizedURI.String()) + return true + } + } + logger.Debugf(ctx, "not authorizing redirect url: %s", redirectURL.String()) + return false +} diff --git a/flyteadmin/auth/handler_utils_test.go b/flyteadmin/auth/handler_utils_test.go index 441f83dbbb..c44a57b934 100644 --- 
a/flyteadmin/auth/handler_utils_test.go +++ b/flyteadmin/auth/handler_utils_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/flyteorg/flyte/flyteadmin/auth/config" - config2 "github.com/flyteorg/flyte/flytestdlib/config" + flytestdconfig "github.com/flyteorg/flyte/flytestdlib/config" ) func TestGetPublicURL(t *testing.T) { @@ -16,7 +16,7 @@ func TestGetPublicURL(t *testing.T) { req, err := http.NewRequest(http.MethodPost, "https://abc", nil) assert.NoError(t, err) u := GetPublicURL(context.Background(), req, &config.Config{ - AuthorizedURIs: []config2.URL{ + AuthorizedURIs: []flytestdconfig.URL{ {URL: *config.MustParseURL("https://xyz")}, {URL: *config.MustParseURL("https://abc")}, }, @@ -28,7 +28,7 @@ func TestGetPublicURL(t *testing.T) { req, err := http.NewRequest(http.MethodPost, "https://abc", nil) assert.NoError(t, err) u := GetPublicURL(context.Background(), req, &config.Config{ - AuthorizedURIs: []config2.URL{ + AuthorizedURIs: []flytestdconfig.URL{ {URL: *config.MustParseURL("https://xyz")}, {URL: *config.MustParseURL("http://abc")}, }, @@ -40,7 +40,7 @@ func TestGetPublicURL(t *testing.T) { req, err := http.NewRequest(http.MethodPost, "https://abc", nil) assert.NoError(t, err) u := GetPublicURL(context.Background(), req, &config.Config{ - AuthorizedURIs: []config2.URL{ + AuthorizedURIs: []flytestdconfig.URL{ {URL: *config.MustParseURL("https://xyz")}, {URL: *config.MustParseURL("http://xyz")}, }, @@ -61,7 +61,7 @@ func TestGetPublicURL(t *testing.T) { assert.NoError(t, err) u := GetPublicURL(context.Background(), req, &config.Config{ - AuthorizedURIs: []config2.URL{ + AuthorizedURIs: []flytestdconfig.URL{ {URL: *config.MustParseURL("http://flyteadmin:80")}, {URL: *config.MustParseURL("http://localhost:30081")}, {URL: *config.MustParseURL("http://localhost:8089")}, @@ -72,3 +72,28 @@ func TestGetPublicURL(t *testing.T) { assert.Equal(t, "http://localhost:30081", u.String()) }) } + +func TestGetRedirectURLAllowed(t 
*testing.T) { + ctx := context.TODO() + t.Run("relative url", func(t *testing.T) { + assert.True(t, GetRedirectURLAllowed(ctx, "/console", &config.Config{})) + }) + t.Run("no redirect url", func(t *testing.T) { + assert.True(t, GetRedirectURLAllowed(ctx, "", &config.Config{})) + }) + cfg := &config.Config{ + AuthorizedURIs: []flytestdconfig.URL{ + {URL: *config.MustParseURL("https://example.com")}, + {URL: *config.MustParseURL("http://localhost:3008")}, + }, + } + t.Run("authorized url", func(t *testing.T) { + assert.True(t, GetRedirectURLAllowed(ctx, "https://example.com", cfg)) + }) + t.Run("authorized localhost url", func(t *testing.T) { + assert.True(t, GetRedirectURLAllowed(ctx, "http://localhost:3008", cfg)) + }) + t.Run("unauthorized url", func(t *testing.T) { + assert.False(t, GetRedirectURLAllowed(ctx, "https://flyte.com", cfg)) + }) +} diff --git a/flyteadmin/auth/handlers.go b/flyteadmin/auth/handlers.go index 26e6428df3..a6c2e3b122 100644 --- a/flyteadmin/auth/handlers.go +++ b/flyteadmin/auth/handlers.go @@ -141,6 +141,11 @@ func GetLoginHandler(ctx context.Context, authCtx interfaces.AuthenticationConte logger.Debugf(ctx, "Setting CSRF state cookie to %s and state to %s\n", csrfToken, state) url := authCtx.OAuth2ClientConfig(GetPublicURL(ctx, request, authCtx.Options())).AuthCodeURL(state) queryParams := request.URL.Query() + if !GetRedirectURLAllowed(ctx, queryParams.Get(RedirectURLParameter), authCtx.Options()) { + logger.Infof(ctx, "unauthorized redirect URI") + writer.WriteHeader(http.StatusForbidden) + return + } if flowEndRedirectURL := queryParams.Get(RedirectURLParameter); flowEndRedirectURL != "" { redirectCookie := NewRedirectCookie(ctx, flowEndRedirectURL) if redirectCookie != nil { diff --git a/flyteadmin/dataproxy/service.go b/flyteadmin/dataproxy/service.go index 07c8ae1196..3d657c6958 100644 --- a/flyteadmin/dataproxy/service.go +++ b/flyteadmin/dataproxy/service.go @@ -81,10 +81,10 @@ func (s Service) CreateUploadLocation(ctx 
context.Context, req *service.CreateUp base32Digest := base32.StdEncoding.EncodeToString(req.ContentMd5) base64Digest := base64.StdEncoding.EncodeToString(req.ContentMd5) if hexDigest != metadata.Etag() && base32Digest != metadata.Etag() && base64Digest != metadata.Etag() { - logger.Debug(ctx, "File already exists at location [%v] but hashes do not match", knownLocation) + logger.Debugf(ctx, "File already exists at location [%v] but hashes do not match", knownLocation) return nil, errors.NewFlyteAdminErrorf(codes.AlreadyExists, "file already exists at location [%v], specify a matching hash if you wish to rewrite", knownLocation) } - logger.Debug(ctx, "File already exists at location [%v] but allowing rewrite", knownLocation) + logger.Debugf(ctx, "File already exists at location [%v] but allowing rewrite", knownLocation) } } diff --git a/flyteadmin/flyteadmin_config.yaml b/flyteadmin/flyteadmin_config.yaml index e3d19f7326..443814572b 100644 --- a/flyteadmin/flyteadmin_config.yaml +++ b/flyteadmin/flyteadmin_config.yaml @@ -212,4 +212,4 @@ qualityOfService: staging: MEDIUM # by default production has an UNDEFINED tier when it is omitted from the configuration namespace_mapping: - template: "{{ project }}-{{ domain }}" # Default namespace mapping template. \ No newline at end of file + template: "{{ project }}-{{ domain }}" # Default namespace mapping template. 
diff --git a/flyteadmin/pkg/artifacts/registry.go b/flyteadmin/pkg/artifacts/registry.go deleted file mode 100644 index cdc0717ac6..0000000000 --- a/flyteadmin/pkg/artifacts/registry.go +++ /dev/null @@ -1,101 +0,0 @@ -package artifacts - -import ( - "context" - "fmt" - - "google.golang.org/grpc" - - admin2 "github.com/flyteorg/flyte/flyteidl/clients/go/admin" - "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin" - "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/artifact" - "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" - "github.com/flyteorg/flyte/flytestdlib/logger" -) - -// ArtifactRegistry contains a client to talk to an Artifact service and has helper methods -type ArtifactRegistry struct { - client artifact.ArtifactRegistryClient -} - -func (a *ArtifactRegistry) RegisterArtifactProducer(ctx context.Context, id *core.Identifier, ti core.TypedInterface) { - if a == nil || a.client == nil { - logger.Debugf(ctx, "Artifact client not configured, skipping registration for task [%+v]", id) - return - } - - ap := &artifact.ArtifactProducer{ - EntityId: id, - Outputs: ti.Outputs, - } - _, err := a.client.RegisterProducer(ctx, &artifact.RegisterProducerRequest{ - Producers: []*artifact.ArtifactProducer{ap}, - }) - if err != nil { - logger.Errorf(ctx, "Failed to register artifact producer for task [%+v] with err: %v", id, err) - } - logger.Debugf(ctx, "Registered artifact producer [%+v]", id) -} - -func (a *ArtifactRegistry) RegisterArtifactConsumer(ctx context.Context, id *core.Identifier, pm core.ParameterMap) { - if a == nil || a.client == nil { - logger.Debugf(ctx, "Artifact client not configured, skipping registration for consumer [%+v]", id) - return - } - ac := &artifact.ArtifactConsumer{ - EntityId: id, - Inputs: &pm, - } - _, err := a.client.RegisterConsumer(ctx, &artifact.RegisterConsumerRequest{ - Consumers: []*artifact.ArtifactConsumer{ac}, - }) - if err != nil { - logger.Errorf(ctx, "Failed to register artifact consumer for 
entity [%+v] with err: %v", id, err) - } - logger.Debugf(ctx, "Registered artifact consumer [%+v]", id) -} - -func (a *ArtifactRegistry) RegisterTrigger(ctx context.Context, plan *admin.LaunchPlan) error { - if a == nil || a.client == nil { - logger.Debugf(ctx, "Artifact client not configured, skipping trigger [%+v]", plan) - return fmt.Errorf("artifact client not configured") - } - _, err := a.client.CreateTrigger(ctx, &artifact.CreateTriggerRequest{ - TriggerLaunchPlan: plan, - }) - if err != nil { - logger.Errorf(ctx, "Failed to register trigger for [%+v] with err: %v", plan.Id, err) - return err - } - logger.Debugf(ctx, "Registered trigger for [%+v]", plan.Id) - return nil -} - -func (a *ArtifactRegistry) GetClient() artifact.ArtifactRegistryClient { - if a == nil { - return nil - } - return a.client -} - -// NewArtifactRegistry todo: update this to return error, and proper cfg handling. -// if nil, should either call the default config or return an error -func NewArtifactRegistry(ctx context.Context, connCfg *admin2.Config, _ ...grpc.DialOption) *ArtifactRegistry { - - if connCfg == nil { - return &ArtifactRegistry{ - client: nil, - } - } - var cfg = connCfg - clients, err := admin2.NewClientsetBuilder().WithConfig(cfg).Build(ctx) - if err != nil { - logger.Errorf(ctx, "Failed to create Artifact client") - // too many calls to this function to update, just panic for now. 
- panic(err) - } - - return &ArtifactRegistry{ - client: clients.ArtifactServiceClient(), - } -} diff --git a/flyteadmin/pkg/artifacts/registry_test.go b/flyteadmin/pkg/artifacts/registry_test.go deleted file mode 100644 index 2a0e337255..0000000000 --- a/flyteadmin/pkg/artifacts/registry_test.go +++ /dev/null @@ -1,29 +0,0 @@ -package artifacts - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestRegistryNoClient(t *testing.T) { - r := NewArtifactRegistry(context.Background(), nil) - assert.Nil(t, r.GetClient()) -} - -type Parent struct { - R *ArtifactRegistry -} - -func TestPointerReceivers(t *testing.T) { - p := Parent{} - nilClient := p.R.GetClient() - assert.Nil(t, nilClient) -} - -func TestNilCheck(t *testing.T) { - r := NewArtifactRegistry(context.Background(), nil) - err := r.RegisterTrigger(context.Background(), nil) - assert.NotNil(t, err) -} diff --git a/flyteadmin/pkg/async/cloudevent/implementations/cloudevent_publisher.go b/flyteadmin/pkg/async/cloudevent/implementations/cloudevent_publisher.go index 20bdc0203d..46bd0f0ede 100644 --- a/flyteadmin/pkg/async/cloudevent/implementations/cloudevent_publisher.go +++ b/flyteadmin/pkg/async/cloudevent/implementations/cloudevent_publisher.go @@ -136,9 +136,7 @@ func (c *CloudEventWrappedPublisher) TransformWorkflowExecutionEvent(ctx context // For now, don't append any additional information unless succeeded if rawEvent.Phase != core.WorkflowExecution_SUCCEEDED { return &event.CloudEventWorkflowExecution{ - RawEvent: rawEvent, - OutputData: nil, - OutputInterface: nil, + RawEvent: rawEvent, }, nil } @@ -181,13 +179,6 @@ func (c *CloudEventWrappedPublisher) TransformWorkflowExecutionEvent(ctx context } } - // Get inputs to the workflow execution - var inputs *core.LiteralMap - inputs, _, err = util.GetInputs(ctx, c.urlData, &c.remoteDataConfig, - c.storageClient, executionModel.InputsURI.String()) - if err != nil { - logger.Warningf(ctx, "Error fetching input literal map 
%s", executionModel.InputsURI.String()) - } // The spec is used to retrieve metadata fields spec := &admin.ExecutionSpec{} err = proto.Unmarshal(executionModel.Spec, spec) @@ -195,25 +186,9 @@ func (c *CloudEventWrappedPublisher) TransformWorkflowExecutionEvent(ctx context fmt.Printf("there was an error with spec %v %v", err, executionModel.Spec) } - // Get outputs from the workflow execution - var outputs *core.LiteralMap - if rawEvent.GetOutputData() != nil { - outputs = rawEvent.GetOutputData() - } else if len(rawEvent.GetOutputUri()) > 0 { - // GetInputs actually fetches the data, even though this is an output - outputs, _, err = util.GetInputs(ctx, c.urlData, &c.remoteDataConfig, c.storageClient, rawEvent.GetOutputUri()) - if err != nil { - // gatepr: metric this - logger.Warningf(ctx, "Error fetching output literal map %v", rawEvent) - return nil, err - } - } - return &event.CloudEventWorkflowExecution{ RawEvent: rawEvent, - OutputData: outputs, OutputInterface: &workflowInterface, - InputData: inputs, ArtifactIds: spec.GetMetadata().GetArtifactIds(), ReferenceExecution: spec.GetMetadata().GetReferenceExecution(), Principal: spec.GetMetadata().Principal, @@ -303,40 +278,6 @@ func (c *CloudEventWrappedPublisher) TransformNodeExecutionEvent(ctx context.Con fmt.Printf("there was an error with spec %v %v", err, executionModel.Spec) } - // Get inputs/outputs - // This will likely need to move to the artifact service side, given message size limits. 
- // Replace with call to GetNodeExecutionData - var inputs *core.LiteralMap - logger.Debugf(ctx, "RawEvent id %v", rawEvent.Id) - if len(rawEvent.GetInputUri()) > 0 { - inputs, _, err = util.GetInputs(ctx, c.urlData, &c.remoteDataConfig, - c.storageClient, rawEvent.GetInputUri()) - logger.Debugf(ctx, "RawEvent input uri %v, %v", rawEvent.GetInputUri(), inputs) - - if err != nil { - fmt.Printf("Error fetching input literal map %v", rawEvent) - } - } else if rawEvent.GetInputData() != nil { - inputs = rawEvent.GetInputData() - logger.Debugf(ctx, "RawEvent input data %v, %v", rawEvent.GetInputData(), inputs) - } else { - logger.Infof(ctx, "Node execution for node exec [%+v] has no input data", rawEvent.Id) - } - - // This will likely need to move to the artifact service side, given message size limits. - var outputs *core.LiteralMap - if rawEvent.GetOutputData() != nil { - outputs = rawEvent.GetOutputData() - } else if len(rawEvent.GetOutputUri()) > 0 { - // GetInputs actually fetches the data, even though this is an output - outputs, _, err = util.GetInputs(ctx, c.urlData, &c.remoteDataConfig, - c.storageClient, rawEvent.GetOutputUri()) - if err != nil { - fmt.Printf("Error fetching output literal map %v", rawEvent) - return nil, err - } - } - // Fetch the latest task execution if any, and pull out the task interface, if applicable. // These are optional fields... if the node execution doesn't have a task execution then these will be empty. 
var taskExecID *core.TaskExecutionIdentifier @@ -369,13 +310,10 @@ func (c *CloudEventWrappedPublisher) TransformNodeExecutionEvent(ctx context.Con taskExecID = lte.Id } - logger.Debugf(ctx, "RawEvent inputs %v", inputs) return &event.CloudEventNodeExecution{ RawEvent: rawEvent, TaskExecId: taskExecID, - OutputData: outputs, OutputInterface: typedInterface, - InputData: inputs, ArtifactIds: spec.GetMetadata().GetArtifactIds(), Principal: spec.GetMetadata().Principal, LaunchPlanId: spec.LaunchPlan, diff --git a/flyteadmin/pkg/manager/impl/exec_manager_other_test.go b/flyteadmin/pkg/manager/impl/exec_manager_other_test.go deleted file mode 100644 index 35b8e14d5b..0000000000 --- a/flyteadmin/pkg/manager/impl/exec_manager_other_test.go +++ /dev/null @@ -1,65 +0,0 @@ -package impl - -import ( - "context" - "fmt" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/flyteorg/flyte/flyteadmin/pkg/artifacts" - eventWriterMocks "github.com/flyteorg/flyte/flyteadmin/pkg/async/events/mocks" - "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" - mockScope "github.com/flyteorg/flyte/flytestdlib/promutils" -) - -func TestResolveNotWorking(t *testing.T) { - mockConfig := getMockExecutionsConfigProvider() - - execManager := NewExecutionManager(nil, nil, mockConfig, nil, mockScope.NewTestScope(), mockScope.NewTestScope(), nil, nil, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)).(*ExecutionManager) - - pm, artifactIDs, err := execManager.ResolveParameterMapArtifacts(context.Background(), nil, nil) - assert.Nil(t, err) - fmt.Println(pm, artifactIDs) - -} - -func TestTrackingBitExtract(t *testing.T) { - mockConfig := getMockExecutionsConfigProvider() - - execManager := NewExecutionManager(nil, nil, mockConfig, nil, mockScope.NewTestScope(), mockScope.NewTestScope(), nil, nil, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, 
artifacts.NewArtifactRegistry(context.Background(), nil)).(*ExecutionManager) - - lit := core.Literal{ - Value: &core.Literal_Scalar{ - Scalar: &core.Scalar{ - Value: &core.Scalar_Primitive{ - Primitive: &core.Primitive{ - Value: &core.Primitive_Integer{ - Integer: 1, - }, - }, - }, - }, - }, - Metadata: map[string]string{"_ua": "proj/domain/name@version"}, - } - inputMap := core.LiteralMap{ - Literals: map[string]*core.Literal{ - "a": &lit, - }, - } - inputColl := core.LiteralCollection{ - Literals: []*core.Literal{ - &lit, - }, - } - - trackers := execManager.ExtractArtifactKeys(&lit) - assert.Equal(t, 1, len(trackers)) - - trackers = execManager.ExtractArtifactKeys(&core.Literal{Value: &core.Literal_Map{Map: &inputMap}}) - assert.Equal(t, 1, len(trackers)) - trackers = execManager.ExtractArtifactKeys(&core.Literal{Value: &core.Literal_Collection{Collection: &inputColl}}) - assert.Equal(t, 1, len(trackers)) - assert.Equal(t, "proj/domain/name@version", trackers[0]) -} diff --git a/flyteadmin/pkg/manager/impl/execution_manager.go b/flyteadmin/pkg/manager/impl/execution_manager.go index db02fabf07..16a36f895f 100644 --- a/flyteadmin/pkg/manager/impl/execution_manager.go +++ b/flyteadmin/pkg/manager/impl/execution_manager.go @@ -14,7 +14,6 @@ import ( "google.golang.org/grpc/codes" "github.com/flyteorg/flyte/flyteadmin/auth" - "github.com/flyteorg/flyte/flyteadmin/pkg/artifacts" cloudeventInterfaces "github.com/flyteorg/flyte/flyteadmin/pkg/async/cloudevent/interfaces" eventWriter "github.com/flyteorg/flyte/flyteadmin/pkg/async/events/interfaces" "github.com/flyteorg/flyte/flyteadmin/pkg/async/notifications" @@ -35,9 +34,7 @@ import ( workflowengineInterfaces "github.com/flyteorg/flyte/flyteadmin/pkg/workflowengine/interfaces" "github.com/flyteorg/flyte/flyteadmin/plugins" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin" - "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/artifact" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" - 
"github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/flytek8s" "github.com/flyteorg/flyte/flytestdlib/contextutils" "github.com/flyteorg/flyte/flytestdlib/logger" @@ -94,7 +91,6 @@ type ExecutionManager struct { cloudEventPublisher notificationInterfaces.Publisher dbEventWriter eventWriter.WorkflowExecutionEventWriter pluginRegistry *plugins.Registry - artifactRegistry *artifacts.ArtifactRegistry } func getExecutionContext(ctx context.Context, id *core.WorkflowExecutionIdentifier) context.Context { @@ -253,23 +249,6 @@ func (m *ExecutionManager) setCompiledTaskDefaults(ctx context.Context, task *co }) } - // Only assign storage when it is either requested or limited in the task definition, or a platform - // default exists. - if !taskResourceRequirements.Defaults.Storage.IsZero() || - !taskResourceRequirements.Limits.Storage.IsZero() || - !platformTaskResources.Defaults.Storage.IsZero() { - storageResource := flytek8s.AdjustOrDefaultResource(taskResourceRequirements.Defaults.Storage, taskResourceRequirements.Limits.Storage, - platformTaskResources.Defaults.Storage, platformTaskResources.Limits.Storage) - finalizedResourceRequests = append(finalizedResourceRequests, &core.Resources_ResourceEntry{ - Name: core.Resources_STORAGE, - Value: storageResource.Request.String(), - }) - finalizedResourceLimits = append(finalizedResourceLimits, &core.Resources_ResourceEntry{ - Name: core.Resources_STORAGE, - Value: storageResource.Limit.String(), - }) - } - // Only assign gpu when it is either requested or limited in the task definition, or a platform default exists. 
if !taskResourceRequirements.Defaults.GPU.IsZero() || !taskResourceRequirements.Limits.GPU.IsZero() || @@ -700,33 +679,6 @@ func resolveSecurityCtx(ctx context.Context, executionConfigSecurityCtx *core.Se } } -// ExtractArtifactKeys pulls out artifact keys from Literals for lineage -// todo: rename this function to be less confusing -func (m *ExecutionManager) ExtractArtifactKeys(input *core.Literal) []string { - var artifactKeys []string - - if input == nil { - return artifactKeys - } - if input.GetMetadata() != nil { - if artifactKey, ok := input.GetMetadata()["_ua"]; ok { - artifactKeys = append(artifactKeys, artifactKey) - } - } - if input.GetCollection() != nil { - for _, v := range input.GetCollection().Literals { - mapKeys := m.ExtractArtifactKeys(v) - artifactKeys = append(artifactKeys, mapKeys...) - } - } else if input.GetMap() != nil { - for _, v := range input.GetMap().Literals { - mapKeys := m.ExtractArtifactKeys(v) - artifactKeys = append(artifactKeys, mapKeys...) - } - } - return artifactKeys -} - // getStringFromInput should be called when a tag or partition value is a binding to an input. the input is looked up // from the input map and the binding, and an error is returned if the input key is not in the map. func (m *ExecutionManager) getStringFromInput(ctx context.Context, inputBinding core.InputBindingData, inputs map[string]*core.Literal) (string, error) { @@ -746,6 +698,7 @@ func (m *ExecutionManager) getStringFromInput(ctx context.Context, inputBinding strVal = p.GetStringValue() case *core.Primitive_Datetime: t := time.Unix(p.GetDatetime().Seconds, int64(p.GetDatetime().Nanos)) + t = t.In(time.UTC) strVal = t.Format("2006-01-02") case *core.Primitive_StringValue: strVal = p.GetStringValue() @@ -780,46 +733,6 @@ func (m *ExecutionManager) fillInTemplateArgs(ctx context.Context, query core.Ar if query.GetUri() != "" { // If a query string, then just pass it through, nothing to fill in. 
return query, nil - } else if query.GetArtifactTag() != nil { - t := query.GetArtifactTag() - ak := t.GetArtifactKey() - if ak == nil { - return query, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "tag doesn't have key") - } - var project, domain string - if ak.GetProject() == "" { - project = contextutils.Value(ctx, contextutils.ProjectKey) - } else { - project = ak.GetProject() - } - if ak.GetDomain() == "" { - domain = contextutils.Value(ctx, contextutils.DomainKey) - } else { - domain = ak.GetDomain() - } - strValue, err := m.getLabelValue(ctx, t.GetValue(), inputs) - if err != nil { - logger.Errorf(ctx, "Failed to template input string [%s] [%v]", t.GetValue(), err) - return query, err - } - - return core.ArtifactQuery{ - Identifier: &core.ArtifactQuery_ArtifactTag{ - ArtifactTag: &core.ArtifactTag{ - ArtifactKey: &core.ArtifactKey{ - Project: project, - Domain: domain, - Name: ak.GetName(), - }, - Value: &core.LabelValue{ - Value: &core.LabelValue_StaticValue{ - StaticValue: strValue, - }, - }, - }, - }, - }, nil - } else if query.GetArtifactId() != nil { artifactID := query.GetArtifactId() ak := artifactID.GetArtifactKey() @@ -840,7 +753,7 @@ func (m *ExecutionManager) fillInTemplateArgs(ctx context.Context, query core.Ar var partitions map[string]*core.LabelValue - if artifactID.GetPartitions() != nil && artifactID.GetPartitions().GetValue() != nil { + if artifactID.GetPartitions().GetValue() != nil { partitions = make(map[string]*core.LabelValue, len(artifactID.GetPartitions().Value)) for k, v := range artifactID.GetPartitions().GetValue() { newValue, err := m.getLabelValue(ctx, v, inputs) @@ -851,6 +764,36 @@ func (m *ExecutionManager) fillInTemplateArgs(ctx context.Context, query core.Ar partitions[k] = &core.LabelValue{Value: &core.LabelValue_StaticValue{StaticValue: newValue}} } } + + var timePartition *core.TimePartition + if artifactID.GetTimePartition().GetValue() != nil { + if artifactID.GetTimePartition().Value.GetTimeValue() != nil { + // If 
the time value is set, then just pass it through, nothing to fill in. + timePartition = artifactID.GetTimePartition() + } else if artifactID.GetTimePartition().Value.GetInputBinding() != nil { + // Evaluate the time partition input binding + lit, ok := inputs[artifactID.GetTimePartition().Value.GetInputBinding().GetVar()] + if !ok { + return query, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "time partition input binding var [%s] not found in inputs %v", artifactID.GetTimePartition().Value.GetInputBinding().GetVar(), inputs) + } + + if lit.GetScalar().GetPrimitive().GetDatetime() == nil { + return query, errors.NewFlyteAdminErrorf(codes.InvalidArgument, + "time partition binding to input var [%s] failing because %v is not a datetime", + artifactID.GetTimePartition().Value.GetInputBinding().GetVar(), lit) + } + timePartition = &core.TimePartition{ + Value: &core.LabelValue{ + Value: &core.LabelValue_TimeValue{ + TimeValue: lit.GetScalar().GetPrimitive().GetDatetime(), + }, + }, + } + } else { + return query, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "time partition value cannot be empty when evaluating query: %v", query) + } + } + return core.ArtifactQuery{ Identifier: &core.ArtifactQuery_ArtifactId{ ArtifactId: &core.ArtifactID{ @@ -859,11 +802,10 @@ func (m *ExecutionManager) fillInTemplateArgs(ctx context.Context, query core.Ar Domain: domain, Name: ak.GetName(), }, - Dimensions: &core.ArtifactID_Partitions{ - Partitions: &core.Partitions{ - Value: partitions, - }, + Partitions: &core.Partitions{ + Value: partitions, }, + TimePartition: timePartition, }, }, }, nil @@ -871,86 +813,10 @@ func (m *ExecutionManager) fillInTemplateArgs(ctx context.Context, query core.Ar return query, errors.NewFlyteAdminErrorf(codes.InvalidArgument, "query doesn't have uri, tag, or id") } -// ResolveParameterMapArtifacts will go through the parameter map, and resolve any artifact queries. 
-func (m *ExecutionManager) ResolveParameterMapArtifacts(ctx context.Context, inputs *core.ParameterMap, inputsForQueryTemplating map[string]*core.Literal) (*core.ParameterMap, []*core.ArtifactID, error) { - - // only top level replace for now. Need to make this recursive - var artifactIDs []*core.ArtifactID - if inputs == nil { - return nil, artifactIDs, nil - } - outputs := map[string]*core.Parameter{} - - for k, v := range inputs.Parameters { - if inputsForQueryTemplating != nil { - if _, ok := inputsForQueryTemplating[k]; ok { - // Mark these as required as they're already provided by the other two LiteralMaps - outputs[k] = &core.Parameter{ - Var: v.Var, - Behavior: &core.Parameter_Required{Required: true}, - } - continue - } - } - if v.GetArtifactQuery() != nil { - // This case handles when an Artifact query is specified as a default value. - if m.artifactRegistry.GetClient() == nil { - return nil, nil, errors.NewFlyteAdminErrorf(codes.Internal, "artifact client is not initialized, can't resolve queries") - } - filledInQuery, err := m.fillInTemplateArgs(ctx, *v.GetArtifactQuery(), inputsForQueryTemplating) - if err != nil { - logger.Errorf(ctx, "Failed to fill in template args for [%s] [%v]", k, err) - return nil, nil, err - } - req := &artifact.GetArtifactRequest{ - Query: &filledInQuery, - Details: false, - } - - resp, err := m.artifactRegistry.GetClient().GetArtifact(ctx, req) - if err != nil { - return nil, nil, err - } - artifactIDs = append(artifactIDs, resp.Artifact.GetArtifactId()) - logger.Debugf(ctx, "Resolved query for [%s] to [%+v]", k, resp.Artifact.ArtifactId) - outputs[k] = &core.Parameter{ - Var: v.Var, - Behavior: &core.Parameter_Default{Default: resp.Artifact.Spec.Value}, - } - } else if v.GetArtifactId() != nil { - // This case is for when someone hard-codes a known ArtifactID as a default value. 
- req := &artifact.GetArtifactRequest{ - Query: &core.ArtifactQuery{ - Identifier: &core.ArtifactQuery_ArtifactId{ - ArtifactId: v.GetArtifactId(), - }, - }, - Details: false, - } - resp, err := m.artifactRegistry.GetClient().GetArtifact(ctx, req) - if err != nil { - return nil, nil, err - } - artifactIDs = append(artifactIDs, v.GetArtifactId()) - logger.Debugf(ctx, "Using specified artifactID for [%+v] for [%s]", v.GetArtifactId(), k) - outputs[k] = &core.Parameter{ - Var: v.Var, - Behavior: &core.Parameter_Default{Default: resp.Artifact.Spec.Value}, - } - } else { - outputs[k] = v - } - } - pm := &core.ParameterMap{Parameters: outputs} - return pm, artifactIDs, nil -} - func (m *ExecutionManager) launchExecutionAndPrepareModel( ctx context.Context, request admin.ExecutionCreateRequest, requestedAt time.Time) ( context.Context, *models.Execution, error) { - ctxPD := contextutils.WithProjectDomain(ctx, request.Project, request.Domain) - err := validation.ValidateExecutionRequest(ctx, request, m.db, m.config.ApplicationConfiguration()) if err != nil { logger.Debugf(ctx, "Failed to validate ExecutionCreateRequest %+v with err %v", request, err) @@ -974,56 +840,9 @@ func (m *ExecutionManager) launchExecutionAndPrepareModel( return nil, nil, err } - // TODO: Artifact feature gate, remove when ready var lpExpectedInputs *core.ParameterMap - var artifactTrackers []string var usedArtifactIDs []*core.ArtifactID - if m.artifactRegistry.GetClient() != nil { - // Literals may have an artifact key in the metadata field. This is something the artifact service should have - // added. Pull these back out so we can keep track of them for lineage purposes. Use a dummy wrapper object for - // easier recursion. 
- requestInputMap := &core.Literal{ - Value: &core.Literal_Map{Map: request.Inputs}, - } - fixedInputMap := &core.Literal{ - Value: &core.Literal_Map{Map: launchPlan.Spec.FixedInputs}, - } - artifactTrackers = m.ExtractArtifactKeys(requestInputMap) - fixedInputArtifactKeys := m.ExtractArtifactKeys(fixedInputMap) - artifactTrackers = append(artifactTrackers, fixedInputArtifactKeys...) - - // Put together the inputs that we've already resolved so that the artifact querying bit can fill them in. - // This is to support artifact queries that depend on other inputs using the {{ .inputs.var }} construct. - var inputsForQueryTemplating = make(map[string]*core.Literal) - if request.Inputs != nil { - for k, v := range request.Inputs.Literals { - inputsForQueryTemplating[k] = v - } - } - for k, v := range launchPlan.Spec.FixedInputs.Literals { - inputsForQueryTemplating[k] = v - } - logger.Debugf(ctx, "Inputs for query templating: [%+v]", inputsForQueryTemplating) - - // Resolve artifact queries - // Within the launch plan, the artifact will be in the Parameter map, and can come in form of an ArtifactID, - // or as an ArtifactQuery. - // Also send in the inputsForQueryTemplating for two reasons, so we don't run queries for things we don't need to - // and so we can fill in template args. - // ArtifactIDs are also returned for lineage purposes. 
- lpExpectedInputs, usedArtifactIDs, err = m.ResolveParameterMapArtifacts(ctxPD, launchPlan.Closure.ExpectedInputs, inputsForQueryTemplating) - if err != nil { - logger.Errorf(ctx, "Error looking up launch plan closure parameter map: %v", err) - return nil, nil, err - } - - logger.Debugf(ctx, "Resolved launch plan closure expected inputs from [%+v] to [%+v]", launchPlan.Closure.ExpectedInputs, lpExpectedInputs) - logger.Debugf(ctx, "Found artifact keys: %v", artifactTrackers) - logger.Debugf(ctx, "Found artifact IDs: %v", usedArtifactIDs) - - } else { - lpExpectedInputs = launchPlan.Closure.ExpectedInputs - } + lpExpectedInputs = launchPlan.Closure.ExpectedInputs // Artifacts retrieved will need to be stored somewhere to ensure that we can re-emit events if necessary // in the future, and also to make sure that relaunch and recover can use it if necessary. @@ -1170,12 +989,6 @@ func (m *ExecutionManager) launchExecutionAndPrepareModel( notificationsSettings = make([]*admin.Notification, 0) } - // Publish of event is also gated on the artifact client being available, even though it's not directly required. - // TODO: Artifact feature gate, remove when ready - if m.artifactRegistry.GetClient() != nil { - m.publishExecutionStart(ctx, workflowExecutionID, request.Spec.LaunchPlan, workflow.Id, artifactTrackers, usedArtifactIDs) - } - createExecModelInput := transformers.CreateExecutionModelInput{ WorkflowExecutionID: workflowExecutionID, RequestSpec: requestSpec, @@ -1225,30 +1038,6 @@ func (m *ExecutionManager) launchExecutionAndPrepareModel( return ctx, executionModel, nil } -// publishExecutionStart is an event that Admin publishes for artifact lineage. 
-func (m *ExecutionManager) publishExecutionStart(ctx context.Context, executionID core.WorkflowExecutionIdentifier, - launchPlanID *core.Identifier, workflowID *core.Identifier, inputArtifactKeys []string, usedArtifactIDs []*core.ArtifactID) { - - if len(inputArtifactKeys) > 0 || len(usedArtifactIDs) > 0 { - logger.Debugf(ctx, "Sending execution start event for execution [%+v] with input artifact keys [%+v] and used artifact ids [%+v]", executionID, inputArtifactKeys, usedArtifactIDs) - - request := event.CloudEventExecutionStart{ - ExecutionId: &executionID, - LaunchPlanId: launchPlanID, - WorkflowId: workflowID, - ArtifactIds: usedArtifactIDs, - ArtifactKeys: inputArtifactKeys, - } - go func() { - ceCtx := context.TODO() - if err := m.cloudEventPublisher.Publish(ceCtx, proto.MessageName(&request), &request); err != nil { - m.systemMetrics.PublishEventError.Inc() - logger.Infof(ctx, "error publishing cloud event [%+v] with err: [%v]", request, err) - } - }() - } -} - // Inserts an execution model into the database store and emits platform metrics. 
func (m *ExecutionManager) createExecutionModel( ctx context.Context, executionModel *models.Execution) (*core.WorkflowExecutionIdentifier, error) { @@ -1968,7 +1757,7 @@ func NewExecutionManager(db repositoryInterfaces.Repository, pluginRegistry *plu publisher notificationInterfaces.Publisher, urlData dataInterfaces.RemoteURLInterface, workflowManager interfaces.WorkflowInterface, namedEntityManager interfaces.NamedEntityInterface, eventPublisher notificationInterfaces.Publisher, cloudEventPublisher cloudeventInterfaces.Publisher, - eventWriter eventWriter.WorkflowExecutionEventWriter, artifactRegistry *artifacts.ArtifactRegistry) interfaces.ExecutionInterface { + eventWriter eventWriter.WorkflowExecutionEventWriter) interfaces.ExecutionInterface { queueAllocator := executions.NewQueueAllocator(config, db) systemMetrics := newExecutionSystemMetrics(systemScope) @@ -2002,7 +1791,6 @@ func NewExecutionManager(db repositoryInterfaces.Repository, pluginRegistry *plu cloudEventPublisher: cloudEventPublisher, dbEventWriter: eventWriter, pluginRegistry: pluginRegistry, - artifactRegistry: artifactRegistry, } } diff --git a/flyteadmin/pkg/manager/impl/execution_manager_test.go b/flyteadmin/pkg/manager/impl/execution_manager_test.go index e1f59cdf43..f1c5bb9eb5 100644 --- a/flyteadmin/pkg/manager/impl/execution_manager_test.go +++ b/flyteadmin/pkg/manager/impl/execution_manager_test.go @@ -22,7 +22,6 @@ import ( "k8s.io/apimachinery/pkg/util/sets" "github.com/flyteorg/flyte/flyteadmin/auth" - "github.com/flyteorg/flyte/flyteadmin/pkg/artifacts" eventWriterMocks "github.com/flyteorg/flyte/flyteadmin/pkg/async/events/mocks" notificationMocks "github.com/flyteorg/flyte/flyteadmin/pkg/async/notifications/mocks" "github.com/flyteorg/flyte/flyteadmin/pkg/common" @@ -376,7 +375,7 @@ func TestCreateExecution(t *testing.T) { mockConfig := getMockExecutionsConfigProvider() mockConfig.(*runtimeMocks.MockConfigurationProvider).AddQualityOfServiceConfiguration(qosProvider) - 
execManager := NewExecutionManager(repository, r, mockConfig, getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, &mockPublisher, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, mockConfig, getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, &mockPublisher, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) request := testutils.GetExecutionRequest() request.Spec.Metadata = &admin.ExecutionMetadata{ Principal: "unused - populated from authenticated context", @@ -473,7 +472,7 @@ func TestCreateExecutionFromWorkflowNode(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) request := testutils.GetExecutionRequest() request.Spec.Metadata = &admin.ExecutionMetadata{ Mode: admin.ExecutionMetadata_CHILD_WORKFLOW, @@ -510,7 +509,7 @@ func TestCreateExecution_NoAssignedName(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, 
getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) request := testutils.GetExecutionRequest() request.Name = "" response, err := execManager.CreateExecution(context.Background(), request, requestedAt) @@ -561,7 +560,7 @@ func TestCreateExecution_TaggedQueue(t *testing.T) { mockExecutor.OnID().Return("customMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, configProvider, getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, configProvider, getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) request := testutils.GetExecutionRequest() response, err := execManager.CreateExecution(context.Background(), request, requestedAt) @@ -579,7 +578,7 @@ func TestCreateExecutionValidationError(t *testing.T) { setDefaultLpCallbackForExecTest(repository) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, 
getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) request := testutils.GetExecutionRequest() request.Domain = "" @@ -593,7 +592,7 @@ func TestCreateExecution_InvalidLpIdentifier(t *testing.T) { setDefaultLpCallbackForExecTest(repository) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) request := testutils.GetExecutionRequest() request.Spec.LaunchPlan = nil @@ -607,7 +606,7 @@ func TestCreateExecutionInCompatibleInputs(t *testing.T) { setDefaultLpCallbackForExecTest(repository) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), 
mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) request := testutils.GetExecutionRequest() request.Inputs = &core.LiteralMap{ @@ -660,7 +659,7 @@ func TestCreateExecutionPropellerFailure(t *testing.T) { identity, err := auth.NewIdentityContext("", principal, "", time.Now(), sets.NewString(), nil, nil) assert.NoError(t, err) ctx := identity.WithContext(context.Background()) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) expectedResponse := &admin.ExecutionCreateResponse{Id: &executionIdentifier} @@ -684,7 +683,7 @@ func TestCreateExecutionDatabaseFailure(t *testing.T) { } repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetCreateCallback(exCreateFunc) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, 
&eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) request := testutils.GetExecutionRequest() response, err := execManager.CreateExecution(context.Background(), request, requestedAt) @@ -752,7 +751,7 @@ func TestCreateExecutionVerifyDbModel(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), storageClient, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), storageClient, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) execManager.(*ExecutionManager)._clock = mockClock @@ -795,7 +794,7 @@ func TestCreateExecutionDefaultNotifications(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := 
NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) response, err := execManager.CreateExecution(context.Background(), request, requestedAt) assert.Nil(t, err) @@ -834,7 +833,7 @@ func TestCreateExecutionDisableNotifications(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) response, err := execManager.CreateExecution(context.Background(), request, requestedAt) assert.Nil(t, err) @@ -904,7 +903,7 @@ func TestCreateExecutionNoNotifications(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, 
getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) response, err := execManager.CreateExecution(context.Background(), request, requestedAt) assert.Nil(t, err) @@ -933,7 +932,7 @@ func TestCreateExecutionDynamicLabelsAndAnnotations(t *testing.T) { mockExecutor.OnID().Return("customMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) request := testutils.GetExecutionRequest() request.Spec.Labels = &admin.Labels{ Values: map[string]string{ @@ -1052,7 +1051,7 @@ func TestCreateExecutionInterruptible(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, 
getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) _, err := execManager.CreateExecution(context.Background(), request, requestedAt) assert.Nil(t, err) @@ -1132,7 +1131,7 @@ func TestCreateExecutionOverwriteCache(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) _, err := execManager.CreateExecution(context.Background(), request, requestedAt) assert.Nil(t, err) @@ -1216,7 +1215,7 @@ func TestCreateExecutionWithEnvs(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), 
getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) _, err := execManager.CreateExecution(context.Background(), request, requestedAt) assert.Nil(t, err) @@ -1258,7 +1257,7 @@ func TestCreateExecution_CustomNamespaceMappingConfig(t *testing.T) { mockExecutionsConfigProvider.(*runtimeMocks.MockConfigurationProvider).AddRegistrationValidationConfiguration( runtimeMocks.NewMockRegistrationValidationProvider()) - execManager := NewExecutionManager(repository, r, mockExecutionsConfigProvider, storageClient, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, mockExecutionsConfigProvider, storageClient, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) execManager.(*ExecutionManager)._clock = mockClock @@ -1431,7 +1430,7 @@ func TestRelaunchExecution(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, 
nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := ptypes.TimestampProto(startTime) existingClosure := admin.ExecutionClosure{ @@ -1491,7 +1490,7 @@ func TestRelaunchExecution_GetExistingFailure(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) expectedErr := errors.New("expected error") repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetGetCallback( @@ -1530,7 +1529,7 @@ func TestRelaunchExecution_CreateFailure(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, 
&eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := ptypes.TimestampProto(startTime) existingClosure := admin.ExecutionClosure{ @@ -1570,7 +1569,7 @@ func TestRelaunchExecutionInterruptibleOverride(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := ptypes.TimestampProto(startTime) existingClosure := admin.ExecutionClosure{ @@ -1621,7 +1620,7 @@ func TestRelaunchExecutionOverwriteCacheOverride(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, 
nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := ptypes.TimestampProto(startTime) existingClosure := admin.ExecutionClosure{ @@ -1744,7 +1743,7 @@ func TestRelaunchExecutionEnvsOverride(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := ptypes.TimestampProto(startTime) existingClosure := admin.ExecutionClosure{ @@ -1796,7 +1795,7 @@ func TestRecoverExecution(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, 
&eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := ptypes.TimestampProto(startTime) existingClosure := admin.ExecutionClosure{ @@ -1855,7 +1854,7 @@ func TestRecoverExecution_RecoveredChildNode(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := ptypes.TimestampProto(startTime) existingClosure := admin.ExecutionClosure{ @@ -1956,7 +1955,7 @@ func TestRecoverExecution_GetExistingFailure(t *testing.T) { setDefaultLpCallbackForExecTest(repository) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, 
nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) expectedErr := errors.New("expected error") repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetGetCallback( @@ -1999,7 +1998,7 @@ func TestRecoverExecution_GetExistingInputsFailure(t *testing.T) { } r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), mockStorage, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), mockStorage, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := ptypes.TimestampProto(startTime) existingClosure := admin.ExecutionClosure{ @@ -2033,7 +2032,7 @@ func TestRecoverExecutionInterruptibleOverride(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := 
ptypes.TimestampProto(startTime) existingClosure := admin.ExecutionClosure{ @@ -2096,7 +2095,7 @@ func TestRecoverExecutionOverwriteCacheOverride(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := ptypes.TimestampProto(startTime) existingClosure := admin.ExecutionClosure{ @@ -2157,7 +2156,7 @@ func TestRecoverExecutionEnvsOverride(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := 
ptypes.TimestampProto(startTime) existingClosure := admin.ExecutionClosure{ @@ -2270,7 +2269,7 @@ func TestCreateWorkflowEvent(t *testing.T) { mockDbEventWriter.On("Write", request) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, &mockPublisher, &mockPublisher, mockDbEventWriter, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, &mockPublisher, &mockPublisher, mockDbEventWriter) resp, err := execManager.CreateWorkflowEvent(context.Background(), request) assert.Nil(t, err) assert.NotNil(t, resp) @@ -2300,7 +2299,7 @@ func TestCreateWorkflowEvent_TerminalState(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetUpdateCallback(updateExecutionFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) resp, err := 
execManager.CreateWorkflowEvent(context.Background(), admin.WorkflowExecutionEventRequest{ RequestId: "1", @@ -2340,7 +2339,7 @@ func TestCreateWorkflowEvent_NoRunningToQueued(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetUpdateCallback(updateExecutionFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) resp, err := execManager.CreateWorkflowEvent(context.Background(), admin.WorkflowExecutionEventRequest{ RequestId: "1", @@ -2388,7 +2387,7 @@ func TestCreateWorkflowEvent_CurrentlyAborting(t *testing.T) { mockDbEventWriter.On("Write", req) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, &mockPublisher, &mockPublisher, mockDbEventWriter, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, &mockPublisher, 
&mockPublisher, mockDbEventWriter) resp, err := execManager.CreateWorkflowEvent(context.Background(), req) assert.NotNil(t, resp) @@ -2456,7 +2455,7 @@ func TestCreateWorkflowEvent_StartedRunning(t *testing.T) { mockDbEventWriter.On("Write", request) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, &mockPublisher, &mockPublisher, mockDbEventWriter, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, &mockPublisher, &mockPublisher, mockDbEventWriter) resp, err := execManager.CreateWorkflowEvent(context.Background(), request) assert.Nil(t, err) assert.NotNil(t, resp) @@ -2489,7 +2488,7 @@ func TestCreateWorkflowEvent_DuplicateRunning(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) occurredAtTimestamp, _ := 
ptypes.TimestampProto(occurredAt) resp, err := execManager.CreateWorkflowEvent(context.Background(), admin.WorkflowExecutionEventRequest{ RequestId: "1", @@ -2532,7 +2531,7 @@ func TestCreateWorkflowEvent_InvalidPhaseChange(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) occurredAtTimestamp, _ := ptypes.TimestampProto(occurredAt) resp, err := execManager.CreateWorkflowEvent(context.Background(), admin.WorkflowExecutionEventRequest{ RequestId: "1", @@ -2599,7 +2598,7 @@ func TestCreateWorkflowEvent_ClusterReassignmentOnQueued(t *testing.T) { mockDbEventWriter.On("Write", request) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, &mockPublisher, &mockPublisher, mockDbEventWriter, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, 
&mockPublisher, &mockPublisher, mockDbEventWriter) resp, err := execManager.CreateWorkflowEvent(context.Background(), request) assert.Nil(t, err) @@ -2622,7 +2621,7 @@ func TestCreateWorkflowEvent_InvalidEvent(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetUpdateCallback(updateExecutionFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) resp, err := execManager.CreateWorkflowEvent(context.Background(), admin.WorkflowExecutionEventRequest{ RequestId: "1", Event: &event.WorkflowExecutionEvent{ @@ -2652,7 +2651,7 @@ func TestCreateWorkflowEvent_UpdateModelError(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, 
mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) resp, err := execManager.CreateWorkflowEvent(context.Background(), admin.WorkflowExecutionEventRequest{ RequestId: "1", Event: &event.WorkflowExecutionEvent{ @@ -2687,7 +2686,7 @@ func TestCreateWorkflowEvent_DatabaseGetError(t *testing.T) { } r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) resp, err := execManager.CreateWorkflowEvent(context.Background(), admin.WorkflowExecutionEventRequest{ RequestId: "1", Event: &event.WorkflowExecutionEvent{ @@ -2723,7 +2722,7 @@ func TestCreateWorkflowEvent_DatabaseUpdateError(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetUpdateCallback(updateExecutionFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), 
getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) resp, err := execManager.CreateWorkflowEvent(context.Background(), admin.WorkflowExecutionEventRequest{ RequestId: "1", Event: &event.WorkflowExecutionEvent{ @@ -2768,7 +2767,7 @@ func TestCreateWorkflowEvent_IncompatibleCluster(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) occurredAtTimestamp, _ := ptypes.TimestampProto(occurredAt) resp, err := execManager.CreateWorkflowEvent(context.Background(), admin.WorkflowExecutionEventRequest{ RequestId: "1", @@ -2826,7 +2825,7 @@ func TestGetExecution(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetGetCallback(executionGetFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), 
nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) execution, err := execManager.GetExecution(context.Background(), admin.WorkflowExecutionGetRequest{ Id: &executionIdentifier, }) @@ -2849,7 +2848,7 @@ func TestGetExecution_DatabaseError(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetGetCallback(executionGetFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) execution, err := execManager.GetExecution(context.Background(), admin.WorkflowExecutionGetRequest{ Id: &executionIdentifier, }) @@ -2881,7 +2880,7 @@ func TestGetExecution_TransformerError(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetGetCallback(executionGetFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, 
nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) execution, err := execManager.GetExecution(context.Background(), admin.WorkflowExecutionGetRequest{ Id: &executionIdentifier, }) @@ -2894,7 +2893,7 @@ func TestUpdateExecution(t *testing.T) { repository := repositoryMocks.NewMockRepository() r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) _, err := execManager.UpdateExecution(context.Background(), admin.ExecutionUpdateRequest{ Id: &core.WorkflowExecutionIdentifier{ Project: "project", @@ -2916,7 +2915,7 @@ func TestUpdateExecution(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetUpdateCallback(updateExecFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), 
mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) updateResponse, err := execManager.UpdateExecution(context.Background(), admin.ExecutionUpdateRequest{ Id: &executionIdentifier, }, time.Now()) @@ -2937,7 +2936,7 @@ func TestUpdateExecution(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetUpdateCallback(updateExecFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) updateResponse, err := execManager.UpdateExecution(context.Background(), admin.ExecutionUpdateRequest{ Id: &executionIdentifier, State: admin.ExecutionState_EXECUTION_ARCHIVED, @@ -2955,7 +2954,7 @@ func TestUpdateExecution(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetUpdateCallback(updateExecFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - 
execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) _, err := execManager.UpdateExecution(context.Background(), admin.ExecutionUpdateRequest{ Id: &executionIdentifier, State: admin.ExecutionState_EXECUTION_ARCHIVED, @@ -2972,7 +2971,7 @@ func TestUpdateExecution(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetGetCallback(getExecFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) _, err := execManager.UpdateExecution(context.Background(), admin.ExecutionUpdateRequest{ Id: &executionIdentifier, State: admin.ExecutionState_EXECUTION_ARCHIVED, @@ -3042,7 +3041,7 @@ func TestListExecutions(t *testing.T) { 
repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetListCallback(executionListFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) executionList, err := execManager.ListExecutions(context.Background(), admin.ResourceListRequest{ Id: &admin.NamedEntityIdentifier{ @@ -3075,7 +3074,7 @@ func TestListExecutions(t *testing.T) { func TestListExecutions_MissingParameters(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repositoryMocks.NewMockRepository(), r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repositoryMocks.NewMockRepository(), r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) _, err := execManager.ListExecutions(context.Background(), admin.ResourceListRequest{ 
Id: &admin.NamedEntityIdentifier{ Domain: domainValue, @@ -3114,7 +3113,7 @@ func TestListExecutions_DatabaseError(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetListCallback(executionListFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) _, err := execManager.ListExecutions(context.Background(), admin.ResourceListRequest{ Id: &admin.NamedEntityIdentifier{ Project: projectValue, @@ -3147,7 +3146,7 @@ func TestListExecutions_TransformerError(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetListCallback(executionListFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, 
nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) executionList, err := execManager.ListExecutions(context.Background(), admin.ResourceListRequest{ Id: &admin.NamedEntityIdentifier{ @@ -3450,7 +3449,7 @@ func TestTerminateExecution(t *testing.T) { mockExecutor.OnID().Return("customMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) identity, err := auth.NewIdentityContext("", principal, "", time.Now(), sets.NewString(), nil, nil) assert.NoError(t, err) @@ -3485,7 +3484,7 @@ func TestTerminateExecution_PropellerError(t *testing.T) { assert.Equal(t, core.WorkflowExecution_ABORTING.String(), execution.Phase) return nil }) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, 
&eventWriterMocks.WorkflowExecutionEventWriter{}) resp, err := execManager.TerminateExecution(context.Background(), admin.ExecutionTerminateRequest{ Id: &core.WorkflowExecutionIdentifier{ @@ -3517,7 +3516,7 @@ func TestTerminateExecution_DatabaseError(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) resp, err := execManager.TerminateExecution(context.Background(), admin.ExecutionTerminateRequest{ Id: &core.WorkflowExecutionIdentifier{ Project: "project", @@ -3547,7 +3546,7 @@ func TestTerminateExecution_AlreadyTerminated(t *testing.T) { Phase: core.WorkflowExecution_SUCCEEDED.String(), }, nil }) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, 
&eventWriterMocks.WorkflowExecutionEventWriter{}) resp, err := execManager.TerminateExecution(context.Background(), admin.ExecutionTerminateRequest{ Id: &core.WorkflowExecutionIdentifier{ Project: "project", @@ -3639,7 +3638,7 @@ func TestGetExecutionData(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetGetCallback(executionGetFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), mockStorage, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), mockStorage, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) dataResponse, err := execManager.GetExecutionData(context.Background(), admin.WorkflowExecutionGetDataRequest{ Id: &executionIdentifier, }) @@ -3704,7 +3703,7 @@ func TestAddPluginOverrides(t *testing.T) { } r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(db, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(db, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) 
taskPluginOverrides, err := execManager.(*ExecutionManager).addPluginOverrides( context.Background(), executionID, workflowName, launchPlanName) @@ -3737,7 +3736,7 @@ func TestPluginOverrides_ResourceGetFailure(t *testing.T) { } r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(db, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(db, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) _, err := execManager.(*ExecutionManager).addPluginOverrides( context.Background(), executionID, workflowName, launchPlanName) @@ -3771,7 +3770,7 @@ func TestGetExecution_Legacy(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetGetCallback(executionGetFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, 
&eventWriterMocks.WorkflowExecutionEventWriter{}) execution, err := execManager.GetExecution(context.Background(), admin.WorkflowExecutionGetRequest{ Id: &executionIdentifier, }) @@ -3834,7 +3833,7 @@ func TestGetExecutionData_LegacyModel(t *testing.T) { storageClient := getMockStorageForExecTest(context.Background()) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), storageClient, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), storageClient, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) dataResponse, err := execManager.GetExecutionData(context.Background(), admin.WorkflowExecutionGetDataRequest{ Id: &executionIdentifier, }) @@ -3882,7 +3881,7 @@ func TestCreateExecution_LegacyClient(t *testing.T) { mockExecutor.OnID().Return("customMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, 
&eventWriterMocks.WorkflowExecutionEventWriter{}) response, err := execManager.CreateExecution(context.Background(), *getLegacyExecutionRequest(), requestedAt) assert.Nil(t, err) @@ -3904,7 +3903,7 @@ func TestRelaunchExecution_LegacyModel(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), storageClient, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), storageClient, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) startTime := time.Now() startTimeProto, _ := ptypes.TimestampProto(startTime) existingClosure := getLegacyClosure() @@ -4024,7 +4023,7 @@ func TestListExecutions_LegacyModel(t *testing.T) { repository.ExecutionRepo().(*repositoryMocks.MockExecutionRepo).SetListCallback(executionListFunc) r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, 
&eventWriterMocks.WorkflowExecutionEventWriter{}) executionList, err := execManager.ListExecutions(context.Background(), admin.ResourceListRequest{ Id: &admin.NamedEntityIdentifier{ @@ -4080,7 +4079,7 @@ func TestSetDefaults(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repositoryMocks.NewMockRepository(), r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repositoryMocks.NewMockRepository(), r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) execManager.(*ExecutionManager).setCompiledTaskDefaults(context.Background(), task, workflowengineInterfaces.TaskResources{ Defaults: runtimeInterfaces.TaskResourceSet{ CPU: resource.MustParse("200m"), @@ -4165,7 +4164,7 @@ func TestSetDefaults_MissingRequests_ExistingRequestsPreserved(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repositoryMocks.NewMockRepository(), r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repositoryMocks.NewMockRepository(), r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), 
mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) execManager.(*ExecutionManager).setCompiledTaskDefaults(context.Background(), task, workflowengineInterfaces.TaskResources{ Defaults: runtimeInterfaces.TaskResourceSet{ CPU: resource.MustParse("200m"), @@ -4243,7 +4242,7 @@ func TestSetDefaults_OptionalRequiredResources(t *testing.T) { t.Run("don't inject ephemeral storage or gpu when only the limit is set in config", func(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repositoryMocks.NewMockRepository(), r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repositoryMocks.NewMockRepository(), r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) execManager.(*ExecutionManager).setCompiledTaskDefaults(context.Background(), task, workflowengineInterfaces.TaskResources{ Defaults: runtimeInterfaces.TaskResourceSet{ CPU: resource.MustParse("200m"), @@ -4282,7 +4281,7 @@ func TestSetDefaults_OptionalRequiredResources(t *testing.T) { t.Run("respect non-required resources when defaults exist in config", func(t *testing.T) { r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &defaultTestExecutor) - execManager := NewExecutionManager(repositoryMocks.NewMockRepository(), r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), 
mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repositoryMocks.NewMockRepository(), r, getMockExecutionsConfigProvider(), getMockStorageForExecTest(context.Background()), mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, nil, nil, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) execManager.(*ExecutionManager).setCompiledTaskDefaults(context.Background(), task, workflowengineInterfaces.TaskResources{ Limits: taskConfigLimits, Defaults: runtimeInterfaces.TaskResourceSet{ @@ -4480,7 +4479,7 @@ func TestCreateSingleTaskExecution(t *testing.T) { workflowManager := NewWorkflowManager( repository, getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), mockStorage, - storagePrefix, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + storagePrefix, mockScope.NewTestScope()) namedEntityManager := NewNamedEntityManager(repository, getMockConfigForNETest(), mockScope.NewTestScope()) mockExecutor := workflowengineMocks.WorkflowExecutor{} @@ -4488,7 +4487,7 @@ func TestCreateSingleTaskExecution(t *testing.T) { mockExecutor.OnID().Return("testMockExecutor") r := plugins.NewRegistry() r.RegisterDefault(plugins.PluginIDWorkflowExecutor, &mockExecutor) - execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), mockStorage, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, workflowManager, namedEntityManager, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}, artifacts.NewArtifactRegistry(context.Background(), nil)) + execManager := NewExecutionManager(repository, r, getMockExecutionsConfigProvider(), mockStorage, mockScope.NewTestScope(), mockScope.NewTestScope(), &mockPublisher, mockExecutionRemoteURL, 
workflowManager, namedEntityManager, nil, nil, &eventWriterMocks.WorkflowExecutionEventWriter{}) request := admin.ExecutionCreateRequest{ Project: "flytekit", Domain: "production", @@ -5719,5 +5718,96 @@ func TestAddStateFilter(t *testing.T) { assert.NoError(t, err) assert.Equal(t, "state <> ?", expression.Query) }) +} + +func TestQueryTemplate(t *testing.T) { + ctx := context.Background() + + aTime := time.Date( + 2063, 4, 5, 00, 00, 00, 0, time.UTC) + + rawInputs := map[string]interface{}{ + "aStr": "hello world", + "anInt": 1, + "aFloat": 1.3, + "aTime": aTime, + } + + otherInputs, err := coreutils.MakeLiteralMap(rawInputs) + assert.NoError(t, err) + + m := ExecutionManager{} + + ak := &core.ArtifactKey{ + Project: "project", + Domain: "domain", + Name: "testname", + } + t.Run("test all present, nothing to fill in", func(t *testing.T) { + pMap := map[string]*core.LabelValue{ + "partition1": {Value: &core.LabelValue_StaticValue{StaticValue: "my value"}}, + "partition2": {Value: &core.LabelValue_StaticValue{StaticValue: "my value 2"}}, + } + p := &core.Partitions{Value: pMap} + + q := core.ArtifactQuery{ + Identifier: &core.ArtifactQuery_ArtifactId{ + ArtifactId: &core.ArtifactID{ + ArtifactKey: ak, + Partitions: p, + TimePartition: nil, + }, + }, + } + + filledQuery, err := m.fillInTemplateArgs(ctx, q, otherInputs.Literals) + assert.NoError(t, err) + assert.True(t, proto.Equal(&q, &filledQuery)) + }) + + t.Run("template date-times, both in explicit tp and not", func(t *testing.T) { + pMap := map[string]*core.LabelValue{ + "partition1": {Value: &core.LabelValue_InputBinding{InputBinding: &core.InputBindingData{Var: "aTime"}}}, + "partition2": {Value: &core.LabelValue_StaticValue{StaticValue: "my value 2"}}, + } + p := &core.Partitions{Value: pMap} + + q := core.ArtifactQuery{ + Identifier: &core.ArtifactQuery_ArtifactId{ + ArtifactId: &core.ArtifactID{ + ArtifactKey: ak, + Partitions: p, + TimePartition: &core.TimePartition{Value: &core.LabelValue{Value: 
&core.LabelValue_InputBinding{InputBinding: &core.InputBindingData{Var: "aTime"}}}}, + }, + }, + } + + filledQuery, err := m.fillInTemplateArgs(ctx, q, otherInputs.Literals) + assert.NoError(t, err) + staticTime := filledQuery.GetArtifactId().Partitions.Value["partition1"].GetStaticValue() + assert.Equal(t, "2063-04-05", staticTime) + assert.Equal(t, int64(2942956800), filledQuery.GetArtifactId().TimePartition.Value.GetTimeValue().Seconds) + }) + + t.Run("something missing", func(t *testing.T) { + pMap := map[string]*core.LabelValue{ + "partition1": {Value: &core.LabelValue_StaticValue{StaticValue: "my value"}}, + "partition2": {Value: &core.LabelValue_StaticValue{StaticValue: "my value 2"}}, + } + p := &core.Partitions{Value: pMap} + + q := core.ArtifactQuery{ + Identifier: &core.ArtifactQuery_ArtifactId{ + ArtifactId: &core.ArtifactID{ + ArtifactKey: ak, + Partitions: p, + TimePartition: &core.TimePartition{Value: &core.LabelValue{Value: &core.LabelValue_InputBinding{InputBinding: &core.InputBindingData{Var: "wrong var"}}}}, + }, + }, + } + + _, err := m.fillInTemplateArgs(ctx, q, otherInputs.Literals) + assert.Error(t, err) + }) } diff --git a/flyteadmin/pkg/manager/impl/launch_plan_manager.go b/flyteadmin/pkg/manager/impl/launch_plan_manager.go index 48441905a1..093b4d7cce 100644 --- a/flyteadmin/pkg/manager/impl/launch_plan_manager.go +++ b/flyteadmin/pkg/manager/impl/launch_plan_manager.go @@ -9,7 +9,6 @@ import ( "github.com/prometheus/client_golang/prometheus" "google.golang.org/grpc/codes" - "github.com/flyteorg/flyte/flyteadmin/pkg/artifacts" scheduleInterfaces "github.com/flyteorg/flyte/flyteadmin/pkg/async/schedule/interfaces" "github.com/flyteorg/flyte/flyteadmin/pkg/common" "github.com/flyteorg/flyte/flyteadmin/pkg/errors" @@ -35,11 +34,10 @@ type launchPlanMetrics struct { } type LaunchPlanManager struct { - db repoInterfaces.Repository - config runtimeInterfaces.Configuration - scheduler scheduleInterfaces.EventScheduler - metrics launchPlanMetrics 
- artifactRegistry *artifacts.ArtifactRegistry + db repoInterfaces.Repository + config runtimeInterfaces.Configuration + scheduler scheduleInterfaces.EventScheduler + metrics launchPlanMetrics } func getLaunchPlanContext(ctx context.Context, identifier *core.Identifier) context.Context { @@ -87,21 +85,6 @@ func (m *LaunchPlanManager) CreateLaunchPlan( return nil, err } - // The presence of this field indicates that this is a trigger launch plan - // Return true and send this request over to the artifact registry instead - if launchPlan.Spec.GetEntityMetadata() != nil && launchPlan.Spec.GetEntityMetadata().GetLaunchConditions() != nil { - // TODO: Artifact feature gate, remove when ready - if m.artifactRegistry.GetClient() == nil { - logger.Debugf(ctx, "artifact feature not enabled, skipping launch plan %v", launchPlan.Id) - return &admin.LaunchPlanCreateResponse{}, nil - } - err := m.artifactRegistry.RegisterTrigger(ctx, &launchPlan) - if err != nil { - return nil, err - } - return &admin.LaunchPlanCreateResponse{}, nil - } - existingLaunchPlanModel, err := util.GetLaunchPlanModel(ctx, m.db, *request.Id) if err == nil { if bytes.Equal(existingLaunchPlanModel.Digest, launchPlanDigest) { @@ -128,17 +111,6 @@ func (m *LaunchPlanManager) CreateLaunchPlan( } m.metrics.SpecSizeBytes.Observe(float64(len(launchPlanModel.Spec))) m.metrics.ClosureSizeBytes.Observe(float64(len(launchPlanModel.Closure))) - // TODO: Artifact feature gate, remove when ready - if m.artifactRegistry.GetClient() != nil { - go func() { - ceCtx := context.TODO() - if launchPlan.Spec.DefaultInputs == nil { - logger.Debugf(ceCtx, "Insufficient fields to submit launchplan interface %v", launchPlan.Id) - return - } - m.artifactRegistry.RegisterArtifactConsumer(ceCtx, launchPlan.Id, *launchPlan.Spec.DefaultInputs) - }() - } return &admin.LaunchPlanCreateResponse{}, nil } @@ -573,8 +545,7 @@ func NewLaunchPlanManager( db repoInterfaces.Repository, config runtimeInterfaces.Configuration, scheduler 
scheduleInterfaces.EventScheduler, - scope promutils.Scope, - artifactRegistry *artifacts.ArtifactRegistry) interfaces.LaunchPlanInterface { + scope promutils.Scope) interfaces.LaunchPlanInterface { metrics := launchPlanMetrics{ Scope: scope, @@ -584,10 +555,9 @@ func NewLaunchPlanManager( ClosureSizeBytes: scope.MustNewSummary("closure_size_bytes", "size in bytes of serialized launch plan closure"), } return &LaunchPlanManager{ - db: db, - config: config, - scheduler: scheduler, - metrics: metrics, - artifactRegistry: artifactRegistry, + db: db, + config: config, + scheduler: scheduler, + metrics: metrics, } } diff --git a/flyteadmin/pkg/manager/impl/launch_plan_manager_test.go b/flyteadmin/pkg/manager/impl/launch_plan_manager_test.go index 64e069f26d..2863f2747d 100644 --- a/flyteadmin/pkg/manager/impl/launch_plan_manager_test.go +++ b/flyteadmin/pkg/manager/impl/launch_plan_manager_test.go @@ -12,7 +12,6 @@ import ( "github.com/stretchr/testify/assert" "google.golang.org/grpc/codes" - "github.com/flyteorg/flyte/flyteadmin/pkg/artifacts" scheduleInterfaces "github.com/flyteorg/flyte/flyteadmin/pkg/async/schedule/interfaces" "github.com/flyteorg/flyte/flyteadmin/pkg/async/schedule/mocks" "github.com/flyteorg/flyte/flyteadmin/pkg/common" @@ -90,7 +89,7 @@ func TestCreateLaunchPlan(t *testing.T) { return nil }) setDefaultWorkflowCallbackForLpTest(repository) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) request := testutils.GetLaunchPlanRequest() response, err := lpManager.CreateLaunchPlan(context.Background(), request) assert.Nil(t, err) @@ -102,7 +101,7 @@ func TestCreateLaunchPlan(t *testing.T) { func TestLaunchPlanManager_GetLaunchPlan(t *testing.T) { repository := getMockRepositoryForLpTest() - lpManager := 
NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) state := int32(0) lpRequest := testutils.GetLaunchPlanRequest() workflowRequest := testutils.GetWorkflowRequest() @@ -138,7 +137,7 @@ func TestLaunchPlanManager_GetLaunchPlan(t *testing.T) { func TestLaunchPlanManager_GetActiveLaunchPlan(t *testing.T) { repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) state := int32(1) lpRequest := testutils.GetLaunchPlanRequest() workflowRequest := testutils.GetWorkflowRequest() @@ -197,7 +196,7 @@ func TestLaunchPlanManager_GetActiveLaunchPlan(t *testing.T) { func TestLaunchPlanManager_GetActiveLaunchPlan_NoneActive(t *testing.T) { repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) lpRequest := testutils.GetLaunchPlanRequest() launchPlanListFunc := func(input interfaces.ListResourceInput) (interfaces.LaunchPlanCollectionOutput, error) { @@ -217,7 +216,7 @@ func TestLaunchPlanManager_GetActiveLaunchPlan_NoneActive(t *testing.T) { func TestLaunchPlanManager_GetActiveLaunchPlan_InvalidRequest(t *testing.T) { repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), 
nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) response, err := lpManager.GetActiveLaunchPlan(context.Background(), admin.ActiveLaunchPlanRequest{ Id: &admin.NamedEntityIdentifier{ Domain: domain, @@ -229,7 +228,7 @@ func TestLaunchPlanManager_GetActiveLaunchPlan_InvalidRequest(t *testing.T) { } func TestLaunchPlan_ValidationError(t *testing.T) { - lpManager := NewLaunchPlanManager(repositoryMocks.NewMockRepository(), getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repositoryMocks.NewMockRepository(), getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) request := testutils.GetLaunchPlanRequest() request.Id = nil response, err := lpManager.CreateLaunchPlan(context.Background(), request) @@ -239,7 +238,7 @@ func TestLaunchPlan_ValidationError(t *testing.T) { func TestLaunchPlanManager_CreateLaunchPlanErrorDueToBadLabels(t *testing.T) { repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) request := testutils.GetLaunchPlanRequest() request.Spec.Labels = &admin.Labels{ Values: map[string]string{ @@ -264,7 +263,7 @@ func TestLaunchPlan_DatabaseError(t *testing.T) { } repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetCreateCallback(lpCreateFunc) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) request := testutils.GetLaunchPlanRequest() response, 
err := lpManager.CreateLaunchPlan(context.Background(), request) assert.EqualError(t, err, expectedErr.Error()) @@ -274,7 +273,7 @@ func TestLaunchPlan_DatabaseError(t *testing.T) { func TestCreateLaunchPlanInCompatibleInputs(t *testing.T) { repository := getMockRepositoryForLpTest() setDefaultWorkflowCallbackForLpTest(repository) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) request := testutils.GetLaunchPlanRequest() request.Spec.DefaultInputs = &core.ParameterMap{ Parameters: map[string]*core.Parameter{ @@ -324,7 +323,7 @@ func TestCreateLaunchPlanValidateCreate(t *testing.T) { } repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetCreateCallback(lpCreateFunc) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) request := testutils.GetLaunchPlanRequest() response, err := lpManager.CreateLaunchPlan(context.Background(), request) assert.Nil(t, err) @@ -365,7 +364,7 @@ func TestCreateLaunchPlanNoWorkflowInterface(t *testing.T) { } repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetCreateCallback(lpCreateFunc) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) request := testutils.GetLaunchPlanRequest() request.Spec.FixedInputs = nil request.Spec.DefaultInputs = nil @@ -413,7 +412,7 @@ func TestEnableSchedule(t *testing.T) { 
*input.Payload) return nil }) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) err := lpManager.(*LaunchPlanManager).enableSchedule( context.Background(), launchPlanNamedIdentifier, @@ -434,7 +433,7 @@ func TestEnableSchedule_Error(t *testing.T) { func(ctx context.Context, input scheduleInterfaces.AddScheduleInput) error { return expectedErr }) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) err := lpManager.(*LaunchPlanManager).enableSchedule( context.Background(), launchPlanNamedIdentifier, admin.LaunchPlanSpec{ @@ -453,7 +452,7 @@ func TestDisableSchedule(t *testing.T) { assert.True(t, proto.Equal(&launchPlanNamedIdentifier, &input.Identifier)) return nil }) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) err := lpManager.(*LaunchPlanManager).disableSchedule(context.Background(), launchPlanNamedIdentifier) assert.Nil(t, err) } @@ -467,7 +466,7 @@ func TestDisableSchedule_Error(t *testing.T) { func(ctx context.Context, input scheduleInterfaces.RemoveScheduleInput) error { return expectedErr }) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, 
mockScope.NewTestScope()) err := lpManager.(*LaunchPlanManager).disableSchedule(context.Background(), launchPlanNamedIdentifier) assert.EqualError(t, err, expectedErr.Error()) } @@ -515,7 +514,7 @@ func TestUpdateSchedules(t *testing.T) { return nil }) repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) err := lpManager.(*LaunchPlanManager).updateSchedules( context.Background(), models.LaunchPlan{ @@ -568,7 +567,7 @@ func TestUpdateSchedules_NothingToDisableButRedo(t *testing.T) { return nil }) repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) err := lpManager.(*LaunchPlanManager).updateSchedules(context.Background(), models.LaunchPlan{ LaunchPlanKey: models.LaunchPlanKey{ Project: project, @@ -637,7 +636,7 @@ func TestUpdateSchedules_NothingToEnableButRedo(t *testing.T) { }) repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) err := lpManager.(*LaunchPlanManager).updateSchedules(context.Background(), models.LaunchPlan{ LaunchPlanKey: models.LaunchPlanKey{ Project: project, @@ -686,7 +685,7 @@ func TestUpdateSchedules_NothingToDoButRedo(t *testing.T) { }) repository := getMockRepositoryForLpTest() - lpManager := 
NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) err := lpManager.(*LaunchPlanManager).updateSchedules(context.Background(), models.LaunchPlan{ LaunchPlanKey: models.LaunchPlanKey{ Project: project, @@ -747,7 +746,7 @@ func TestUpdateSchedules_EnableNoSchedule(t *testing.T) { }) repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) err := lpManager.(*LaunchPlanManager).updateSchedules(context.Background(), models.LaunchPlan{ LaunchPlanKey: models.LaunchPlanKey{ Project: project, @@ -828,7 +827,7 @@ func TestDisableLaunchPlan(t *testing.T) { repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetUpdateCallback(disableFunc) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) _, err := lpManager.UpdateLaunchPlan(context.Background(), admin.LaunchPlanUpdateRequest{ Id: &launchPlanIdentifier, State: admin.LaunchPlanState_INACTIVE, @@ -849,7 +848,7 @@ func TestDisableLaunchPlan_DatabaseError(t *testing.T) { return models.LaunchPlan{}, expectedError } repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetGetCallback(lpGetFunc) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := 
NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) _, err := lpManager.UpdateLaunchPlan(context.Background(), admin.LaunchPlanUpdateRequest{ Id: &launchPlanIdentifier, State: admin.LaunchPlanState_INACTIVE, @@ -882,7 +881,7 @@ func TestDisableLaunchPlan_DatabaseError(t *testing.T) { return expectedError } repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetUpdateCallback(disableFunc) - lpManager = NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager = NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) _, err = lpManager.UpdateLaunchPlan(context.Background(), admin.LaunchPlanUpdateRequest{ Id: &launchPlanIdentifier, State: admin.LaunchPlanState_INACTIVE, @@ -939,7 +938,7 @@ func TestEnableLaunchPlan(t *testing.T) { } repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetSetActiveCallback(enableFunc) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) _, err := lpManager.UpdateLaunchPlan(context.Background(), admin.LaunchPlanUpdateRequest{ Id: &launchPlanIdentifier, State: admin.LaunchPlanState_ACTIVE, @@ -968,7 +967,7 @@ func TestEnableLaunchPlan_NoCurrentlyActiveVersion(t *testing.T) { } repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetSetActiveCallback(enableFunc) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) _, err := 
lpManager.UpdateLaunchPlan(context.Background(), admin.LaunchPlanUpdateRequest{ Id: &launchPlanIdentifier, State: admin.LaunchPlanState_ACTIVE, @@ -988,7 +987,7 @@ func TestEnableLaunchPlan_DatabaseError(t *testing.T) { return models.LaunchPlan{}, expectedError } repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetGetCallback(lpGetFunc) - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) _, err := lpManager.UpdateLaunchPlan(context.Background(), admin.LaunchPlanUpdateRequest{ Id: &launchPlanIdentifier, State: admin.LaunchPlanState_ACTIVE, @@ -996,7 +995,7 @@ func TestEnableLaunchPlan_DatabaseError(t *testing.T) { assert.EqualError(t, err, expectedError.Error(), "Failures on getting the existing launch plan should propagate") lpGetFunc = makeLaunchPlanRepoGetCallback(t) - lpManager = NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager = NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) listFunc := func(input interfaces.ListResourceInput) (interfaces.LaunchPlanCollectionOutput, error) { return interfaces.LaunchPlanCollectionOutput{}, expectedError } @@ -1044,7 +1043,7 @@ func TestEnableLaunchPlan_DatabaseError(t *testing.T) { return expectedError } repository.LaunchPlanRepo().(*repositoryMocks.MockLaunchPlanRepo).SetSetActiveCallback(enableFunc) - lpManager = NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager = NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) _, err = 
lpManager.UpdateLaunchPlan(context.Background(), admin.LaunchPlanUpdateRequest{ Id: &launchPlanIdentifier, State: admin.LaunchPlanState_ACTIVE, @@ -1054,7 +1053,7 @@ func TestEnableLaunchPlan_DatabaseError(t *testing.T) { func TestLaunchPlanManager_ListLaunchPlans(t *testing.T) { repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) state := int32(0) lpRequest := testutils.GetLaunchPlanRequest() workflowRequest := testutils.GetWorkflowRequest() @@ -1161,7 +1160,7 @@ func TestLaunchPlanManager_ListLaunchPlans(t *testing.T) { func TestLaunchPlanManager_ListLaunchPlanIds(t *testing.T) { repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) state := int32(0) lpRequest := testutils.GetLaunchPlanRequest() workflowRequest := testutils.GetWorkflowRequest() @@ -1244,7 +1243,7 @@ func TestLaunchPlanManager_ListLaunchPlanIds(t *testing.T) { func TestLaunchPlanManager_ListActiveLaunchPlans(t *testing.T) { repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) state := int32(admin.LaunchPlanState_ACTIVE) lpRequest := testutils.GetLaunchPlanRequest() workflowRequest := testutils.GetWorkflowRequest() @@ -1331,7 +1330,7 @@ func 
TestLaunchPlanManager_ListActiveLaunchPlans(t *testing.T) { func TestLaunchPlanManager_ListActiveLaunchPlans_BadRequest(t *testing.T) { repository := getMockRepositoryForLpTest() - lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + lpManager := NewLaunchPlanManager(repository, getMockConfigForLpTest(), mockScheduler, mockScope.NewTestScope()) lpList, err := lpManager.ListActiveLaunchPlans(context.Background(), admin.ActiveLaunchPlanListRequest{ Domain: domain, Limit: 10, diff --git a/flyteadmin/pkg/manager/impl/node_execution_manager.go b/flyteadmin/pkg/manager/impl/node_execution_manager.go index afe17b6ac6..d618a1784f 100644 --- a/flyteadmin/pkg/manager/impl/node_execution_manager.go +++ b/flyteadmin/pkg/manager/impl/node_execution_manager.go @@ -297,6 +297,31 @@ func (m *NodeExecutionManager) CreateNodeEvent(ctx context.Context, request admi return &admin.NodeExecutionEventResponse{}, nil } +func (m *NodeExecutionManager) GetDynamicNodeWorkflow(ctx context.Context, request admin.GetDynamicNodeWorkflowRequest) (*admin.DynamicNodeWorkflowResponse, error) { + if err := validation.ValidateNodeExecutionIdentifier(request.Id); err != nil { + logger.Debugf(ctx, "can't get node execution data with invalid identifier [%+v]: %v", request.Id, err) + } + + ctx = getNodeExecutionContext(ctx, request.Id) + nodeExecutionModel, err := util.GetNodeExecutionModel(ctx, m.db, request.Id) + if err != nil { + logger.Errorf(ctx, "failed to get node execution with id [%+v] with err %v", + request.Id, err) + return nil, err + } + + if nodeExecutionModel.DynamicWorkflowRemoteClosureReference == "" { + return &admin.DynamicNodeWorkflowResponse{}, errors.NewFlyteAdminErrorf(codes.NotFound, "node does not contain dynamic workflow") + } + + closure, err := m.fetchDynamicWorkflowClosure(ctx, nodeExecutionModel.DynamicWorkflowRemoteClosureReference) + if err != nil { + 
return nil, err + } + + return &admin.DynamicNodeWorkflowResponse{CompiledWorkflow: closure}, nil +} + // Handles making additional database calls, if necessary, to populate IsParent & IsDynamic data using the historical pattern of // preloading child node executions. Otherwise, simply calls transform on the input model. func (m *NodeExecutionManager) transformNodeExecutionModel(ctx context.Context, nodeExecutionModel models.NodeExecution, @@ -516,23 +541,15 @@ func (m *NodeExecutionManager) GetNodeExecutionData( } if len(nodeExecutionModel.DynamicWorkflowRemoteClosureReference) > 0 { - closure := &core.CompiledWorkflowClosure{} - err := m.storageClient.ReadProtobuf(ctx, storage.DataReference(nodeExecutionModel.DynamicWorkflowRemoteClosureReference), closure) + closure, err := m.fetchDynamicWorkflowClosure(ctx, nodeExecutionModel.DynamicWorkflowRemoteClosureReference) if err != nil { - return nil, errors.NewFlyteAdminErrorf(codes.Internal, - "Unable to read WorkflowClosure from location %s : %v", nodeExecutionModel.DynamicWorkflowRemoteClosureReference, err) + return nil, err } - if wf := closure.Primary; wf == nil { - return nil, errors.NewFlyteAdminErrorf(codes.Internal, "Empty primary workflow definition in loaded dynamic workflow model.") - } else if template := wf.Template; template == nil { - return nil, errors.NewFlyteAdminErrorf(codes.Internal, "Empty primary workflow template in loaded dynamic workflow model.") - } else { - response.DynamicWorkflow = &admin.DynamicWorkflowNodeMetadata{ - Id: closure.Primary.Template.Id, - CompiledWorkflow: closure, - DynamicJobSpecUri: nodeExecution.Closure.DynamicJobSpecUri, - } + response.DynamicWorkflow = &admin.DynamicWorkflowNodeMetadata{ + Id: closure.Primary.Template.Id, + CompiledWorkflow: closure, + DynamicJobSpecUri: nodeExecution.Closure.DynamicJobSpecUri, } } @@ -546,6 +563,21 @@ func (m *NodeExecutionManager) GetNodeExecutionData( return response, nil } +func (m *NodeExecutionManager) 
fetchDynamicWorkflowClosure(ctx context.Context, ref string) (*core.CompiledWorkflowClosure, error) { + closure := &core.CompiledWorkflowClosure{} + err := m.storageClient.ReadProtobuf(ctx, storage.DataReference(ref), closure) + if err != nil { + return nil, errors.NewFlyteAdminErrorf(codes.Internal, "Unable to read WorkflowClosure from location %s : %v", ref, err) + } + + if wf := closure.Primary; wf == nil { + return nil, errors.NewFlyteAdminErrorf(codes.Internal, "Empty primary workflow definition in loaded dynamic workflow model.") + } else if template := wf.Template; template == nil { + return nil, errors.NewFlyteAdminErrorf(codes.Internal, "Empty primary workflow template in loaded dynamic workflow model.") + } + return closure, nil +} + func NewNodeExecutionManager(db repoInterfaces.Repository, config runtimeInterfaces.Configuration, storagePrefix []string, storageClient *storage.DataStore, scope promutils.Scope, urlData dataInterfaces.RemoteURLInterface, eventPublisher notificationInterfaces.Publisher, cloudEventPublisher cloudeventInterfaces.Publisher, diff --git a/flyteadmin/pkg/manager/impl/node_execution_manager_test.go b/flyteadmin/pkg/manager/impl/node_execution_manager_test.go index 1f36d28afc..821a3c8ca1 100644 --- a/flyteadmin/pkg/manager/impl/node_execution_manager_test.go +++ b/flyteadmin/pkg/manager/impl/node_execution_manager_test.go @@ -10,7 +10,9 @@ import ( "github.com/golang/protobuf/proto" "github.com/golang/protobuf/ptypes" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" eventWriterMocks "github.com/flyteorg/flyte/flyteadmin/pkg/async/events/mocks" "github.com/flyteorg/flyte/flyteadmin/pkg/common" @@ -1319,3 +1321,148 @@ func TestGetNodeExecutionData(t *testing.T) { }, }, dataResponse)) } + +func Test_GetDynamicNodeWorkflow_Success(t *testing.T) { + repo := repositoryMocks.NewMockRepository() + nodeExecID := core.NodeExecutionIdentifier{ + 
ExecutionId: &core.WorkflowExecutionIdentifier{ + Project: project, + Domain: domain, + Name: name, + }, + } + repo.NodeExecutionRepo().(*repositoryMocks.MockNodeExecutionRepo). + SetGetCallback(func(ctx context.Context, input interfaces.NodeExecutionResource) (models.NodeExecution, error) { + assert.Equal(t, nodeExecID, input.NodeExecutionIdentifier) + return models.NodeExecution{DynamicWorkflowRemoteClosureReference: remoteClosureIdentifier}, nil + }) + mockStorageClient := commonMocks.GetMockStorageClient() + expectedClosure := testutils.GetWorkflowClosure().CompiledWorkflow + mockStorageClient.ComposedProtobufStore.(*commonMocks.TestDataStore).ReadProtobufCb = func(ctx context.Context, reference storage.DataReference, msg proto.Message) error { + assert.Equal(t, remoteClosureIdentifier, reference.String()) + bytes, err := proto.Marshal(expectedClosure) + require.NoError(t, err) + return proto.Unmarshal(bytes, msg) + } + ctx := context.TODO() + nodeExecManager := NewNodeExecutionManager(repo, + getMockExecutionsConfigProvider(), + storagePrefix, + mockStorageClient, + mockScope.NewTestScope(), + mockNodeExecutionRemoteURL, + nil, nil, + &eventWriterMocks.NodeExecutionEventWriter{}) + expected := &admin.DynamicNodeWorkflowResponse{ + CompiledWorkflow: expectedClosure, + } + + resp, err := nodeExecManager.GetDynamicNodeWorkflow(ctx, admin.GetDynamicNodeWorkflowRequest{Id: &nodeExecID}) + + assert.NoError(t, err) + assert.True(t, proto.Equal(expected, resp)) +} + +func Test_GetDynamicNodeWorkflow_DBError(t *testing.T) { + repo := repositoryMocks.NewMockRepository() + nodeExecID := core.NodeExecutionIdentifier{ + ExecutionId: &core.WorkflowExecutionIdentifier{ + Project: project, + Domain: domain, + Name: name, + }, + } + expectedErr := errors.New("failure") + repo.NodeExecutionRepo().(*repositoryMocks.MockNodeExecutionRepo). 
+ SetGetCallback(func(ctx context.Context, input interfaces.NodeExecutionResource) (models.NodeExecution, error) { + assert.Equal(t, nodeExecID, input.NodeExecutionIdentifier) + return models.NodeExecution{}, expectedErr + }) + mockStorageClient := commonMocks.GetMockStorageClient() + ctx := context.TODO() + nodeExecManager := NewNodeExecutionManager(repo, + getMockExecutionsConfigProvider(), + storagePrefix, + mockStorageClient, + mockScope.NewTestScope(), + mockNodeExecutionRemoteURL, + nil, nil, + &eventWriterMocks.NodeExecutionEventWriter{}) + + resp, err := nodeExecManager.GetDynamicNodeWorkflow(ctx, admin.GetDynamicNodeWorkflowRequest{Id: &nodeExecID}) + + assert.Equal(t, expectedErr, err) + assert.Empty(t, resp) +} + +func Test_GetDynamicNodeWorkflow_NoRemoteReference(t *testing.T) { + repo := repositoryMocks.NewMockRepository() + nodeExecID := core.NodeExecutionIdentifier{ + ExecutionId: &core.WorkflowExecutionIdentifier{ + Project: project, + Domain: domain, + Name: name, + }, + } + repo.NodeExecutionRepo().(*repositoryMocks.MockNodeExecutionRepo). 
+ SetGetCallback(func(ctx context.Context, input interfaces.NodeExecutionResource) (models.NodeExecution, error) { + assert.Equal(t, nodeExecID, input.NodeExecutionIdentifier) + return models.NodeExecution{DynamicWorkflowRemoteClosureReference: ""}, nil + }) + mockStorageClient := commonMocks.GetMockStorageClient() + ctx := context.TODO() + nodeExecManager := NewNodeExecutionManager(repo, + getMockExecutionsConfigProvider(), + storagePrefix, + mockStorageClient, + mockScope.NewTestScope(), + mockNodeExecutionRemoteURL, + nil, nil, + &eventWriterMocks.NodeExecutionEventWriter{}) + + resp, err := nodeExecManager.GetDynamicNodeWorkflow(ctx, admin.GetDynamicNodeWorkflowRequest{Id: &nodeExecID}) + + st, ok := status.FromError(err) + assert.True(t, ok) + assert.Equal(t, codes.NotFound, st.Code()) + assert.Equal(t, "node does not contain dynamic workflow", st.Message()) + assert.Empty(t, resp) +} + +func Test_GetDynamicNodeWorkflow_StorageError(t *testing.T) { + repo := repositoryMocks.NewMockRepository() + nodeExecID := core.NodeExecutionIdentifier{ + ExecutionId: &core.WorkflowExecutionIdentifier{ + Project: project, + Domain: domain, + Name: name, + }, + } + repo.NodeExecutionRepo().(*repositoryMocks.MockNodeExecutionRepo). 
+ SetGetCallback(func(ctx context.Context, input interfaces.NodeExecutionResource) (models.NodeExecution, error) { + assert.Equal(t, nodeExecID, input.NodeExecutionIdentifier) + return models.NodeExecution{DynamicWorkflowRemoteClosureReference: remoteClosureIdentifier}, nil + }) + mockStorageClient := commonMocks.GetMockStorageClient() + mockStorageClient.ComposedProtobufStore.(*commonMocks.TestDataStore).ReadProtobufCb = func(ctx context.Context, reference storage.DataReference, msg proto.Message) error { + assert.Equal(t, remoteClosureIdentifier, reference.String()) + return errors.New("failure") + } + ctx := context.TODO() + nodeExecManager := NewNodeExecutionManager(repo, + getMockExecutionsConfigProvider(), + storagePrefix, + mockStorageClient, + mockScope.NewTestScope(), + mockNodeExecutionRemoteURL, + nil, nil, + &eventWriterMocks.NodeExecutionEventWriter{}) + + resp, err := nodeExecManager.GetDynamicNodeWorkflow(ctx, admin.GetDynamicNodeWorkflowRequest{Id: &nodeExecID}) + + st, ok := status.FromError(err) + assert.True(t, ok) + assert.Equal(t, codes.Internal, st.Code()) + assert.Equal(t, "Unable to read WorkflowClosure from location s3://flyte/metadata/admin/remote closure id : failure", st.Message()) + assert.Empty(t, resp) +} diff --git a/flyteadmin/pkg/manager/impl/task_manager.go b/flyteadmin/pkg/manager/impl/task_manager.go index 2d6bc2a91e..d42639c31e 100644 --- a/flyteadmin/pkg/manager/impl/task_manager.go +++ b/flyteadmin/pkg/manager/impl/task_manager.go @@ -6,12 +6,10 @@ import ( "strconv" "time" - "github.com/golang/protobuf/proto" "github.com/golang/protobuf/ptypes" "github.com/prometheus/client_golang/prometheus" "google.golang.org/grpc/codes" - "github.com/flyteorg/flyte/flyteadmin/pkg/artifacts" "github.com/flyteorg/flyte/flyteadmin/pkg/common" "github.com/flyteorg/flyte/flyteadmin/pkg/errors" "github.com/flyteorg/flyte/flyteadmin/pkg/manager/impl/resources" @@ -38,12 +36,11 @@ type taskMetrics struct { } type TaskManager struct { - db 
repoInterfaces.Repository - config runtimeInterfaces.Configuration - compiler workflowengine.Compiler - metrics taskMetrics - resourceManager interfaces.ResourceInterface - artifactRegistry *artifacts.ArtifactRegistry + db repoInterfaces.Repository + config runtimeInterfaces.Configuration + compiler workflowengine.Compiler + metrics taskMetrics + resourceManager interfaces.ResourceInterface } func getTaskContext(ctx context.Context, identifier *core.Identifier) context.Context { @@ -133,18 +130,6 @@ func (t *TaskManager) CreateTask( contextWithRuntimeMeta, common.RuntimeVersionKey, finalizedRequest.Spec.Template.Metadata.Runtime.Version) t.metrics.Registered.Inc(contextWithRuntimeMeta) } - // TODO: Artifact feature gate, remove when ready - if t.artifactRegistry.GetClient() != nil { - tIfaceCopy := proto.Clone(finalizedRequest.Spec.Template.Interface).(*core.TypedInterface) - go func() { - ceCtx := context.TODO() - if finalizedRequest.Spec.Template.Interface == nil { - logger.Debugf(ceCtx, "Task [%+v] has no interface, skipping registration", finalizedRequest.Id) - return - } - t.artifactRegistry.RegisterArtifactProducer(ceCtx, finalizedRequest.Id, *tIfaceCopy) - }() - } return &admin.TaskCreateResponse{}, nil } @@ -276,8 +261,7 @@ func (t *TaskManager) ListUniqueTaskIdentifiers(ctx context.Context, request adm func NewTaskManager( db repoInterfaces.Repository, config runtimeInterfaces.Configuration, compiler workflowengine.Compiler, - scope promutils.Scope, - artifactRegistry *artifacts.ArtifactRegistry) interfaces.TaskInterface { + scope promutils.Scope) interfaces.TaskInterface { metrics := taskMetrics{ Scope: scope, @@ -286,11 +270,10 @@ func NewTaskManager( } resourceManager := resources.NewResourceManager(db, config.ApplicationConfiguration()) return &TaskManager{ - db: db, - config: config, - compiler: compiler, - metrics: metrics, - resourceManager: resourceManager, - artifactRegistry: artifactRegistry, + db: db, + config: config, + compiler: compiler, + 
metrics: metrics, + resourceManager: resourceManager, } } diff --git a/flyteadmin/pkg/manager/impl/task_manager_test.go b/flyteadmin/pkg/manager/impl/task_manager_test.go index bc19311bb3..6037e431b7 100644 --- a/flyteadmin/pkg/manager/impl/task_manager_test.go +++ b/flyteadmin/pkg/manager/impl/task_manager_test.go @@ -10,7 +10,6 @@ import ( "github.com/stretchr/testify/assert" "google.golang.org/grpc/codes" - "github.com/flyteorg/flyte/flyteadmin/pkg/artifacts" "github.com/flyteorg/flyte/flyteadmin/pkg/common" adminErrors "github.com/flyteorg/flyte/flyteadmin/pkg/errors" "github.com/flyteorg/flyte/flyteadmin/pkg/manager/impl/testutils" @@ -86,7 +85,7 @@ func TestCreateTask(t *testing.T) { return models.DescriptionEntity{}, adminErrors.NewFlyteAdminErrorf(codes.NotFound, "NotFound") }) taskManager := NewTaskManager(mockRepository, getMockConfigForTaskTest(), getMockTaskCompiler(), - mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + mockScope.NewTestScope()) request := testutils.GetValidTaskRequest() response, err := taskManager.CreateTask(context.Background(), request) assert.NoError(t, err) @@ -103,7 +102,7 @@ func TestCreateTask(t *testing.T) { func TestCreateTask_ValidationError(t *testing.T) { mockRepository := getMockTaskRepository() taskManager := NewTaskManager(mockRepository, getMockConfigForTaskTest(), getMockTaskCompiler(), - mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + mockScope.NewTestScope()) request := testutils.GetValidTaskRequest() request.Id = nil response, err := taskManager.CreateTask(context.Background(), request) @@ -120,7 +119,7 @@ func TestCreateTask_CompilerError(t *testing.T) { }) mockRepository := getMockTaskRepository() taskManager := NewTaskManager(mockRepository, getMockConfigForTaskTest(), mockCompiler, - mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + mockScope.NewTestScope()) request := testutils.GetValidTaskRequest() 
response, err := taskManager.CreateTask(context.Background(), request) assert.EqualError(t, err, expectedErr.Error()) @@ -139,7 +138,7 @@ func TestCreateTask_DatabaseError(t *testing.T) { } repository.TaskRepo().(*repositoryMocks.MockTaskRepo).SetCreateCallback(taskCreateFunc) - taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope()) request := testutils.GetValidTaskRequest() response, err := taskManager.CreateTask(context.Background(), request) assert.EqualError(t, err, expectedErr.Error()) @@ -167,7 +166,7 @@ func TestGetTask(t *testing.T) { }, nil } repository.TaskRepo().(*repositoryMocks.MockTaskRepo).SetGetCallback(taskGetFunc) - taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope()) task, err := taskManager.GetTask(context.Background(), admin.ObjectGetRequest{ Id: &taskIdentifier, @@ -187,7 +186,7 @@ func TestGetTask_DatabaseError(t *testing.T) { return models.Task{}, expectedErr } repository.TaskRepo().(*repositoryMocks.MockTaskRepo).SetGetCallback(taskGetFunc) - taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope()) task, err := taskManager.GetTask(context.Background(), admin.ObjectGetRequest{ Id: &taskIdentifier, }) @@ -213,7 +212,7 @@ func TestGetTask_TransformerError(t *testing.T) { }, nil } 
repository.TaskRepo().(*repositoryMocks.MockTaskRepo).SetGetCallback(taskGetFunc) - taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope()) task, err := taskManager.GetTask(context.Background(), admin.ObjectGetRequest{ Id: &taskIdentifier, @@ -272,7 +271,7 @@ func TestListTasks(t *testing.T) { }, nil } repository.TaskRepo().(*repositoryMocks.MockTaskRepo).SetListCallback(taskListFunc) - taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope()) taskList, err := taskManager.ListTasks(context.Background(), admin.ResourceListRequest{ Id: &admin.NamedEntityIdentifier{ @@ -304,7 +303,7 @@ func TestListTasks(t *testing.T) { func TestListTasks_MissingParameters(t *testing.T) { repository := getMockTaskRepository() - taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope()) _, err := taskManager.ListTasks(context.Background(), admin.ResourceListRequest{ Id: &admin.NamedEntityIdentifier{ Domain: domainValue, @@ -334,7 +333,7 @@ func TestListTasks_DatabaseError(t *testing.T) { } repository.TaskRepo().(*repositoryMocks.MockTaskRepo).SetListCallback(taskListFunc) - taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + taskManager := 
NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope()) _, err := taskManager.ListTasks(context.Background(), admin.ResourceListRequest{ Id: &admin.NamedEntityIdentifier{ Project: projectValue, @@ -348,7 +347,7 @@ func TestListTasks_DatabaseError(t *testing.T) { func TestListUniqueTaskIdentifiers(t *testing.T) { repository := getMockTaskRepository() - taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + taskManager := NewTaskManager(repository, getMockConfigForTaskTest(), getMockTaskCompiler(), mockScope.NewTestScope()) listFunc := func(input interfaces.ListResourceInput) (interfaces.TaskCollectionOutput, error) { // Test that parameters are being passed in diff --git a/flyteadmin/pkg/manager/impl/util/resources.go b/flyteadmin/pkg/manager/impl/util/resources.go index 8001b907dd..79aadb61b2 100644 --- a/flyteadmin/pkg/manager/impl/util/resources.go +++ b/flyteadmin/pkg/manager/impl/util/resources.go @@ -66,10 +66,6 @@ func fromAdminProtoTaskResourceSpec(ctx context.Context, spec *admin.TaskResourc result.Memory = parseQuantityNoError(ctx, "project", "memory", spec.Memory) } - if len(spec.Storage) > 0 { - result.Storage = parseQuantityNoError(ctx, "project", "storage", spec.Storage) - } - if len(spec.EphemeralStorage) > 0 { result.EphemeralStorage = parseQuantityNoError(ctx, "project", "ephemeral storage", spec.EphemeralStorage) } diff --git a/flyteadmin/pkg/manager/impl/util/resources_test.go b/flyteadmin/pkg/manager/impl/util/resources_test.go index af5c15f87d..c163b44e0c 100644 --- a/flyteadmin/pkg/manager/impl/util/resources_test.go +++ b/flyteadmin/pkg/manager/impl/util/resources_test.go @@ -31,14 +31,12 @@ func TestGetTaskResources(t *testing.T) { GPU: resource.MustParse("8"), Memory: resource.MustParse("200Gi"), EphemeralStorage: resource.MustParse("500Mi"), - Storage: 
resource.MustParse("400Mi"), } taskConfig.Limits = runtimeInterfaces.TaskResourceSet{ CPU: resource.MustParse("300m"), GPU: resource.MustParse("8"), Memory: resource.MustParse("500Gi"), EphemeralStorage: resource.MustParse("501Mi"), - Storage: resource.MustParse("450Mi"), } t.Run("use runtime application values", func(t *testing.T) { @@ -61,14 +59,12 @@ func TestGetTaskResources(t *testing.T) { GPU: resource.MustParse("8"), Memory: resource.MustParse("200Gi"), EphemeralStorage: resource.MustParse("500Mi"), - Storage: resource.MustParse("400Mi"), }, Limits: runtimeInterfaces.TaskResourceSet{ CPU: resource.MustParse("300m"), GPU: resource.MustParse("8"), Memory: resource.MustParse("500Gi"), EphemeralStorage: resource.MustParse("501Mi"), - Storage: resource.MustParse("450Mi"), }, }) }) @@ -91,14 +87,12 @@ func TestGetTaskResources(t *testing.T) { Gpu: "18", Memory: "1200Gi", EphemeralStorage: "1500Mi", - Storage: "1400Mi", }, Limits: &admin.TaskResourceSpec{ Cpu: "300m", Gpu: "8", Memory: "500Gi", EphemeralStorage: "501Mi", - Storage: "450Mi", }, }, }, @@ -112,14 +106,12 @@ func TestGetTaskResources(t *testing.T) { GPU: resource.MustParse("18"), Memory: resource.MustParse("1200Gi"), EphemeralStorage: resource.MustParse("1500Mi"), - Storage: resource.MustParse("1400Mi"), }, Limits: runtimeInterfaces.TaskResourceSet{ CPU: resource.MustParse("300m"), GPU: resource.MustParse("8"), Memory: resource.MustParse("500Gi"), EphemeralStorage: resource.MustParse("501Mi"), - Storage: resource.MustParse("450Mi"), }, }) }) @@ -129,14 +121,12 @@ func TestFromAdminProtoTaskResourceSpec(t *testing.T) { taskResourceSet := fromAdminProtoTaskResourceSpec(context.TODO(), &admin.TaskResourceSpec{ Cpu: "1", Memory: "100", - Storage: "200", EphemeralStorage: "300", Gpu: "2", }) assert.EqualValues(t, runtimeInterfaces.TaskResourceSet{ CPU: resource.MustParse("1"), Memory: resource.MustParse("100"), - Storage: resource.MustParse("200"), EphemeralStorage: resource.MustParse("300"), GPU: 
resource.MustParse("2"), }, taskResourceSet) diff --git a/flyteadmin/pkg/manager/impl/workflow_manager.go b/flyteadmin/pkg/manager/impl/workflow_manager.go index 726e3988c3..4b755a4707 100644 --- a/flyteadmin/pkg/manager/impl/workflow_manager.go +++ b/flyteadmin/pkg/manager/impl/workflow_manager.go @@ -6,12 +6,10 @@ import ( "strconv" "time" - "github.com/golang/protobuf/proto" "github.com/golang/protobuf/ptypes" "github.com/prometheus/client_golang/prometheus" "google.golang.org/grpc/codes" - "github.com/flyteorg/flyte/flyteadmin/pkg/artifacts" "github.com/flyteorg/flyte/flyteadmin/pkg/common" "github.com/flyteorg/flyte/flyteadmin/pkg/errors" "github.com/flyteorg/flyte/flyteadmin/pkg/manager/impl/util" @@ -41,13 +39,12 @@ type workflowMetrics struct { } type WorkflowManager struct { - db repoInterfaces.Repository - config runtimeInterfaces.Configuration - compiler workflowengineInterfaces.Compiler - storageClient *storage.DataStore - storagePrefix []string - metrics workflowMetrics - artifactRegistry *artifacts.ArtifactRegistry + db repoInterfaces.Repository + config runtimeInterfaces.Configuration + compiler workflowengineInterfaces.Compiler + storageClient *storage.DataStore + storagePrefix []string + metrics workflowMetrics } func getWorkflowContext(ctx context.Context, identifier *core.Identifier) context.Context { @@ -220,22 +217,6 @@ func (w *WorkflowManager) CreateWorkflow( } w.metrics.TypedInterfaceSizeBytes.Observe(float64(len(workflowModel.TypedInterface))) - // Send the interface definition to Artifact service, this is so that it can statically pick up one dimension of - // lineage information - tIfaceCopy := proto.Clone(workflowClosure.CompiledWorkflow.Primary.Template.Interface).(*core.TypedInterface) - // TODO: Artifact feature gate, remove when ready - if w.artifactRegistry.GetClient() != nil { - go func() { - ceCtx := context.TODO() - if workflowClosure.CompiledWorkflow == nil || workflowClosure.CompiledWorkflow.Primary == nil { - 
logger.Debugf(ceCtx, "Insufficient fields to submit workflow interface %v", finalizedRequest.Id) - return - } - - w.artifactRegistry.RegisterArtifactProducer(ceCtx, finalizedRequest.Id, *tIfaceCopy) - }() - } - return &admin.WorkflowCreateResponse{}, nil } @@ -367,8 +348,7 @@ func NewWorkflowManager( compiler workflowengineInterfaces.Compiler, storageClient *storage.DataStore, storagePrefix []string, - scope promutils.Scope, - artifactRegistry *artifacts.ArtifactRegistry) interfaces.WorkflowInterface { + scope promutils.Scope) interfaces.WorkflowInterface { metrics := workflowMetrics{ Scope: scope, @@ -378,12 +358,11 @@ func NewWorkflowManager( "size in bytes of serialized workflow TypedInterface"), } return &WorkflowManager{ - db: db, - config: config, - compiler: compiler, - storageClient: storageClient, - storagePrefix: storagePrefix, - metrics: metrics, - artifactRegistry: artifactRegistry, + db: db, + config: config, + compiler: compiler, + storageClient: storageClient, + storagePrefix: storagePrefix, + metrics: metrics, } } diff --git a/flyteadmin/pkg/manager/impl/workflow_manager_test.go b/flyteadmin/pkg/manager/impl/workflow_manager_test.go index 9b9fb611d7..99da70a64b 100644 --- a/flyteadmin/pkg/manager/impl/workflow_manager_test.go +++ b/flyteadmin/pkg/manager/impl/workflow_manager_test.go @@ -11,7 +11,6 @@ import ( "google.golang.org/grpc/codes" "google.golang.org/grpc/status" - "github.com/flyteorg/flyte/flyteadmin/pkg/artifacts" "github.com/flyteorg/flyte/flyteadmin/pkg/common" commonMocks "github.com/flyteorg/flyte/flyteadmin/pkg/common/mocks" adminErrors "github.com/flyteorg/flyte/flyteadmin/pkg/errors" @@ -124,7 +123,7 @@ func TestSetWorkflowDefaults(t *testing.T) { workflowManager := NewWorkflowManager( getMockRepository(returnWorkflowOnGet), getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), commonMocks.GetMockStorageClient(), storagePrefix, - mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + 
mockScope.NewTestScope()) request := testutils.GetWorkflowRequest() finalizedRequest, err := workflowManager.(*WorkflowManager).setDefaults(request) assert.NoError(t, err) @@ -144,7 +143,7 @@ func TestCreateWorkflow(t *testing.T) { workflowManager := NewWorkflowManager( repository, - getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), getMockStorage(), storagePrefix, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), getMockStorage(), storagePrefix, mockScope.NewTestScope()) request := testutils.GetWorkflowRequest() response, err := workflowManager.CreateWorkflow(context.Background(), request) assert.NoError(t, err) @@ -165,7 +164,7 @@ func TestCreateWorkflow_ValidationError(t *testing.T) { workflowManager := NewWorkflowManager( repositoryMocks.NewMockRepository(), getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), commonMocks.GetMockStorageClient(), storagePrefix, - mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + mockScope.NewTestScope()) request := testutils.GetWorkflowRequest() request.Id = nil response, err := workflowManager.CreateWorkflow(context.Background(), request) @@ -184,7 +183,7 @@ func TestCreateWorkflow_ExistingWorkflow(t *testing.T) { } workflowManager := NewWorkflowManager( getMockRepository(returnWorkflowOnGet), - getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), mockStorageClient, storagePrefix, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), mockStorageClient, storagePrefix, mockScope.NewTestScope()) request := testutils.GetWorkflowRequest() response, err := workflowManager.CreateWorkflow(context.Background(), request) assert.EqualError(t, err, "workflow with different structure already exists") @@ -201,7 +200,7 @@ func TestCreateWorkflow_ExistingWorkflow_Different(t *testing.T) { } 
workflowManager := NewWorkflowManager( getMockRepository(returnWorkflowOnGet), - getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), mockStorageClient, storagePrefix, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), mockStorageClient, storagePrefix, mockScope.NewTestScope()) request := testutils.GetWorkflowRequest() response, err := workflowManager.CreateWorkflow(context.Background(), request) @@ -222,7 +221,7 @@ func TestCreateWorkflow_CompilerGetRequirementsError(t *testing.T) { workflowManager := NewWorkflowManager( getMockRepository(!returnWorkflowOnGet), - getMockWorkflowConfigProvider(), mockCompiler, getMockStorage(), storagePrefix, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + getMockWorkflowConfigProvider(), mockCompiler, getMockStorage(), storagePrefix, mockScope.NewTestScope()) request := testutils.GetWorkflowRequest() response, err := workflowManager.CreateWorkflow(context.Background(), request) assert.EqualError(t, err, fmt.Sprintf( @@ -242,7 +241,7 @@ func TestCreateWorkflow_CompileWorkflowError(t *testing.T) { workflowManager := NewWorkflowManager( getMockRepository(!returnWorkflowOnGet), - getMockWorkflowConfigProvider(), mockCompiler, getMockStorage(), storagePrefix, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + getMockWorkflowConfigProvider(), mockCompiler, getMockStorage(), storagePrefix, mockScope.NewTestScope()) request := testutils.GetWorkflowRequest() response, err := workflowManager.CreateWorkflow(context.Background(), request) assert.Nil(t, response) @@ -264,7 +263,7 @@ func TestCreateWorkflow_DatabaseError(t *testing.T) { repository.WorkflowRepo().(*repositoryMocks.MockWorkflowRepo).SetCreateCallback(workflowCreateFunc) workflowManager := NewWorkflowManager( repository, getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), getMockStorage(), 
storagePrefix, - mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + mockScope.NewTestScope()) request := testutils.GetWorkflowRequest() response, err := workflowManager.CreateWorkflow(context.Background(), request) assert.EqualError(t, err, expectedErr.Error()) @@ -304,7 +303,7 @@ func TestGetWorkflow(t *testing.T) { } workflowManager := NewWorkflowManager( repository, getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), mockStorageClient, storagePrefix, - mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + mockScope.NewTestScope()) workflow, err := workflowManager.GetWorkflow(context.Background(), admin.ObjectGetRequest{ Id: &workflowIdentifier, }) @@ -326,7 +325,7 @@ func TestGetWorkflow_DatabaseError(t *testing.T) { repository.WorkflowRepo().(*repositoryMocks.MockWorkflowRepo).SetGetCallback(workflowGetFunc) workflowManager := NewWorkflowManager( repository, getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), commonMocks.GetMockStorageClient(), - storagePrefix, mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + storagePrefix, mockScope.NewTestScope()) workflow, err := workflowManager.GetWorkflow(context.Background(), admin.ObjectGetRequest{ Id: &workflowIdentifier, }) @@ -362,7 +361,7 @@ func TestGetWorkflow_TransformerError(t *testing.T) { workflowManager := NewWorkflowManager( repository, getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), mockStorageClient, storagePrefix, - mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + mockScope.NewTestScope()) workflow, err := workflowManager.GetWorkflow(context.Background(), admin.ObjectGetRequest{ Id: &workflowIdentifier, }) @@ -433,7 +432,7 @@ func TestListWorkflows(t *testing.T) { } workflowManager := NewWorkflowManager( repository, getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), mockStorageClient, storagePrefix, - mockScope.NewTestScope(), 
artifacts.NewArtifactRegistry(context.Background(), nil)) + mockScope.NewTestScope()) workflowList, err := workflowManager.ListWorkflows(context.Background(), admin.ResourceListRequest{ Id: &admin.NamedEntityIdentifier{ @@ -475,7 +474,7 @@ func TestListWorkflows_MissingParameters(t *testing.T) { workflowManager := NewWorkflowManager( repositoryMocks.NewMockRepository(), getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), commonMocks.GetMockStorageClient(), storagePrefix, - mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + mockScope.NewTestScope()) _, err := workflowManager.ListWorkflows(context.Background(), admin.ResourceListRequest{ Id: &admin.NamedEntityIdentifier{ Domain: domainValue, @@ -507,7 +506,7 @@ func TestListWorkflows_DatabaseError(t *testing.T) { repository.WorkflowRepo().(*repositoryMocks.MockWorkflowRepo).SetListCallback(workflowListFunc) workflowManager := NewWorkflowManager(repository, getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), commonMocks.GetMockStorageClient(), storagePrefix, - mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + mockScope.NewTestScope()) _, err := workflowManager.ListWorkflows(context.Background(), admin.ResourceListRequest{ Id: &admin.NamedEntityIdentifier{ Project: projectValue, @@ -570,7 +569,7 @@ func TestWorkflowManager_ListWorkflowIdentifiers(t *testing.T) { } workflowManager := NewWorkflowManager( repository, getMockWorkflowConfigProvider(), getMockWorkflowCompiler(), mockStorageClient, storagePrefix, - mockScope.NewTestScope(), artifacts.NewArtifactRegistry(context.Background(), nil)) + mockScope.NewTestScope()) workflowList, err := workflowManager.ListWorkflowIdentifiers(context.Background(), admin.NamedEntityIdentifierListRequest{ diff --git a/flyteadmin/pkg/manager/interfaces/node_execution.go b/flyteadmin/pkg/manager/interfaces/node_execution.go index 3aa949ec85..97f2a1a166 100644 --- 
a/flyteadmin/pkg/manager/interfaces/node_execution.go +++ b/flyteadmin/pkg/manager/interfaces/node_execution.go @@ -15,4 +15,5 @@ type NodeExecutionInterface interface { ListNodeExecutionsForTask(ctx context.Context, request admin.NodeExecutionForTaskListRequest) (*admin.NodeExecutionList, error) GetNodeExecutionData( ctx context.Context, request admin.NodeExecutionGetDataRequest) (*admin.NodeExecutionGetDataResponse, error) + GetDynamicNodeWorkflow(ctx context.Context, request admin.GetDynamicNodeWorkflowRequest) (*admin.DynamicNodeWorkflowResponse, error) } diff --git a/flyteadmin/pkg/manager/mocks/node_execution.go b/flyteadmin/pkg/manager/mocks/node_execution.go index 313566fc3d..83295e423a 100644 --- a/flyteadmin/pkg/manager/mocks/node_execution.go +++ b/flyteadmin/pkg/manager/mocks/node_execution.go @@ -84,3 +84,7 @@ func (m *MockNodeExecutionManager) GetNodeExecutionData( } return nil, nil } + +func (m *MockNodeExecutionManager) GetDynamicNodeWorkflow(ctx context.Context, request admin.GetDynamicNodeWorkflowRequest) (*admin.DynamicNodeWorkflowResponse, error) { + return nil, nil +} diff --git a/flyteadmin/pkg/rpc/adminservice/base.go b/flyteadmin/pkg/rpc/adminservice/base.go index 1e91fec5d4..5a2cb2ad89 100644 --- a/flyteadmin/pkg/rpc/adminservice/base.go +++ b/flyteadmin/pkg/rpc/adminservice/base.go @@ -7,7 +7,6 @@ import ( "github.com/golang/protobuf/proto" - "github.com/flyteorg/flyte/flyteadmin/pkg/artifacts" "github.com/flyteorg/flyte/flyteadmin/pkg/async/cloudevent" eventWriter "github.com/flyteorg/flyte/flyteadmin/pkg/async/events/implementations" "github.com/flyteorg/flyte/flyteadmin/pkg/async/notifications" @@ -22,7 +21,6 @@ import ( runtimeIfaces "github.com/flyteorg/flyte/flyteadmin/pkg/runtime/interfaces" workflowengineImpl "github.com/flyteorg/flyte/flyteadmin/pkg/workflowengine/impl" "github.com/flyteorg/flyte/flyteadmin/plugins" - admin2 "github.com/flyteorg/flyte/flyteidl/clients/go/admin" 
"github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" "github.com/flyteorg/flyte/flytestdlib/logger" "github.com/flyteorg/flyte/flytestdlib/promutils" @@ -44,7 +42,6 @@ type AdminService struct { DescriptionEntityManager interfaces.DescriptionEntityInterface MetricsManager interfaces.MetricsInterface Metrics AdminMetrics - Artifacts *artifacts.ArtifactRegistry } // Intercepts all admin requests to handle panics during execution. @@ -114,14 +111,8 @@ func NewAdminServer(ctx context.Context, pluginRegistry *plugins.Registry, confi eventScheduler := workflowScheduler.GetEventScheduler() - var artifactRegistry *artifacts.ArtifactRegistry - if configuration.ApplicationConfiguration().GetTopLevelConfig().FeatureGates.EnableArtifacts { - adminClientCfg := admin2.GetConfig(ctx) - artifactRegistry = artifacts.NewArtifactRegistry(ctx, adminClientCfg) - } - launchPlanManager := manager.NewLaunchPlanManager( - repo, configuration, eventScheduler, adminScope.NewSubScope("launch_plan_manager"), artifactRegistry) + repo, configuration, eventScheduler, adminScope.NewSubScope("launch_plan_manager")) // Configure admin-specific remote data handler (separate from storage) remoteDataConfig := configuration.ApplicationConfiguration().GetRemoteDataConfig() @@ -138,7 +129,7 @@ func NewAdminServer(ctx context.Context, pluginRegistry *plugins.Registry, confi workflowManager := manager.NewWorkflowManager( repo, configuration, workflowengineImpl.NewCompiler(), dataStorageClient, applicationConfiguration.GetMetadataStoragePrefix(), - adminScope.NewSubScope("workflow_manager"), artifactRegistry) + adminScope.NewSubScope("workflow_manager")) namedEntityManager := manager.NewNamedEntityManager(repo, configuration, adminScope.NewSubScope("named_entity_manager")) descriptionEntityManager := manager.NewDescriptionEntityManager(repo, configuration, adminScope.NewSubScope("description_entity_manager")) @@ -149,7 +140,7 @@ func NewAdminServer(ctx context.Context, pluginRegistry 
*plugins.Registry, confi executionManager := manager.NewExecutionManager(repo, pluginRegistry, configuration, dataStorageClient, adminScope.NewSubScope("execution_manager"), adminScope.NewSubScope("user_execution_metrics"), - publisher, urlData, workflowManager, namedEntityManager, eventPublisher, cloudEventPublisher, executionEventWriter, artifactRegistry) + publisher, urlData, workflowManager, namedEntityManager, eventPublisher, cloudEventPublisher, executionEventWriter) versionManager := manager.NewVersionManager() scheduledWorkflowExecutor := workflowScheduler.GetWorkflowExecutor(executionManager, launchPlanManager) @@ -172,7 +163,7 @@ func NewAdminServer(ctx context.Context, pluginRegistry *plugins.Registry, confi logger.Info(ctx, "Initializing a new AdminService") return &AdminService{ TaskManager: manager.NewTaskManager(repo, configuration, workflowengineImpl.NewCompiler(), - adminScope.NewSubScope("task_manager"), artifactRegistry), + adminScope.NewSubScope("task_manager")), WorkflowManager: workflowManager, LaunchPlanManager: launchPlanManager, ExecutionManager: executionManager, @@ -185,7 +176,6 @@ func NewAdminServer(ctx context.Context, pluginRegistry *plugins.Registry, confi ResourceManager: resources.NewResourceManager(repo, configuration.ApplicationConfiguration()), MetricsManager: manager.NewMetricsManager(workflowManager, executionManager, nodeExecutionManager, taskExecutionManager, adminScope.NewSubScope("metrics_manager")), - Metrics: InitMetrics(adminScope), - Artifacts: artifactRegistry, + Metrics: InitMetrics(adminScope), } } diff --git a/flyteadmin/pkg/rpc/adminservice/metrics.go b/flyteadmin/pkg/rpc/adminservice/metrics.go index 3409dfd18d..6e5060cab5 100644 --- a/flyteadmin/pkg/rpc/adminservice/metrics.go +++ b/flyteadmin/pkg/rpc/adminservice/metrics.go @@ -46,12 +46,13 @@ type namedEntityEndpointMetrics struct { type nodeExecutionEndpointMetrics struct { scope promutils.Scope - createEvent util.RequestMetrics - get util.RequestMetrics - 
getData util.RequestMetrics - getMetrics util.RequestMetrics - list util.RequestMetrics - listChildren util.RequestMetrics + createEvent util.RequestMetrics + get util.RequestMetrics + getData util.RequestMetrics + getMetrics util.RequestMetrics + list util.RequestMetrics + listChildren util.RequestMetrics + getDynamicNodeWorkflow util.RequestMetrics } type projectEndpointMetrics struct { @@ -161,13 +162,14 @@ func InitMetrics(adminScope promutils.Scope) AdminMetrics { update: util.NewRequestMetrics(adminScope, "update_named_entity"), }, nodeExecutionEndpointMetrics: nodeExecutionEndpointMetrics{ - scope: adminScope, - createEvent: util.NewRequestMetrics(adminScope, "create_node_execution_event"), - get: util.NewRequestMetrics(adminScope, "get_node_execution"), - getData: util.NewRequestMetrics(adminScope, "get_node_execution_data"), - getMetrics: util.NewRequestMetrics(adminScope, "get_node_execution_metrics"), - list: util.NewRequestMetrics(adminScope, "list_node_execution"), - listChildren: util.NewRequestMetrics(adminScope, "list_children_node_executions"), + scope: adminScope, + createEvent: util.NewRequestMetrics(adminScope, "create_node_execution_event"), + get: util.NewRequestMetrics(adminScope, "get_node_execution"), + getData: util.NewRequestMetrics(adminScope, "get_node_execution_data"), + getMetrics: util.NewRequestMetrics(adminScope, "get_node_execution_metrics"), + list: util.NewRequestMetrics(adminScope, "list_node_execution"), + listChildren: util.NewRequestMetrics(adminScope, "list_children_node_executions"), + getDynamicNodeWorkflow: util.NewRequestMetrics(adminScope, "get_dynamic_node_workflow"), }, projectEndpointMetrics: projectEndpointMetrics{ scope: adminScope, diff --git a/flyteadmin/pkg/rpc/adminservice/node_execution.go b/flyteadmin/pkg/rpc/adminservice/node_execution.go index 82ae3e1b24..cf17e3ff70 100644 --- a/flyteadmin/pkg/rpc/adminservice/node_execution.go +++ b/flyteadmin/pkg/rpc/adminservice/node_execution.go @@ -48,6 +48,24 @@ func 
(m *AdminService) GetNodeExecution( return response, nil } +func (m *AdminService) GetDynamicNodeWorkflow(ctx context.Context, request *admin.GetDynamicNodeWorkflowRequest) (*admin.DynamicNodeWorkflowResponse, error) { + defer m.interceptPanic(ctx, request) + if request == nil { + return nil, status.Errorf(codes.InvalidArgument, "Incorrect request, nil requests not allowed") + } + + var response *admin.DynamicNodeWorkflowResponse + var err error + m.Metrics.nodeExecutionEndpointMetrics.getDynamicNodeWorkflow.Time(func() { + response, err = m.NodeExecutionManager.GetDynamicNodeWorkflow(ctx, *request) + }) + if err != nil { + return nil, util.TransformAndRecordError(err, &m.Metrics.workflowEndpointMetrics.get) + } + m.Metrics.nodeExecutionEndpointMetrics.getDynamicNodeWorkflow.Success() + return response, nil +} + func (m *AdminService) ListNodeExecutions( ctx context.Context, request *admin.NodeExecutionListRequest) (*admin.NodeExecutionList, error) { defer m.interceptPanic(ctx, request) diff --git a/flyteadmin/pkg/runtime/interfaces/task_resource_configuration.go b/flyteadmin/pkg/runtime/interfaces/task_resource_configuration.go index 8c55651693..cbe8130376 100644 --- a/flyteadmin/pkg/runtime/interfaces/task_resource_configuration.go +++ b/flyteadmin/pkg/runtime/interfaces/task_resource_configuration.go @@ -6,7 +6,6 @@ type TaskResourceSet struct { CPU resource.Quantity `json:"cpu"` GPU resource.Quantity `json:"gpu"` Memory resource.Quantity `json:"memory"` - Storage resource.Quantity `json:"storage"` EphemeralStorage resource.Quantity `json:"ephemeralStorage"` } diff --git a/flyteadmin/pkg/workflowengine/impl/prepare_execution.go b/flyteadmin/pkg/workflowengine/impl/prepare_execution.go index 27574c28a4..91642599a6 100644 --- a/flyteadmin/pkg/workflowengine/impl/prepare_execution.go +++ b/flyteadmin/pkg/workflowengine/impl/prepare_execution.go @@ -85,9 +85,6 @@ func addExecutionOverrides(taskPluginOverrides []*admin.PluginOverride, if 
!taskResources.Defaults.EphemeralStorage.IsZero() { requests.EphemeralStorage = taskResources.Defaults.EphemeralStorage } - if !taskResources.Defaults.Storage.IsZero() { - requests.Storage = taskResources.Defaults.Storage - } if !taskResources.Defaults.GPU.IsZero() { requests.GPU = taskResources.Defaults.GPU } @@ -102,9 +99,6 @@ func addExecutionOverrides(taskPluginOverrides []*admin.PluginOverride, if !taskResources.Limits.EphemeralStorage.IsZero() { limits.EphemeralStorage = taskResources.Limits.EphemeralStorage } - if !taskResources.Limits.Storage.IsZero() { - limits.Storage = taskResources.Limits.Storage - } if !taskResources.Limits.GPU.IsZero() { limits.GPU = taskResources.Limits.GPU } diff --git a/flyteadmin/pkg/workflowengine/impl/prepare_execution_test.go b/flyteadmin/pkg/workflowengine/impl/prepare_execution_test.go index 7b1c0f725b..be659208b2 100644 --- a/flyteadmin/pkg/workflowengine/impl/prepare_execution_test.go +++ b/flyteadmin/pkg/workflowengine/impl/prepare_execution_test.go @@ -131,7 +131,6 @@ func TestAddExecutionOverrides(t *testing.T) { Limits: runtimeInterfaces.TaskResourceSet{ CPU: resource.MustParse("2"), Memory: resource.MustParse("200Gi"), - Storage: resource.MustParse("5Gi"), EphemeralStorage: resource.MustParse("1Gi"), GPU: resource.MustParse("1"), }, @@ -144,7 +143,6 @@ func TestAddExecutionOverrides(t *testing.T) { assert.EqualValues(t, v1alpha1.TaskResourceSpec{ CPU: resource.MustParse("2"), Memory: resource.MustParse("200Gi"), - Storage: resource.MustParse("5Gi"), EphemeralStorage: resource.MustParse("1Gi"), GPU: resource.MustParse("1"), }, workflow.ExecutionConfig.TaskResources.Limits) diff --git a/flyteadmin/pull_request_template.md b/flyteadmin/pull_request_template.md deleted file mode 100644 index 9cdab99b46..0000000000 --- a/flyteadmin/pull_request_template.md +++ /dev/null @@ -1,35 +0,0 @@ -## _Read then delete this section_ - -_- Make sure to use a concise title for the pull-request._ - -_- Use #patch, #minor or #major in the 
pull-request title to bump the corresponding version. Otherwise, the patch version -will be bumped. [More details](https://github.com/marketplace/actions/github-tag-bump)_ - -# TL;DR -_Please replace this text with a description of what this PR accomplishes._ - -## Type - - [ ] Bug Fix - - [ ] Feature - - [ ] Plugin - -## Are all requirements met? - - - [ ] Code completed - - [ ] Smoke tested - - [ ] Unit tests added - - [ ] Code documentation added - - [ ] Any pending items have an associated Issue - -## Complete description - _How did you fix the bug, make the feature etc. Link to any design docs etc_ - -## Tracking Issue -_Remove the '*fixes*' keyword if there will be multiple PRs to fix the linked issue_ - -fixes https://github.com/flyteorg/flyte/issues/ - -## Follow-up issue -_NA_ -OR -_https://github.com/flyteorg/flyte/issues/_ diff --git a/flyteadmin/scheduler.Dockerfile b/flyteadmin/scheduler.Dockerfile deleted file mode 100644 index 35753d84be..0000000000 --- a/flyteadmin/scheduler.Dockerfile +++ /dev/null @@ -1,44 +0,0 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. -# ONLY EDIT THIS FILE FROM WITHIN THE 'LYFT/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/lyft/boilerplate/blob/master/Readme.rst - -FROM --platform=${BUILDPLATFORM} golang:1.18-alpine3.15 as builder - -ARG TARGETARCH -ENV GOARCH "${TARGETARCH}" -ENV GOOS linux - -RUN apk add git openssh-client make curl - -# COPY only the go mod files for efficient caching -COPY go.mod go.sum /go/src/github.com/flyteorg/flyteadmin/ -WORKDIR /go/src/github.com/flyteorg/flyteadmin - -# Pull dependencies -RUN go mod download - -# COPY the rest of the source code -COPY . 
/go/src/github.com/flyteorg/flyteadmin/ - -# This 'linux_compile_scheduler' target should compile binaries to the /artifacts directory -# The main entrypoint should be compiled to /artifacts/flytescheduler -RUN make linux_compile_scheduler - -# update the PATH to include the /artifacts directory -ENV PATH="/artifacts:${PATH}" - -# This will eventually move to centurylink/ca-certs:latest for minimum possible image size -FROM alpine:3.15 -LABEL org.opencontainers.image.source https://github.com/flyteorg/flyteadmin - -COPY --from=builder /artifacts /bin - -# Ensure the latest CA certs are present to authenticate SSL connections. -RUN apk --update add ca-certificates - -RUN addgroup -S flyte && adduser -S flyte -G flyte -USER flyte - -CMD ["flytescheduler"] - diff --git a/flyteadmin/scheduler/executor/executor_impl.go b/flyteadmin/scheduler/executor/executor_impl.go index de711ff85e..30ab7f0677 100644 --- a/flyteadmin/scheduler/executor/executor_impl.go +++ b/flyteadmin/scheduler/executor/executor_impl.go @@ -62,7 +62,7 @@ func (w *executor) Execute(ctx context.Context, scheduledTime time.Time, s model }, scheduledTime) if err != nil { - logger.Error(ctx, "failed to generate execution identifier for schedule %+v due to %v", s, err) + logger.Errorf(ctx, "failed to generate execution identifier for schedule %+v due to %v", s, err) return err } @@ -107,7 +107,7 @@ func (w *executor) Execute(ctx context.Context, scheduledTime time.Time, s model return false } w.metrics.FailedExecutionCounter.Inc() - logger.Error(ctx, "failed to create execution create request %+v due to %v", executionRequest, err) + logger.Errorf(ctx, "failed to create execution create request %+v due to %v", executionRequest, err) // TODO: Handle the case when admin launch plan state is archived but the schedule is active. 
// After this bug is fixed in admin https://github.com/flyteorg/flyte/issues/1354 return true @@ -118,7 +118,7 @@ func (w *executor) Execute(ctx context.Context, scheduledTime time.Time, s model }, ) if err != nil && status.Code(err) != codes.AlreadyExists { - logger.Error(ctx, "failed to create execution create request %+v due to %v after all retries", executionRequest, err) + logger.Errorf(ctx, "failed to create execution create request %+v due to %v after all retries", executionRequest, err) return err } w.metrics.SuccessfulExecutionCounter.Inc() diff --git a/flyteadmin/tests/bootstrap.go b/flyteadmin/tests/bootstrap.go index 0bf1714f27..9fd3b27026 100644 --- a/flyteadmin/tests/bootstrap.go +++ b/flyteadmin/tests/bootstrap.go @@ -76,7 +76,7 @@ func truncateAllTablesForTestingOnly() { ctx := context.Background() db, err := repositories.GetDB(ctx, getDbConfig(), getLoggerConfig()) if err != nil { - logger.Fatal(ctx, "Failed to open DB connection due to %v", err) + logger.Fatalf(ctx, "Failed to open DB connection due to %v", err) } sqlDB, err := db.DB() if err != nil { @@ -110,7 +110,7 @@ func populateWorkflowExecutionForTestingOnly(project, domain, name string) { db, err := repositories.GetDB(context.Background(), getDbConfig(), getLoggerConfig()) ctx := context.Background() if err != nil { - logger.Fatal(ctx, "Failed to open DB connection due to %v", err) + logger.Fatalf(ctx, "Failed to open DB connection due to %v", err) } sqlDB, err := db.DB() if err != nil { diff --git a/flyteadmin/tests/execution_test.go b/flyteadmin/tests/execution_test.go index 2f97cfcd05..d0d373a1d5 100644 --- a/flyteadmin/tests/execution_test.go +++ b/flyteadmin/tests/execution_test.go @@ -160,7 +160,7 @@ func populateWorkflowExecutionsForTestingOnly() { db, err := repositories.GetDB(context.Background(), getDbConfig(), getLoggerConfig()) ctx := context.Background() if err != nil { - logger.Fatal(ctx, "Failed to open DB connection due to %v", err) + logger.Fatalf(ctx, "Failed to open DB 
connection due to %v", err) } sqlDB, err := db.DB() if err != nil { diff --git a/flyteadmin/tests/node_execution_test.go b/flyteadmin/tests/node_execution_test.go index 6b34749e3c..60787e430b 100644 --- a/flyteadmin/tests/node_execution_test.go +++ b/flyteadmin/tests/node_execution_test.go @@ -5,12 +5,17 @@ package tests import ( "context" + "fmt" + "io" + "net/http" "testing" "time" "github.com/golang/protobuf/proto" "github.com/golang/protobuf/ptypes" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/types/known/timestamppb" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" @@ -478,3 +483,120 @@ func TestCreateChildNodeExecutionForTaskExecution(t *testing.T) { assert.True(t, proto.Equal(taskExecutionIdentifier, taskExecutionResp.Id)) assert.True(t, taskExecutionResp.IsParent) } + +func TestGetDynamicNodeWorkflow(t *testing.T) { + ctx := context.Background() + truncateAllTablesForTestingOnly() + populateWorkflowExecutionForTestingOnly(project, domain, name) + client, conn := GetTestAdminServiceClient() + defer conn.Close() + + occurredAt := time.Now() + occurredAtProto := timestamppb.New(occurredAt) + + createTaskAndNodeExecution(ctx, t, client, conn, occurredAtProto) + + _, err := client.CreateTaskEvent(ctx, &admin.TaskExecutionEventRequest{ + RequestId: "request id", + Event: &event.TaskExecutionEvent{ + TaskId: taskIdentifier, + ParentNodeExecutionId: nodeExecutionId, + Phase: core.TaskExecution_RUNNING, + RetryAttempt: 1, + OccurredAt: occurredAtProto, + }, + }) + require.NoError(t, err) + + dynamicWfId := core.Identifier{ + ResourceType: core.ResourceType_WORKFLOW, + Project: "admintests", + Domain: "development", + Name: "name", + Version: "version", + } + dynamicWf := &core.CompiledWorkflowClosure{ + Primary: &core.CompiledWorkflow{ + Template: &core.WorkflowTemplate{ + Id: &dynamicWfId, + Interface: &core.TypedInterface{}, + 
Nodes: []*core.Node{ + { + Id: "I'm a node", + Target: &core.Node_TaskNode{ + TaskNode: &core.TaskNode{ + Reference: &core.TaskNode_ReferenceId{ + ReferenceId: taskIdentifier, + }, + }, + }, + }, + }, + }, + }, + } + childOccurredAt := occurredAt.Add(time.Minute) + childOccurredAtProto := timestamppb.New(childOccurredAt) + childNodeExecutionID := &core.NodeExecutionIdentifier{ + NodeId: "child_node", + ExecutionId: &core.WorkflowExecutionIdentifier{ + Project: project, + Domain: domain, + Name: name, + }, + } + _, err = client.CreateNodeEvent(ctx, &admin.NodeExecutionEventRequest{ + RequestId: "request id", + Event: &event.NodeExecutionEvent{ + Id: childNodeExecutionID, + Phase: core.NodeExecution_RUNNING, + InputValue: &event.NodeExecutionEvent_InputUri{ + InputUri: inputURI, + }, + OccurredAt: childOccurredAtProto, + ParentTaskMetadata: &event.ParentTaskExecutionMetadata{ + Id: taskExecutionIdentifier, + }, + IsDynamic: true, + IsParent: true, + TargetMetadata: &event.NodeExecutionEvent_TaskNodeMetadata{ + TaskNodeMetadata: &event.TaskNodeMetadata{ + DynamicWorkflow: &event.DynamicWorkflowNodeMetadata{ + Id: &dynamicWfId, + CompiledWorkflow: dynamicWf, + DynamicJobSpecUri: "s3://bla-bla", + }, + }, + }, + }, + }) + require.NoError(t, err) + + t.Run("TestGetDynamicNodeWorkflowGrpc", func(t *testing.T) { + resp, err := client.GetDynamicNodeWorkflow(ctx, &admin.GetDynamicNodeWorkflowRequest{ + Id: childNodeExecutionID, + }) + + assert.NoError(t, err) + assert.True(t, proto.Equal(dynamicWf, resp.GetCompiledWorkflow())) + }) + + t.Run("TestGetDynamicNodeWorkflowHttp", func(t *testing.T) { + url := fmt.Sprintf("%s/api/v1/node_executions/project/domain/execution%%20name/child_node/dynamic_workflow", GetTestHostEndpoint()) + getRequest, err := http.NewRequest(http.MethodGet, url, nil) + require.NoError(t, err) + addHTTPRequestHeaders(getRequest) + + httpClient := &http.Client{Timeout: 2 * time.Second} + resp, err := httpClient.Do(getRequest) + require.NoError(t, err) + 
defer resp.Body.Close() + + body, err := io.ReadAll(resp.Body) + require.NoError(t, err) + require.Equal(t, http.StatusOK, resp.StatusCode, "unexpected resp: %s", string(body)) + wfResp := &admin.DynamicNodeWorkflowResponse{} + require.NoError(t, proto.Unmarshal(body, wfResp)) + assert.True(t, proto.Equal(dynamicWf, wfResp.GetCompiledWorkflow())) + }) +} diff --git a/flyteadmin/tests/shared.go b/flyteadmin/tests/shared.go index 5c81522972..4654fa4c09 100644 --- a/flyteadmin/tests/shared.go +++ b/flyteadmin/tests/shared.go @@ -10,6 +10,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/flyteorg/flyte/flyteadmin/pkg/manager/impl/testutils" "github.com/flyteorg/flyte/flyteidl/clients/go/coreutils" @@ -48,7 +49,7 @@ func insertTasksForTests(t *testing.T, client service.AdminServiceClient) { } _, err := client.CreateTask(ctx, &req) - assert.Nil(t, err) + require.NoError(t, err) } } } @@ -105,7 +106,7 @@ func insertWorkflowsForTests(t *testing.T, client service.AdminServiceClient) { } _, err := client.CreateWorkflow(ctx, &req) - assert.Nil(t, err, "Failed to create workflow test data with err %v", err) + require.NoError(t, err, "Failed to create workflow test data with err %v", err) } } } diff --git a/flyteadmin/tests/task_execution_test.go b/flyteadmin/tests/task_execution_test.go index f0fad9d7f4..e380104684 100644 --- a/flyteadmin/tests/task_execution_test.go +++ b/flyteadmin/tests/task_execution_test.go @@ -14,6 +14,7 @@ import ( ptypesStruct "github.com/golang/protobuf/ptypes/struct" "github.com/golang/protobuf/ptypes/timestamp" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "google.golang.org/grpc" "github.com/flyteorg/flyte/flyteadmin/pkg/manager/impl/testutils" @@ -51,7 +52,7 @@ func createTaskAndNodeExecution( Id: taskIdentifier, Spec: testutils.GetValidTaskRequest().Spec, }) - assert.Nil(t, err) + require.NoError(t, err) _, err = client.CreateNodeEvent(ctx, 
&admin.NodeExecutionEventRequest{ RequestId: "request id", @@ -64,7 +65,7 @@ func createTaskAndNodeExecution( OccurredAt: occurredAtProto, }, }) - assert.Nil(t, err) + require.NoError(t, err) } func TestCreateTaskExecution(t *testing.T) { diff --git a/flytearchives/LICENSE b/flytearchives/LICENSE deleted file mode 100644 index bed437514f..0000000000 --- a/flytearchives/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2019 Lyft, Inc. 
- - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/flytearchives/README.md b/flytearchives/README.md deleted file mode 100644 index 9c80ac52f3..0000000000 --- a/flytearchives/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# datacatalog -Service that catalogs data to allow for data discovery, lineage and tagging diff --git a/flytearchives/datacatalog.tar.gz b/flytearchives/datacatalog.tar.gz deleted file mode 100644 index 1cc1e56b46..0000000000 Binary files a/flytearchives/datacatalog.tar.gz and /dev/null differ diff --git a/flytecopilot/Dockerfile b/flytecopilot/Dockerfile deleted file mode 100644 index 741b668bfd..0000000000 --- a/flytecopilot/Dockerfile +++ /dev/null @@ -1,39 +0,0 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. -# ONLY EDIT THIS FILE FROM WITHIN THE 'LYFT/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/lyft/boilerplate/blob/master/Readme.rst - -FROM --platform=${BUILDPLATFORM} golang:1.19-alpine3.16 as builder - -ARG TARGETARCH -ENV GOARCH "${TARGETARCH}" -ENV GOOS linux - -RUN apk add git openssh-client make curl - -# COPY only the go mod files for efficient caching -COPY go.mod go.sum /go/src/github.com/lyft/flyteplugins/ -WORKDIR /go/src/github.com/lyft/flyteplugins - -# Pull dependencies -RUN go mod download - -# COPY the rest of the source code -COPY . 
/go/src/github.com/lyft/flyteplugins/ - -# This 'linux_compile' target should compile binaries to the /artifacts directory -# The main entrypoint should be compiled to /artifacts/flyteplugins -RUN make linux_compile - -# update the PATH to include the /artifacts directory -ENV PATH="/artifacts:${PATH}" - -# This will eventually move to centurylink/ca-certs:latest for minimum possible image size -FROM alpine:3.16 -LABEL org.opencontainers.image.source https://github.com/lyft/flyteplugins - -COPY --from=builder /artifacts /bin - -RUN apk --update add ca-certificates - -CMD ["flyte-copilot"] diff --git a/flytecopilot/cmd/root.go b/flytecopilot/cmd/root.go index 3e81f3655b..dd7683ef49 100644 --- a/flytecopilot/cmd/root.go +++ b/flytecopilot/cmd/root.go @@ -184,7 +184,7 @@ func init() { pflag.CommandLine.AddGoFlagSet(flag.CommandLine) err := flag.CommandLine.Parse([]string{}) if err != nil { - logger.Error(context.TODO(), "Error in initializing: %v", err) + logger.Errorf(context.TODO(), "Error in initializing: %v", err) os.Exit(-1) } labeled.SetMetricKeys(contextutils.ProjectKey, contextutils.DomainKey, contextutils.WorkflowIDKey, contextutils.TaskIDKey) diff --git a/flytecopilot/pull_request_template.md b/flytecopilot/pull_request_template.md deleted file mode 100644 index 9cdab99b46..0000000000 --- a/flytecopilot/pull_request_template.md +++ /dev/null @@ -1,35 +0,0 @@ -## _Read then delete this section_ - -_- Make sure to use a concise title for the pull-request._ - -_- Use #patch, #minor or #major in the pull-request title to bump the corresponding version. Otherwise, the patch version -will be bumped. [More details](https://github.com/marketplace/actions/github-tag-bump)_ - -# TL;DR -_Please replace this text with a description of what this PR accomplishes._ - -## Type - - [ ] Bug Fix - - [ ] Feature - - [ ] Plugin - -## Are all requirements met? 
- - - [ ] Code completed - - [ ] Smoke tested - - [ ] Unit tests added - - [ ] Code documentation added - - [ ] Any pending items have an associated Issue - -## Complete description - _How did you fix the bug, make the feature etc. Link to any design docs etc_ - -## Tracking Issue -_Remove the '*fixes*' keyword if there will be multiple PRs to fix the linked issue_ - -fixes https://github.com/flyteorg/flyte/issues/ - -## Follow-up issue -_NA_ -OR -_https://github.com/flyteorg/flyte/issues/_ diff --git a/flyteidl/clients/go/admin/client.go b/flyteidl/clients/go/admin/client.go index 85476ab8ba..9758bd9dec 100644 --- a/flyteidl/clients/go/admin/client.go +++ b/flyteidl/clients/go/admin/client.go @@ -6,7 +6,6 @@ import ( "crypto/x509" "errors" "fmt" - "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/artifact" grpcRetry "github.com/grpc-ecosystem/go-grpc-middleware/retry" grpcPrometheus "github.com/grpc-ecosystem/go-grpc-prometheus" @@ -31,7 +30,6 @@ type Clientset struct { identityServiceClient service.IdentityServiceClient dataProxyServiceClient service.DataProxyServiceClient signalServiceClient service.SignalServiceClient - artifactServiceClient artifact.ArtifactRegistryClient } // AdminClient retrieves the AdminServiceClient @@ -61,10 +59,6 @@ func (c Clientset) SignalServiceClient() service.SignalServiceClient { return c.signalServiceClient } -func (c Clientset) ArtifactServiceClient() artifact.ArtifactRegistryClient { - return c.artifactServiceClient -} - func NewAdminClient(ctx context.Context, conn *grpc.ClientConn) service.AdminServiceClient { logger.Infof(ctx, "Initialized Admin client") return service.NewAdminServiceClient(conn) @@ -205,7 +199,6 @@ func initializeClients(ctx context.Context, cfg *Config, tokenCache cache.TokenC cs.healthServiceClient = grpc_health_v1.NewHealthClient(adminConnection) cs.dataProxyServiceClient = service.NewDataProxyServiceClient(adminConnection) cs.signalServiceClient = service.NewSignalServiceClient(adminConnection) - 
cs.artifactServiceClient = artifact.NewArtifactRegistryClient(adminConnection) return &cs, nil } diff --git a/flyteidl/clients/go/admin/mocks/AdminServiceClient.go b/flyteidl/clients/go/admin/mocks/AdminServiceClient.go index 38100b9b71..3d4d3039b4 100644 --- a/flyteidl/clients/go/admin/mocks/AdminServiceClient.go +++ b/flyteidl/clients/go/admin/mocks/AdminServiceClient.go @@ -593,6 +593,54 @@ func (_m *AdminServiceClient) GetDescriptionEntity(ctx context.Context, in *admi return r0, r1 } +type AdminServiceClient_GetDynamicNodeWorkflow struct { + *mock.Call +} + +func (_m AdminServiceClient_GetDynamicNodeWorkflow) Return(_a0 *admin.DynamicNodeWorkflowResponse, _a1 error) *AdminServiceClient_GetDynamicNodeWorkflow { + return &AdminServiceClient_GetDynamicNodeWorkflow{Call: _m.Call.Return(_a0, _a1)} +} + +func (_m *AdminServiceClient) OnGetDynamicNodeWorkflow(ctx context.Context, in *admin.GetDynamicNodeWorkflowRequest, opts ...grpc.CallOption) *AdminServiceClient_GetDynamicNodeWorkflow { + c_call := _m.On("GetDynamicNodeWorkflow", ctx, in, opts) + return &AdminServiceClient_GetDynamicNodeWorkflow{Call: c_call} +} + +func (_m *AdminServiceClient) OnGetDynamicNodeWorkflowMatch(matchers ...interface{}) *AdminServiceClient_GetDynamicNodeWorkflow { + c_call := _m.On("GetDynamicNodeWorkflow", matchers...) + return &AdminServiceClient_GetDynamicNodeWorkflow{Call: c_call} +} + +// GetDynamicNodeWorkflow provides a mock function with given fields: ctx, in, opts +func (_m *AdminServiceClient) GetDynamicNodeWorkflow(ctx context.Context, in *admin.GetDynamicNodeWorkflowRequest, opts ...grpc.CallOption) (*admin.DynamicNodeWorkflowResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + var r0 *admin.DynamicNodeWorkflowResponse + if rf, ok := ret.Get(0).(func(context.Context, *admin.GetDynamicNodeWorkflowRequest, ...grpc.CallOption) *admin.DynamicNodeWorkflowResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*admin.DynamicNodeWorkflowResponse) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *admin.GetDynamicNodeWorkflowRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + type AdminServiceClient_GetExecution struct { *mock.Call } diff --git a/flyteidl/clients/go/admin/mocks/AdminServiceServer.go b/flyteidl/clients/go/admin/mocks/AdminServiceServer.go index 00d069d104..37d540f5ae 100644 --- a/flyteidl/clients/go/admin/mocks/AdminServiceServer.go +++ b/flyteidl/clients/go/admin/mocks/AdminServiceServer.go @@ -507,6 +507,47 @@ func (_m *AdminServiceServer) GetDescriptionEntity(_a0 context.Context, _a1 *adm return r0, r1 } +type AdminServiceServer_GetDynamicNodeWorkflow struct { + *mock.Call +} + +func (_m AdminServiceServer_GetDynamicNodeWorkflow) Return(_a0 *admin.DynamicNodeWorkflowResponse, _a1 error) *AdminServiceServer_GetDynamicNodeWorkflow { + return &AdminServiceServer_GetDynamicNodeWorkflow{Call: _m.Call.Return(_a0, _a1)} +} + +func (_m *AdminServiceServer) OnGetDynamicNodeWorkflow(_a0 context.Context, _a1 *admin.GetDynamicNodeWorkflowRequest) *AdminServiceServer_GetDynamicNodeWorkflow { + c_call := _m.On("GetDynamicNodeWorkflow", _a0, _a1) + return &AdminServiceServer_GetDynamicNodeWorkflow{Call: c_call} +} + +func (_m *AdminServiceServer) OnGetDynamicNodeWorkflowMatch(matchers ...interface{}) *AdminServiceServer_GetDynamicNodeWorkflow { + c_call := _m.On("GetDynamicNodeWorkflow", matchers...) 
+ return &AdminServiceServer_GetDynamicNodeWorkflow{Call: c_call} +} + +// GetDynamicNodeWorkflow provides a mock function with given fields: _a0, _a1 +func (_m *AdminServiceServer) GetDynamicNodeWorkflow(_a0 context.Context, _a1 *admin.GetDynamicNodeWorkflowRequest) (*admin.DynamicNodeWorkflowResponse, error) { + ret := _m.Called(_a0, _a1) + + var r0 *admin.DynamicNodeWorkflowResponse + if rf, ok := ret.Get(0).(func(context.Context, *admin.GetDynamicNodeWorkflowRequest) *admin.DynamicNodeWorkflowResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*admin.DynamicNodeWorkflowResponse) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *admin.GetDynamicNodeWorkflowRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + type AdminServiceServer_GetExecution struct { *mock.Call } diff --git a/flyteidl/clients/go/admin/mocks/AsyncAgentServiceClient.go b/flyteidl/clients/go/admin/mocks/AsyncAgentServiceClient.go index 4a2b2c25f3..f11ef1adfe 100644 --- a/flyteidl/clients/go/admin/mocks/AsyncAgentServiceClient.go +++ b/flyteidl/clients/go/admin/mocks/AsyncAgentServiceClient.go @@ -160,3 +160,99 @@ func (_m *AsyncAgentServiceClient) GetTask(ctx context.Context, in *admin.GetTas return r0, r1 } + +type AsyncAgentServiceClient_GetTaskLogs struct { + *mock.Call +} + +func (_m AsyncAgentServiceClient_GetTaskLogs) Return(_a0 *admin.GetTaskLogsResponse, _a1 error) *AsyncAgentServiceClient_GetTaskLogs { + return &AsyncAgentServiceClient_GetTaskLogs{Call: _m.Call.Return(_a0, _a1)} +} + +func (_m *AsyncAgentServiceClient) OnGetTaskLogs(ctx context.Context, in *admin.GetTaskLogsRequest, opts ...grpc.CallOption) *AsyncAgentServiceClient_GetTaskLogs { + c_call := _m.On("GetTaskLogs", ctx, in, opts) + return &AsyncAgentServiceClient_GetTaskLogs{Call: c_call} +} + +func (_m *AsyncAgentServiceClient) OnGetTaskLogsMatch(matchers ...interface{}) *AsyncAgentServiceClient_GetTaskLogs { + 
c_call := _m.On("GetTaskLogs", matchers...) + return &AsyncAgentServiceClient_GetTaskLogs{Call: c_call} +} + +// GetTaskLogs provides a mock function with given fields: ctx, in, opts +func (_m *AsyncAgentServiceClient) GetTaskLogs(ctx context.Context, in *admin.GetTaskLogsRequest, opts ...grpc.CallOption) (*admin.GetTaskLogsResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *admin.GetTaskLogsResponse + if rf, ok := ret.Get(0).(func(context.Context, *admin.GetTaskLogsRequest, ...grpc.CallOption) *admin.GetTaskLogsResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*admin.GetTaskLogsResponse) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *admin.GetTaskLogsRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +type AsyncAgentServiceClient_GetTaskMetrics struct { + *mock.Call +} + +func (_m AsyncAgentServiceClient_GetTaskMetrics) Return(_a0 *admin.GetTaskMetricsResponse, _a1 error) *AsyncAgentServiceClient_GetTaskMetrics { + return &AsyncAgentServiceClient_GetTaskMetrics{Call: _m.Call.Return(_a0, _a1)} +} + +func (_m *AsyncAgentServiceClient) OnGetTaskMetrics(ctx context.Context, in *admin.GetTaskMetricsRequest, opts ...grpc.CallOption) *AsyncAgentServiceClient_GetTaskMetrics { + c_call := _m.On("GetTaskMetrics", ctx, in, opts) + return &AsyncAgentServiceClient_GetTaskMetrics{Call: c_call} +} + +func (_m *AsyncAgentServiceClient) OnGetTaskMetricsMatch(matchers ...interface{}) *AsyncAgentServiceClient_GetTaskMetrics { + c_call := _m.On("GetTaskMetrics", matchers...) 
+ return &AsyncAgentServiceClient_GetTaskMetrics{Call: c_call} +} + +// GetTaskMetrics provides a mock function with given fields: ctx, in, opts +func (_m *AsyncAgentServiceClient) GetTaskMetrics(ctx context.Context, in *admin.GetTaskMetricsRequest, opts ...grpc.CallOption) (*admin.GetTaskMetricsResponse, error) { + _va := make([]interface{}, len(opts)) + for _i := range opts { + _va[_i] = opts[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, in) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 *admin.GetTaskMetricsResponse + if rf, ok := ret.Get(0).(func(context.Context, *admin.GetTaskMetricsRequest, ...grpc.CallOption) *admin.GetTaskMetricsResponse); ok { + r0 = rf(ctx, in, opts...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*admin.GetTaskMetricsResponse) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *admin.GetTaskMetricsRequest, ...grpc.CallOption) error); ok { + r1 = rf(ctx, in, opts...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} diff --git a/flyteidl/clients/go/admin/mocks/AsyncAgentServiceServer.go b/flyteidl/clients/go/admin/mocks/AsyncAgentServiceServer.go index 722d6731ff..1803e286eb 100644 --- a/flyteidl/clients/go/admin/mocks/AsyncAgentServiceServer.go +++ b/flyteidl/clients/go/admin/mocks/AsyncAgentServiceServer.go @@ -137,3 +137,85 @@ func (_m *AsyncAgentServiceServer) GetTask(_a0 context.Context, _a1 *admin.GetTa return r0, r1 } + +type AsyncAgentServiceServer_GetTaskLogs struct { + *mock.Call +} + +func (_m AsyncAgentServiceServer_GetTaskLogs) Return(_a0 *admin.GetTaskLogsResponse, _a1 error) *AsyncAgentServiceServer_GetTaskLogs { + return &AsyncAgentServiceServer_GetTaskLogs{Call: _m.Call.Return(_a0, _a1)} +} + +func (_m *AsyncAgentServiceServer) OnGetTaskLogs(_a0 context.Context, _a1 *admin.GetTaskLogsRequest) *AsyncAgentServiceServer_GetTaskLogs { + c_call := _m.On("GetTaskLogs", _a0, _a1) + return &AsyncAgentServiceServer_GetTaskLogs{Call: c_call} +} + +func (_m 
*AsyncAgentServiceServer) OnGetTaskLogsMatch(matchers ...interface{}) *AsyncAgentServiceServer_GetTaskLogs { + c_call := _m.On("GetTaskLogs", matchers...) + return &AsyncAgentServiceServer_GetTaskLogs{Call: c_call} +} + +// GetTaskLogs provides a mock function with given fields: _a0, _a1 +func (_m *AsyncAgentServiceServer) GetTaskLogs(_a0 context.Context, _a1 *admin.GetTaskLogsRequest) (*admin.GetTaskLogsResponse, error) { + ret := _m.Called(_a0, _a1) + + var r0 *admin.GetTaskLogsResponse + if rf, ok := ret.Get(0).(func(context.Context, *admin.GetTaskLogsRequest) *admin.GetTaskLogsResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*admin.GetTaskLogsResponse) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *admin.GetTaskLogsRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +type AsyncAgentServiceServer_GetTaskMetrics struct { + *mock.Call +} + +func (_m AsyncAgentServiceServer_GetTaskMetrics) Return(_a0 *admin.GetTaskMetricsResponse, _a1 error) *AsyncAgentServiceServer_GetTaskMetrics { + return &AsyncAgentServiceServer_GetTaskMetrics{Call: _m.Call.Return(_a0, _a1)} +} + +func (_m *AsyncAgentServiceServer) OnGetTaskMetrics(_a0 context.Context, _a1 *admin.GetTaskMetricsRequest) *AsyncAgentServiceServer_GetTaskMetrics { + c_call := _m.On("GetTaskMetrics", _a0, _a1) + return &AsyncAgentServiceServer_GetTaskMetrics{Call: c_call} +} + +func (_m *AsyncAgentServiceServer) OnGetTaskMetricsMatch(matchers ...interface{}) *AsyncAgentServiceServer_GetTaskMetrics { + c_call := _m.On("GetTaskMetrics", matchers...) 
+ return &AsyncAgentServiceServer_GetTaskMetrics{Call: c_call} +} + +// GetTaskMetrics provides a mock function with given fields: _a0, _a1 +func (_m *AsyncAgentServiceServer) GetTaskMetrics(_a0 context.Context, _a1 *admin.GetTaskMetricsRequest) (*admin.GetTaskMetricsResponse, error) { + ret := _m.Called(_a0, _a1) + + var r0 *admin.GetTaskMetricsResponse + if rf, ok := ret.Get(0).(func(context.Context, *admin.GetTaskMetricsRequest) *admin.GetTaskMetricsResponse); ok { + r0 = rf(_a0, _a1) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*admin.GetTaskMetricsResponse) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *admin.GetTaskMetricsRequest) error); ok { + r1 = rf(_a0, _a1) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} diff --git a/flyteidl/clients/go/admin/pkce/auth_flow_orchestrator.go b/flyteidl/clients/go/admin/pkce/auth_flow_orchestrator.go index 5d194851f2..f79217443c 100644 --- a/flyteidl/clients/go/admin/pkce/auth_flow_orchestrator.go +++ b/flyteidl/clients/go/admin/pkce/auth_flow_orchestrator.go @@ -77,7 +77,7 @@ func (f TokenOrchestrator) FetchTokenFromAuthFlow(ctx context.Context) (*oauth2. 
go func() { if err = server.ListenAndServe(); err != nil && err != http.ErrServerClosed { - logger.Fatal(ctx, "Couldn't start the callback http server on host %v due to %v", redirectURL.Host, + logger.Fatalf(ctx, "Couldn't start the callback http server on host %v due to %v", redirectURL.Host, err) } }() diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/agent.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/admin/agent.pb.cc index 61aebda83d..acb4b787d0 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/agent.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/agent.pb.cc @@ -25,7 +25,10 @@ extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fadmin_2fagent_2eproto ::google::proto extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fexecution_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_TaskLog_flyteidl_2fcore_2fexecution_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fidentifier_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_TaskExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fliterals_2eproto ::google::protobuf::internal::SCCInfo<10> scc_info_Literal_flyteidl_2fcore_2fliterals_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fmetrics_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_ExecutionMetricResult_flyteidl_2fcore_2fmetrics_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2ftasks_2eproto ::google::protobuf::internal::SCCInfo<10> scc_info_TaskTemplate_flyteidl_2fcore_2ftasks_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_google_2fprotobuf_2fduration_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_Duration_google_2fprotobuf_2fduration_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_google_2fprotobuf_2ftimestamp_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_Timestamp_google_2fprotobuf_2ftimestamp_2eproto; namespace flyteidl { namespace admin { class TaskExecutionMetadata_LabelsEntry_DoNotUseDefaultTypeInternal { @@ -94,6 +97,22 @@ class 
ListAgentsResponseDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed _instance; } _ListAgentsResponse_default_instance_; +class GetTaskMetricsRequestDefaultTypeInternal { + public: + ::google::protobuf::internal::ExplicitlyConstructed _instance; +} _GetTaskMetricsRequest_default_instance_; +class GetTaskMetricsResponseDefaultTypeInternal { + public: + ::google::protobuf::internal::ExplicitlyConstructed _instance; +} _GetTaskMetricsResponse_default_instance_; +class GetTaskLogsRequestDefaultTypeInternal { + public: + ::google::protobuf::internal::ExplicitlyConstructed _instance; +} _GetTaskLogsRequest_default_instance_; +class GetTaskLogsResponseDefaultTypeInternal { + public: + ::google::protobuf::internal::ExplicitlyConstructed _instance; +} _GetTaskLogsResponse_default_instance_; } // namespace admin } // namespace flyteidl static void InitDefaultsTaskExecutionMetadata_LabelsEntry_DoNotUse_flyteidl_2fadmin_2fagent_2eproto() { @@ -331,6 +350,65 @@ ::google::protobuf::internal::SCCInfo<1> scc_info_ListAgentsResponse_flyteidl_2f {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsListAgentsResponse_flyteidl_2fadmin_2fagent_2eproto}, { &scc_info_Agent_flyteidl_2fadmin_2fagent_2eproto.base,}}; +static void InitDefaultsGetTaskMetricsRequest_flyteidl_2fadmin_2fagent_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::flyteidl::admin::_GetTaskMetricsRequest_default_instance_; + new (ptr) ::flyteidl::admin::GetTaskMetricsRequest(); + ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); + } + ::flyteidl::admin::GetTaskMetricsRequest::InitAsDefaultInstance(); +} + +::google::protobuf::internal::SCCInfo<2> scc_info_GetTaskMetricsRequest_flyteidl_2fadmin_2fagent_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 2, InitDefaultsGetTaskMetricsRequest_flyteidl_2fadmin_2fagent_2eproto}, { + 
&scc_info_Timestamp_google_2fprotobuf_2ftimestamp_2eproto.base, + &scc_info_Duration_google_2fprotobuf_2fduration_2eproto.base,}}; + +static void InitDefaultsGetTaskMetricsResponse_flyteidl_2fadmin_2fagent_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::flyteidl::admin::_GetTaskMetricsResponse_default_instance_; + new (ptr) ::flyteidl::admin::GetTaskMetricsResponse(); + ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); + } + ::flyteidl::admin::GetTaskMetricsResponse::InitAsDefaultInstance(); +} + +::google::protobuf::internal::SCCInfo<1> scc_info_GetTaskMetricsResponse_flyteidl_2fadmin_2fagent_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsGetTaskMetricsResponse_flyteidl_2fadmin_2fagent_2eproto}, { + &scc_info_ExecutionMetricResult_flyteidl_2fcore_2fmetrics_2eproto.base,}}; + +static void InitDefaultsGetTaskLogsRequest_flyteidl_2fadmin_2fagent_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::flyteidl::admin::_GetTaskLogsRequest_default_instance_; + new (ptr) ::flyteidl::admin::GetTaskLogsRequest(); + ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); + } + ::flyteidl::admin::GetTaskLogsRequest::InitAsDefaultInstance(); +} + +::google::protobuf::internal::SCCInfo<0> scc_info_GetTaskLogsRequest_flyteidl_2fadmin_2fagent_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsGetTaskLogsRequest_flyteidl_2fadmin_2fagent_2eproto}, {}}; + +static void InitDefaultsGetTaskLogsResponse_flyteidl_2fadmin_2fagent_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::flyteidl::admin::_GetTaskLogsResponse_default_instance_; + new (ptr) ::flyteidl::admin::GetTaskLogsResponse(); + ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); + } + ::flyteidl::admin::GetTaskLogsResponse::InitAsDefaultInstance(); +} + +::google::protobuf::internal::SCCInfo<0> 
scc_info_GetTaskLogsResponse_flyteidl_2fadmin_2fagent_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsGetTaskLogsResponse_flyteidl_2fadmin_2fagent_2eproto}, {}}; + void InitDefaults_flyteidl_2fadmin_2fagent_2eproto() { ::google::protobuf::internal::InitSCC(&scc_info_TaskExecutionMetadata_LabelsEntry_DoNotUse_flyteidl_2fadmin_2fagent_2eproto.base); ::google::protobuf::internal::InitSCC(&scc_info_TaskExecutionMetadata_AnnotationsEntry_DoNotUse_flyteidl_2fadmin_2fagent_2eproto.base); @@ -348,9 +426,13 @@ void InitDefaults_flyteidl_2fadmin_2fagent_2eproto() { ::google::protobuf::internal::InitSCC(&scc_info_GetAgentResponse_flyteidl_2fadmin_2fagent_2eproto.base); ::google::protobuf::internal::InitSCC(&scc_info_ListAgentsRequest_flyteidl_2fadmin_2fagent_2eproto.base); ::google::protobuf::internal::InitSCC(&scc_info_ListAgentsResponse_flyteidl_2fadmin_2fagent_2eproto.base); + ::google::protobuf::internal::InitSCC(&scc_info_GetTaskMetricsRequest_flyteidl_2fadmin_2fagent_2eproto.base); + ::google::protobuf::internal::InitSCC(&scc_info_GetTaskMetricsResponse_flyteidl_2fadmin_2fagent_2eproto.base); + ::google::protobuf::internal::InitSCC(&scc_info_GetTaskLogsRequest_flyteidl_2fadmin_2fagent_2eproto.base); + ::google::protobuf::internal::InitSCC(&scc_info_GetTaskLogsResponse_flyteidl_2fadmin_2fagent_2eproto.base); } -::google::protobuf::Metadata file_level_metadata_flyteidl_2fadmin_2fagent_2eproto[16]; +::google::protobuf::Metadata file_level_metadata_flyteidl_2fadmin_2fagent_2eproto[20]; const ::google::protobuf::EnumDescriptor* file_level_enum_descriptors_flyteidl_2fadmin_2fagent_2eproto[1]; constexpr ::google::protobuf::ServiceDescriptor const** file_level_service_descriptors_flyteidl_2fadmin_2fagent_2eproto = nullptr; @@ -433,6 +515,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fagent_2eproto::o PROTOBUF_FIELD_OFFSET(::flyteidl::admin::Resource, outputs_), 
PROTOBUF_FIELD_OFFSET(::flyteidl::admin::Resource, message_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::Resource, log_links_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::Resource, phase_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::DeleteTaskRequest, _internal_metadata_), ~0u, // no _extensions_ @@ -475,6 +558,39 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fagent_2eproto::o ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ListAgentsResponse, agents_), + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskMetricsRequest, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskMetricsRequest, task_type_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskMetricsRequest, resource_meta_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskMetricsRequest, queries_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskMetricsRequest, start_time_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskMetricsRequest, end_time_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskMetricsRequest, step_), + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskMetricsResponse, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskMetricsResponse, results_), + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskLogsRequest, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskLogsRequest, task_type_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskLogsRequest, resource_meta_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskLogsRequest, lines_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskLogsRequest, token_), + ~0u, // no _has_bits_ + 
PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskLogsResponse, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskLogsResponse, results_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetTaskLogsResponse, token_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, 7, sizeof(::flyteidl::admin::TaskExecutionMetadata_LabelsEntry_DoNotUse)}, @@ -486,13 +602,17 @@ static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SE { 55, -1, sizeof(::flyteidl::admin::GetTaskRequest)}, { 62, -1, sizeof(::flyteidl::admin::GetTaskResponse)}, { 69, -1, sizeof(::flyteidl::admin::Resource)}, - { 78, -1, sizeof(::flyteidl::admin::DeleteTaskRequest)}, - { 85, -1, sizeof(::flyteidl::admin::DeleteTaskResponse)}, - { 90, -1, sizeof(::flyteidl::admin::Agent)}, - { 97, -1, sizeof(::flyteidl::admin::GetAgentRequest)}, - { 103, -1, sizeof(::flyteidl::admin::GetAgentResponse)}, - { 109, -1, sizeof(::flyteidl::admin::ListAgentsRequest)}, - { 114, -1, sizeof(::flyteidl::admin::ListAgentsResponse)}, + { 79, -1, sizeof(::flyteidl::admin::DeleteTaskRequest)}, + { 86, -1, sizeof(::flyteidl::admin::DeleteTaskResponse)}, + { 91, -1, sizeof(::flyteidl::admin::Agent)}, + { 98, -1, sizeof(::flyteidl::admin::GetAgentRequest)}, + { 104, -1, sizeof(::flyteidl::admin::GetAgentResponse)}, + { 110, -1, sizeof(::flyteidl::admin::ListAgentsRequest)}, + { 115, -1, sizeof(::flyteidl::admin::ListAgentsResponse)}, + { 121, -1, sizeof(::flyteidl::admin::GetTaskMetricsRequest)}, + { 132, -1, sizeof(::flyteidl::admin::GetTaskMetricsResponse)}, + { 138, -1, sizeof(::flyteidl::admin::GetTaskLogsRequest)}, + { 147, -1, sizeof(::flyteidl::admin::GetTaskLogsResponse)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -512,79 +632,99 @@ static ::google::protobuf::Message const * const 
file_default_instances[] = { reinterpret_cast(&::flyteidl::admin::_GetAgentResponse_default_instance_), reinterpret_cast(&::flyteidl::admin::_ListAgentsRequest_default_instance_), reinterpret_cast(&::flyteidl::admin::_ListAgentsResponse_default_instance_), + reinterpret_cast(&::flyteidl::admin::_GetTaskMetricsRequest_default_instance_), + reinterpret_cast(&::flyteidl::admin::_GetTaskMetricsResponse_default_instance_), + reinterpret_cast(&::flyteidl::admin::_GetTaskLogsRequest_default_instance_), + reinterpret_cast(&::flyteidl::admin::_GetTaskLogsResponse_default_instance_), }; ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_flyteidl_2fadmin_2fagent_2eproto = { {}, AddDescriptors_flyteidl_2fadmin_2fagent_2eproto, "flyteidl/admin/agent.proto", schemas, file_default_instances, TableStruct_flyteidl_2fadmin_2fagent_2eproto::offsets, - file_level_metadata_flyteidl_2fadmin_2fagent_2eproto, 16, file_level_enum_descriptors_flyteidl_2fadmin_2fagent_2eproto, file_level_service_descriptors_flyteidl_2fadmin_2fagent_2eproto, + file_level_metadata_flyteidl_2fadmin_2fagent_2eproto, 20, file_level_enum_descriptors_flyteidl_2fadmin_2fagent_2eproto, file_level_service_descriptors_flyteidl_2fadmin_2fagent_2eproto, }; const char descriptor_table_protodef_flyteidl_2fadmin_2fagent_2eproto[] = "\n\032flyteidl/admin/agent.proto\022\016flyteidl.a" "dmin\032\034flyteidl/core/literals.proto\032\031flyt" - "eidl/core/tasks.proto\032\035flyteidl/core/int" - "erface.proto\032\036flyteidl/core/identifier.p" - "roto\032\035flyteidl/core/execution.proto\"\232\004\n\025" - "TaskExecutionMetadata\022A\n\021task_execution_" - "id\030\001 \001(\0132&.flyteidl.core.TaskExecutionId" - "entifier\022\021\n\tnamespace\030\002 \001(\t\022A\n\006labels\030\003 " - "\003(\01321.flyteidl.admin.TaskExecutionMetada" - "ta.LabelsEntry\022K\n\013annotations\030\004 \003(\01326.fl" - "yteidl.admin.TaskExecutionMetadata.Annot" - "ationsEntry\022\033\n\023k8s_service_account\030\005 
\001(\t" - "\022^\n\025environment_variables\030\006 \003(\0132\?.flytei" - "dl.admin.TaskExecutionMetadata.Environme" - "ntVariablesEntry\032-\n\013LabelsEntry\022\013\n\003key\030\001" - " \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\0322\n\020AnnotationsEn" - "try\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\032;\n\031E" - "nvironmentVariablesEntry\022\013\n\003key\030\001 \001(\t\022\r\n" - "\005value\030\002 \001(\t:\0028\001\"\314\001\n\021CreateTaskRequest\022)" - "\n\006inputs\030\001 \001(\0132\031.flyteidl.core.LiteralMa" - "p\022-\n\010template\030\002 \001(\0132\033.flyteidl.core.Task" - "Template\022\025\n\routput_prefix\030\003 \001(\t\022F\n\027task_" - "execution_metadata\030\004 \001(\0132%.flyteidl.admi" - "n.TaskExecutionMetadata\"b\n\022CreateTaskRes" - "ponse\022\027\n\rresource_meta\030\001 \001(\014H\000\022,\n\010resour" - "ce\030\002 \001(\0132\030.flyteidl.admin.ResourceH\000B\005\n\003" - "res\":\n\016GetTaskRequest\022\021\n\ttask_type\030\001 \001(\t" - "\022\025\n\rresource_meta\030\002 \001(\014\"h\n\017GetTaskRespon" - "se\022*\n\010resource\030\001 \001(\0132\030.flyteidl.admin.Re" - "source\022)\n\tlog_links\030\002 \003(\0132\026.flyteidl.cor" - "e.TaskLog\"\230\001\n\010Resource\022$\n\005state\030\001 \001(\0162\025." - "flyteidl.admin.State\022*\n\007outputs\030\002 \001(\0132\031." 
- "flyteidl.core.LiteralMap\022\017\n\007message\030\003 \001(" - "\t\022)\n\tlog_links\030\004 \003(\0132\026.flyteidl.core.Tas" - "kLog\"=\n\021DeleteTaskRequest\022\021\n\ttask_type\030\001" - " \001(\t\022\025\n\rresource_meta\030\002 \001(\014\"\024\n\022DeleteTas" - "kResponse\"3\n\005Agent\022\014\n\004name\030\001 \001(\t\022\034\n\024supp" - "orted_task_types\030\002 \003(\t\"\037\n\017GetAgentReques" - "t\022\014\n\004name\030\001 \001(\t\"8\n\020GetAgentResponse\022$\n\005a" - "gent\030\001 \001(\0132\025.flyteidl.admin.Agent\"\023\n\021Lis" - "tAgentsRequest\";\n\022ListAgentsResponse\022%\n\006" - "agents\030\001 \003(\0132\025.flyteidl.admin.Agent*^\n\005S" - "tate\022\025\n\021RETRYABLE_FAILURE\020\000\022\025\n\021PERMANENT" - "_FAILURE\020\001\022\013\n\007PENDING\020\002\022\013\n\007RUNNING\020\003\022\r\n\t" - "SUCCEEDED\020\004B=Z;github.com/flyteorg/flyte" - "/flyteidl/gen/pb-go/flyteidl/adminb\006prot" - "o3" + "eidl/core/tasks.proto\032\036flyteidl/core/ide" + "ntifier.proto\032\035flyteidl/core/execution.p" + "roto\032\033flyteidl/core/metrics.proto\032\036googl" + "e/protobuf/duration.proto\032\037google/protob" + "uf/timestamp.proto\"\232\004\n\025TaskExecutionMeta" + "data\022A\n\021task_execution_id\030\001 \001(\0132&.flytei" + "dl.core.TaskExecutionIdentifier\022\021\n\tnames" + "pace\030\002 \001(\t\022A\n\006labels\030\003 \003(\01321.flyteidl.ad" + "min.TaskExecutionMetadata.LabelsEntry\022K\n" + "\013annotations\030\004 \003(\01326.flyteidl.admin.Task" + "ExecutionMetadata.AnnotationsEntry\022\033\n\023k8" + "s_service_account\030\005 \001(\t\022^\n\025environment_v" + "ariables\030\006 \003(\0132\?.flyteidl.admin.TaskExec" + "utionMetadata.EnvironmentVariablesEntry\032" + "-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001" + "(\t:\0028\001\0322\n\020AnnotationsEntry\022\013\n\003key\030\001 \001(\t\022" + "\r\n\005value\030\002 \001(\t:\0028\001\032;\n\031EnvironmentVariabl" + 
"esEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"" + "\314\001\n\021CreateTaskRequest\022)\n\006inputs\030\001 \001(\0132\031." + "flyteidl.core.LiteralMap\022-\n\010template\030\002 \001" + "(\0132\033.flyteidl.core.TaskTemplate\022\025\n\routpu" + "t_prefix\030\003 \001(\t\022F\n\027task_execution_metadat" + "a\030\004 \001(\0132%.flyteidl.admin.TaskExecutionMe" + "tadata\"b\n\022CreateTaskResponse\022\027\n\rresource" + "_meta\030\001 \001(\014H\000\022,\n\010resource\030\002 \001(\0132\030.flytei" + "dl.admin.ResourceH\000B\005\n\003res\":\n\016GetTaskReq" + "uest\022\021\n\ttask_type\030\001 \001(\t\022\025\n\rresource_meta" + "\030\002 \001(\014\"h\n\017GetTaskResponse\022*\n\010resource\030\001 " + "\001(\0132\030.flyteidl.admin.Resource\022)\n\tlog_lin" + "ks\030\002 \003(\0132\026.flyteidl.core.TaskLog\"\317\001\n\010Res" + "ource\022(\n\005state\030\001 \001(\0162\025.flyteidl.admin.St" + "ateB\002\030\001\022*\n\007outputs\030\002 \001(\0132\031.flyteidl.core" + ".LiteralMap\022\017\n\007message\030\003 \001(\t\022)\n\tlog_link" + "s\030\004 \003(\0132\026.flyteidl.core.TaskLog\0221\n\005phase" + "\030\005 \001(\0162\".flyteidl.core.TaskExecution.Pha" + "se\"=\n\021DeleteTaskRequest\022\021\n\ttask_type\030\001 \001" + "(\t\022\025\n\rresource_meta\030\002 \001(\014\"\024\n\022DeleteTaskR" + "esponse\"3\n\005Agent\022\014\n\004name\030\001 \001(\t\022\034\n\024suppor" + "ted_task_types\030\002 \003(\t\"\037\n\017GetAgentRequest\022" + "\014\n\004name\030\001 \001(\t\"8\n\020GetAgentResponse\022$\n\005age" + "nt\030\001 \001(\0132\025.flyteidl.admin.Agent\"\023\n\021ListA" + "gentsRequest\";\n\022ListAgentsResponse\022%\n\006ag" + "ents\030\001 \003(\0132\025.flyteidl.admin.Agent\"\331\001\n\025Ge" + "tTaskMetricsRequest\022\021\n\ttask_type\030\001 \001(\t\022\025" + "\n\rresource_meta\030\002 \001(\014\022\017\n\007queries\030\003 \003(\t\022." 
+ "\n\nstart_time\030\004 \001(\0132\032.google.protobuf.Tim" + "estamp\022,\n\010end_time\030\005 \001(\0132\032.google.protob" + "uf.Timestamp\022\'\n\004step\030\006 \001(\0132\031.google.prot" + "obuf.Duration\"O\n\026GetTaskMetricsResponse\022" + "5\n\007results\030\001 \003(\0132$.flyteidl.core.Executi" + "onMetricResult\"\\\n\022GetTaskLogsRequest\022\021\n\t" + "task_type\030\001 \001(\t\022\025\n\rresource_meta\030\002 \001(\014\022\r" + "\n\005lines\030\003 \001(\004\022\r\n\005token\030\004 \001(\t\"5\n\023GetTaskL" + "ogsResponse\022\017\n\007results\030\001 \003(\t\022\r\n\005token\030\002 " + "\001(\t*^\n\005State\022\025\n\021RETRYABLE_FAILURE\020\000\022\025\n\021P" + "ERMANENT_FAILURE\020\001\022\013\n\007PENDING\020\002\022\013\n\007RUNNI" + "NG\020\003\022\r\n\tSUCCEEDED\020\004B=Z;github.com/flyteo" + "rg/flyte/flyteidl/gen/pb-go/flyteidl/adm" + "inb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fadmin_2fagent_2eproto = { false, InitDefaults_flyteidl_2fadmin_2fagent_2eproto, descriptor_table_protodef_flyteidl_2fadmin_2fagent_2eproto, - "flyteidl/admin/agent.proto", &assign_descriptors_table_flyteidl_2fadmin_2fagent_2eproto, 1842, + "flyteidl/admin/agent.proto", &assign_descriptors_table_flyteidl_2fadmin_2fagent_2eproto, 2410, }; void AddDescriptors_flyteidl_2fadmin_2fagent_2eproto() { - static constexpr ::google::protobuf::internal::InitFunc deps[5] = + static constexpr ::google::protobuf::internal::InitFunc deps[7] = { ::AddDescriptors_flyteidl_2fcore_2fliterals_2eproto, ::AddDescriptors_flyteidl_2fcore_2ftasks_2eproto, - ::AddDescriptors_flyteidl_2fcore_2finterface_2eproto, ::AddDescriptors_flyteidl_2fcore_2fidentifier_2eproto, ::AddDescriptors_flyteidl_2fcore_2fexecution_2eproto, + ::AddDescriptors_flyteidl_2fcore_2fmetrics_2eproto, + ::AddDescriptors_google_2fprotobuf_2fduration_2eproto, + ::AddDescriptors_google_2fprotobuf_2ftimestamp_2eproto, }; - 
::google::protobuf::internal::AddDescriptors(&descriptor_table_flyteidl_2fadmin_2fagent_2eproto, deps, 5); + ::google::protobuf::internal::AddDescriptors(&descriptor_table_flyteidl_2fadmin_2fagent_2eproto, deps, 7); } // Force running AddDescriptors() at dynamic initialization time. @@ -3304,6 +3444,7 @@ const int Resource::kStateFieldNumber; const int Resource::kOutputsFieldNumber; const int Resource::kMessageFieldNumber; const int Resource::kLogLinksFieldNumber; +const int Resource::kPhaseFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 Resource::Resource() @@ -3325,7 +3466,9 @@ Resource::Resource(const Resource& from) } else { outputs_ = nullptr; } - state_ = from.state_; + ::memcpy(&state_, &from.state_, + static_cast(reinterpret_cast(&phase_) - + reinterpret_cast(&state_)) + sizeof(phase_)); // @@protoc_insertion_point(copy_constructor:flyteidl.admin.Resource) } @@ -3334,8 +3477,8 @@ void Resource::SharedCtor() { &scc_info_Resource_flyteidl_2fadmin_2fagent_2eproto.base); message_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); ::memset(&outputs_, 0, static_cast( - reinterpret_cast(&state_) - - reinterpret_cast(&outputs_)) + sizeof(state_)); + reinterpret_cast(&phase_) - + reinterpret_cast(&outputs_)) + sizeof(phase_)); } Resource::~Resource() { @@ -3369,7 +3512,9 @@ void Resource::Clear() { delete outputs_; } outputs_ = nullptr; - state_ = 0; + ::memset(&state_, 0, static_cast( + reinterpret_cast(&phase_) - + reinterpret_cast(&state_)) + sizeof(phase_)); _internal_metadata_.Clear(); } @@ -3386,7 +3531,7 @@ const char* Resource::_InternalParse(const char* begin, const char* end, void* o ptr = ::google::protobuf::io::Parse32(ptr, &tag); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); switch (tag >> 3) { - // .flyteidl.admin.State state = 1; + // .flyteidl.admin.State state = 1 [deprecated = true]; case 1: { if (static_cast<::google::protobuf::uint8>(tag) != 8) goto handle_unusual; ::google::protobuf::uint64 val = 
::google::protobuf::internal::ReadVarint(&ptr); @@ -3439,6 +3584,14 @@ const char* Resource::_InternalParse(const char* begin, const char* end, void* o } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 34 && (ptr += 1)); break; } + // .flyteidl.core.TaskExecution.Phase phase = 5; + case 5: { + if (static_cast<::google::protobuf::uint8>(tag) != 40) goto handle_unusual; + ::google::protobuf::uint64 val = ::google::protobuf::internal::ReadVarint(&ptr); + msg->set_phase(static_cast<::flyteidl::core::TaskExecution_Phase>(val)); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -3473,7 +3626,7 @@ bool Resource::MergePartialFromCodedStream( tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.admin.State state = 1; + // .flyteidl.admin.State state = 1 [deprecated = true]; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == (8 & 0xFF)) { int value = 0; @@ -3524,6 +3677,20 @@ bool Resource::MergePartialFromCodedStream( break; } + // .flyteidl.core.TaskExecution.Phase phase = 5; + case 5: { + if (static_cast< ::google::protobuf::uint8>(tag) == (40 & 0xFF)) { + int value = 0; + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>( + input, &value))); + set_phase(static_cast< ::flyteidl::core::TaskExecution_Phase >(value)); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -3551,7 +3718,7 @@ void Resource::SerializeWithCachedSizes( ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; - // .flyteidl.admin.State state = 1; + // .flyteidl.admin.State state = 1 [deprecated = true]; if (this->state() != 0) { ::google::protobuf::internal::WireFormatLite::WriteEnum( 1, this->state(), output); @@ -3582,6 +3749,12 @@ void 
Resource::SerializeWithCachedSizes( output); } + // .flyteidl.core.TaskExecution.Phase phase = 5; + if (this->phase() != 0) { + ::google::protobuf::internal::WireFormatLite::WriteEnum( + 5, this->phase(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -3595,7 +3768,7 @@ ::google::protobuf::uint8* Resource::InternalSerializeWithCachedSizesToArray( ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; - // .flyteidl.admin.State state = 1; + // .flyteidl.admin.State state = 1 [deprecated = true]; if (this->state() != 0) { target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray( 1, this->state(), target); @@ -3627,6 +3800,12 @@ ::google::protobuf::uint8* Resource::InternalSerializeWithCachedSizesToArray( 4, this->log_links(static_cast(i)), target); } + // .flyteidl.core.TaskExecution.Phase phase = 5; + if (this->phase() != 0) { + target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray( + 5, this->phase(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -3673,12 +3852,18 @@ size_t Resource::ByteSizeLong() const { *outputs_); } - // .flyteidl.admin.State state = 1; + // .flyteidl.admin.State state = 1 [deprecated = true]; if (this->state() != 0) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::EnumSize(this->state()); } + // .flyteidl.core.TaskExecution.Phase phase = 5; + if (this->phase() != 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::EnumSize(this->phase()); + } + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; @@ -3717,6 +3902,9 @@ void Resource::MergeFrom(const Resource& from) { if (from.state() != 0) { 
set_state(from.state()); } + if (from.phase() != 0) { + set_phase(from.phase()); + } } void Resource::CopyFrom(const ::google::protobuf::Message& from) { @@ -3749,6 +3937,7 @@ void Resource::InternalSwap(Resource* other) { GetArenaNoVirtual()); swap(outputs_, other->outputs_); swap(state_, other->state_); + swap(phase_, other->phase_); } ::google::protobuf::Metadata Resource::GetMetadata() const { @@ -5761,58 +5950,1837 @@ ::google::protobuf::Metadata ListAgentsResponse::GetMetadata() const { } -// @@protoc_insertion_point(namespace_scope) -} // namespace admin -} // namespace flyteidl -namespace google { -namespace protobuf { -template<> PROTOBUF_NOINLINE ::flyteidl::admin::TaskExecutionMetadata_LabelsEntry_DoNotUse* Arena::CreateMaybeMessage< ::flyteidl::admin::TaskExecutionMetadata_LabelsEntry_DoNotUse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::TaskExecutionMetadata_LabelsEntry_DoNotUse >(arena); +// =================================================================== + +void GetTaskMetricsRequest::InitAsDefaultInstance() { + ::flyteidl::admin::_GetTaskMetricsRequest_default_instance_._instance.get_mutable()->start_time_ = const_cast< ::google::protobuf::Timestamp*>( + ::google::protobuf::Timestamp::internal_default_instance()); + ::flyteidl::admin::_GetTaskMetricsRequest_default_instance_._instance.get_mutable()->end_time_ = const_cast< ::google::protobuf::Timestamp*>( + ::google::protobuf::Timestamp::internal_default_instance()); + ::flyteidl::admin::_GetTaskMetricsRequest_default_instance_._instance.get_mutable()->step_ = const_cast< ::google::protobuf::Duration*>( + ::google::protobuf::Duration::internal_default_instance()); } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::TaskExecutionMetadata_AnnotationsEntry_DoNotUse* Arena::CreateMaybeMessage< ::flyteidl::admin::TaskExecutionMetadata_AnnotationsEntry_DoNotUse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::TaskExecutionMetadata_AnnotationsEntry_DoNotUse 
>(arena); +class GetTaskMetricsRequest::HasBitSetters { + public: + static const ::google::protobuf::Timestamp& start_time(const GetTaskMetricsRequest* msg); + static const ::google::protobuf::Timestamp& end_time(const GetTaskMetricsRequest* msg); + static const ::google::protobuf::Duration& step(const GetTaskMetricsRequest* msg); +}; + +const ::google::protobuf::Timestamp& +GetTaskMetricsRequest::HasBitSetters::start_time(const GetTaskMetricsRequest* msg) { + return *msg->start_time_; } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::TaskExecutionMetadata_EnvironmentVariablesEntry_DoNotUse* Arena::CreateMaybeMessage< ::flyteidl::admin::TaskExecutionMetadata_EnvironmentVariablesEntry_DoNotUse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::TaskExecutionMetadata_EnvironmentVariablesEntry_DoNotUse >(arena); +const ::google::protobuf::Timestamp& +GetTaskMetricsRequest::HasBitSetters::end_time(const GetTaskMetricsRequest* msg) { + return *msg->end_time_; } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::TaskExecutionMetadata* Arena::CreateMaybeMessage< ::flyteidl::admin::TaskExecutionMetadata >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::TaskExecutionMetadata >(arena); +const ::google::protobuf::Duration& +GetTaskMetricsRequest::HasBitSetters::step(const GetTaskMetricsRequest* msg) { + return *msg->step_; } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::CreateTaskRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::CreateTaskRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::CreateTaskRequest >(arena); +void GetTaskMetricsRequest::clear_start_time() { + if (GetArenaNoVirtual() == nullptr && start_time_ != nullptr) { + delete start_time_; + } + start_time_ = nullptr; } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::CreateTaskResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::CreateTaskResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::CreateTaskResponse 
>(arena); +void GetTaskMetricsRequest::clear_end_time() { + if (GetArenaNoVirtual() == nullptr && end_time_ != nullptr) { + delete end_time_; + } + end_time_ = nullptr; } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetTaskRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::GetTaskRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::GetTaskRequest >(arena); +void GetTaskMetricsRequest::clear_step() { + if (GetArenaNoVirtual() == nullptr && step_ != nullptr) { + delete step_; + } + step_ = nullptr; } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetTaskResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::GetTaskResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::GetTaskResponse >(arena); +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int GetTaskMetricsRequest::kTaskTypeFieldNumber; +const int GetTaskMetricsRequest::kResourceMetaFieldNumber; +const int GetTaskMetricsRequest::kQueriesFieldNumber; +const int GetTaskMetricsRequest::kStartTimeFieldNumber; +const int GetTaskMetricsRequest::kEndTimeFieldNumber; +const int GetTaskMetricsRequest::kStepFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +GetTaskMetricsRequest::GetTaskMetricsRequest() + : ::google::protobuf::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:flyteidl.admin.GetTaskMetricsRequest) } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::Resource* Arena::CreateMaybeMessage< ::flyteidl::admin::Resource >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::Resource >(arena); +GetTaskMetricsRequest::GetTaskMetricsRequest(const GetTaskMetricsRequest& from) + : ::google::protobuf::Message(), + _internal_metadata_(nullptr), + queries_(from.queries_) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + task_type_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.task_type().size() > 0) { + 
task_type_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.task_type_); + } + resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.resource_meta().size() > 0) { + resource_meta_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.resource_meta_); + } + if (from.has_start_time()) { + start_time_ = new ::google::protobuf::Timestamp(*from.start_time_); + } else { + start_time_ = nullptr; + } + if (from.has_end_time()) { + end_time_ = new ::google::protobuf::Timestamp(*from.end_time_); + } else { + end_time_ = nullptr; + } + if (from.has_step()) { + step_ = new ::google::protobuf::Duration(*from.step_); + } else { + step_ = nullptr; + } + // @@protoc_insertion_point(copy_constructor:flyteidl.admin.GetTaskMetricsRequest) } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::DeleteTaskRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::DeleteTaskRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::DeleteTaskRequest >(arena); + +void GetTaskMetricsRequest::SharedCtor() { + ::google::protobuf::internal::InitSCC( + &scc_info_GetTaskMetricsRequest_flyteidl_2fadmin_2fagent_2eproto.base); + task_type_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + ::memset(&start_time_, 0, static_cast( + reinterpret_cast(&step_) - + reinterpret_cast(&start_time_)) + sizeof(step_)); } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::DeleteTaskResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::DeleteTaskResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::DeleteTaskResponse >(arena); + +GetTaskMetricsRequest::~GetTaskMetricsRequest() { + // @@protoc_insertion_point(destructor:flyteidl.admin.GetTaskMetricsRequest) + SharedDtor(); } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::Agent* 
Arena::CreateMaybeMessage< ::flyteidl::admin::Agent >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::Agent >(arena); + +void GetTaskMetricsRequest::SharedDtor() { + task_type_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + resource_meta_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (this != internal_default_instance()) delete start_time_; + if (this != internal_default_instance()) delete end_time_; + if (this != internal_default_instance()) delete step_; } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetAgentRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::GetAgentRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::GetAgentRequest >(arena); + +void GetTaskMetricsRequest::SetCachedSize(int size) const { + _cached_size_.Set(size); } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetAgentResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::GetAgentResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::GetAgentResponse >(arena); +const GetTaskMetricsRequest& GetTaskMetricsRequest::default_instance() { + ::google::protobuf::internal::InitSCC(&::scc_info_GetTaskMetricsRequest_flyteidl_2fadmin_2fagent_2eproto.base); + return *internal_default_instance(); } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::ListAgentsRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::ListAgentsRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::ListAgentsRequest >(arena); + + +void GetTaskMetricsRequest::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.admin.GetTaskMetricsRequest) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + queries_.Clear(); + task_type_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + 
resource_meta_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (GetArenaNoVirtual() == nullptr && start_time_ != nullptr) { + delete start_time_; + } + start_time_ = nullptr; + if (GetArenaNoVirtual() == nullptr && end_time_ != nullptr) { + delete end_time_; + } + end_time_ = nullptr; + if (GetArenaNoVirtual() == nullptr && step_ != nullptr) { + delete step_; + } + step_ = nullptr; + _internal_metadata_.Clear(); } -template<> PROTOBUF_NOINLINE ::flyteidl::admin::ListAgentsResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::ListAgentsResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::admin::ListAgentsResponse >(arena); + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* GetTaskMetricsRequest::_InternalParse(const char* begin, const char* end, void* object, + ::google::protobuf::internal::ParseContext* ctx) { + auto msg = static_cast(object); + ::google::protobuf::int32 size; (void)size; + int depth; (void)depth; + ::google::protobuf::uint32 tag; + ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; + auto ptr = begin; + while (ptr < end) { + ptr = ::google::protobuf::io::Parse32(ptr, &tag); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + switch (tag >> 3) { + // string task_type = 1; + case 1: { + if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.GetTaskMetricsRequest.task_type"); + object = msg->mutable_task_type(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } + 
// bytes resource_meta = 2; + case 2: { + if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + object = msg->mutable_resource_meta(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParser; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheck(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } + // repeated string queries = 3; + case 3: { + if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; + do { + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.GetTaskMetricsRequest.queries"); + object = msg->add_queries(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + if (ptr >= end) break; + } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 26 && (ptr += 1)); + break; + } + // .google.protobuf.Timestamp start_time = 4; + case 4: { + if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::google::protobuf::Timestamp::_InternalParse; + object = msg->mutable_start_time(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, 
ptr)); + break; + } + // .google.protobuf.Timestamp end_time = 5; + case 5: { + if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::google::protobuf::Timestamp::_InternalParse; + object = msg->mutable_end_time(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + break; + } + // .google.protobuf.Duration step = 6; + case 6: { + if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::google::protobuf::Duration::_InternalParse; + object = msg->mutable_step(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + break; + } + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->EndGroup(tag); + return ptr; + } + auto res = UnknownFieldParse(tag, {_InternalParse, msg}, + ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); + ptr = res.first; + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); + if (res.second) return ptr; + } + } // switch + } // while + return ptr; +string_till_end: + static_cast<::std::string*>(object)->clear(); + static_cast<::std::string*>(object)->reserve(size); + goto len_delim_till_end; +len_delim_till_end: + return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, + {parser_till_end, object}, size); +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool GetTaskMetricsRequest::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // 
@@protoc_insertion_point(parse_start:flyteidl.admin.GetTaskMetricsRequest) + for (;;) { + ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // string task_type = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_task_type())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->task_type().data(), static_cast(this->task_type().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.GetTaskMetricsRequest.task_type")); + } else { + goto handle_unusual; + } + break; + } + + // bytes resource_meta = 2; + case 2: { + if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadBytes( + input, this->mutable_resource_meta())); + } else { + goto handle_unusual; + } + break; + } + + // repeated string queries = 3; + case 3: { + if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->add_queries())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->queries(this->queries_size() - 1).data(), + static_cast(this->queries(this->queries_size() - 1).length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.GetTaskMetricsRequest.queries")); + } else { + goto handle_unusual; + } + break; + } + + // .google.protobuf.Timestamp start_time = 4; + case 4: { + if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_start_time())); + } else { + goto handle_unusual; + } + break; + } + + // .google.protobuf.Timestamp end_time = 5; + case 5: { + if 
(static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_end_time())); + } else { + goto handle_unusual; + } + break; + } + + // .google.protobuf.Duration step = 6; + case 6: { + if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_step())); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:flyteidl.admin.GetTaskMetricsRequest) + return true; +failure: + // @@protoc_insertion_point(parse_failure:flyteidl.admin.GetTaskMetricsRequest) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void GetTaskMetricsRequest::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:flyteidl.admin.GetTaskMetricsRequest) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // string task_type = 1; + if (this->task_type().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->task_type().data(), static_cast(this->task_type().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskMetricsRequest.task_type"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 1, this->task_type(), output); + } + + // bytes resource_meta = 2; + if (this->resource_meta().size() > 0) { + ::google::protobuf::internal::WireFormatLite::WriteBytesMaybeAliased( + 2, this->resource_meta(), output); + } + + // repeated string queries = 3; + for (int i = 0, n = this->queries_size(); i < n; i++) { + 
::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->queries(i).data(), static_cast(this->queries(i).length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskMetricsRequest.queries"); + ::google::protobuf::internal::WireFormatLite::WriteString( + 3, this->queries(i), output); + } + + // .google.protobuf.Timestamp start_time = 4; + if (this->has_start_time()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 4, HasBitSetters::start_time(this), output); + } + + // .google.protobuf.Timestamp end_time = 5; + if (this->has_end_time()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 5, HasBitSetters::end_time(this), output); + } + + // .google.protobuf.Duration step = 6; + if (this->has_step()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 6, HasBitSetters::step(this), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:flyteidl.admin.GetTaskMetricsRequest) +} + +::google::protobuf::uint8* GetTaskMetricsRequest::InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:flyteidl.admin.GetTaskMetricsRequest) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // string task_type = 1; + if (this->task_type().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->task_type().data(), static_cast(this->task_type().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskMetricsRequest.task_type"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->task_type(), target); + } + + // bytes resource_meta = 2; + if (this->resource_meta().size() > 
0) { + target = + ::google::protobuf::internal::WireFormatLite::WriteBytesToArray( + 2, this->resource_meta(), target); + } + + // repeated string queries = 3; + for (int i = 0, n = this->queries_size(); i < n; i++) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->queries(i).data(), static_cast(this->queries(i).length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskMetricsRequest.queries"); + target = ::google::protobuf::internal::WireFormatLite:: + WriteStringToArray(3, this->queries(i), target); + } + + // .google.protobuf.Timestamp start_time = 4; + if (this->has_start_time()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 4, HasBitSetters::start_time(this), target); + } + + // .google.protobuf.Timestamp end_time = 5; + if (this->has_end_time()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 5, HasBitSetters::end_time(this), target); + } + + // .google.protobuf.Duration step = 6; + if (this->has_step()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 6, HasBitSetters::step(this), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:flyteidl.admin.GetTaskMetricsRequest) + return target; +} + +size_t GetTaskMetricsRequest::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:flyteidl.admin.GetTaskMetricsRequest) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) 
cached_has_bits; + + // repeated string queries = 3; + total_size += 1 * + ::google::protobuf::internal::FromIntSize(this->queries_size()); + for (int i = 0, n = this->queries_size(); i < n; i++) { + total_size += ::google::protobuf::internal::WireFormatLite::StringSize( + this->queries(i)); + } + + // string task_type = 1; + if (this->task_type().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->task_type()); + } + + // bytes resource_meta = 2; + if (this->resource_meta().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::BytesSize( + this->resource_meta()); + } + + // .google.protobuf.Timestamp start_time = 4; + if (this->has_start_time()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *start_time_); + } + + // .google.protobuf.Timestamp end_time = 5; + if (this->has_end_time()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *end_time_); + } + + // .google.protobuf.Duration step = 6; + if (this->has_step()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *step_); + } + + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void GetTaskMetricsRequest::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.admin.GetTaskMetricsRequest) + GOOGLE_DCHECK_NE(&from, this); + const GetTaskMetricsRequest* source = + ::google::protobuf::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.admin.GetTaskMetricsRequest) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.admin.GetTaskMetricsRequest) + MergeFrom(*source); + } +} + +void 
GetTaskMetricsRequest::MergeFrom(const GetTaskMetricsRequest& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.admin.GetTaskMetricsRequest) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + queries_.MergeFrom(from.queries_); + if (from.task_type().size() > 0) { + + task_type_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.task_type_); + } + if (from.resource_meta().size() > 0) { + + resource_meta_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.resource_meta_); + } + if (from.has_start_time()) { + mutable_start_time()->::google::protobuf::Timestamp::MergeFrom(from.start_time()); + } + if (from.has_end_time()) { + mutable_end_time()->::google::protobuf::Timestamp::MergeFrom(from.end_time()); + } + if (from.has_step()) { + mutable_step()->::google::protobuf::Duration::MergeFrom(from.step()); + } +} + +void GetTaskMetricsRequest::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.admin.GetTaskMetricsRequest) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void GetTaskMetricsRequest::CopyFrom(const GetTaskMetricsRequest& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.admin.GetTaskMetricsRequest) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool GetTaskMetricsRequest::IsInitialized() const { + return true; +} + +void GetTaskMetricsRequest::Swap(GetTaskMetricsRequest* other) { + if (other == this) return; + InternalSwap(other); +} +void GetTaskMetricsRequest::InternalSwap(GetTaskMetricsRequest* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + queries_.InternalSwap(CastToBase(&other->queries_)); + task_type_.Swap(&other->task_type_, 
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + resource_meta_.Swap(&other->resource_meta_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + swap(start_time_, other->start_time_); + swap(end_time_, other->end_time_); + swap(step_, other->step_); +} + +::google::protobuf::Metadata GetTaskMetricsRequest::GetMetadata() const { + ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fadmin_2fagent_2eproto); + return ::file_level_metadata_flyteidl_2fadmin_2fagent_2eproto[kIndexInFileMessages]; +} + + +// =================================================================== + +void GetTaskMetricsResponse::InitAsDefaultInstance() { +} +class GetTaskMetricsResponse::HasBitSetters { + public: +}; + +void GetTaskMetricsResponse::clear_results() { + results_.Clear(); +} +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int GetTaskMetricsResponse::kResultsFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +GetTaskMetricsResponse::GetTaskMetricsResponse() + : ::google::protobuf::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:flyteidl.admin.GetTaskMetricsResponse) +} +GetTaskMetricsResponse::GetTaskMetricsResponse(const GetTaskMetricsResponse& from) + : ::google::protobuf::Message(), + _internal_metadata_(nullptr), + results_(from.results_) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + // @@protoc_insertion_point(copy_constructor:flyteidl.admin.GetTaskMetricsResponse) +} + +void GetTaskMetricsResponse::SharedCtor() { + ::google::protobuf::internal::InitSCC( + &scc_info_GetTaskMetricsResponse_flyteidl_2fadmin_2fagent_2eproto.base); +} + +GetTaskMetricsResponse::~GetTaskMetricsResponse() { + // @@protoc_insertion_point(destructor:flyteidl.admin.GetTaskMetricsResponse) + SharedDtor(); +} + +void GetTaskMetricsResponse::SharedDtor() { +} + +void GetTaskMetricsResponse::SetCachedSize(int size) 
const { + _cached_size_.Set(size); +} +const GetTaskMetricsResponse& GetTaskMetricsResponse::default_instance() { + ::google::protobuf::internal::InitSCC(&::scc_info_GetTaskMetricsResponse_flyteidl_2fadmin_2fagent_2eproto.base); + return *internal_default_instance(); +} + + +void GetTaskMetricsResponse::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.admin.GetTaskMetricsResponse) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + results_.Clear(); + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* GetTaskMetricsResponse::_InternalParse(const char* begin, const char* end, void* object, + ::google::protobuf::internal::ParseContext* ctx) { + auto msg = static_cast(object); + ::google::protobuf::int32 size; (void)size; + int depth; (void)depth; + ::google::protobuf::uint32 tag; + ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; + auto ptr = begin; + while (ptr < end) { + ptr = ::google::protobuf::io::Parse32(ptr, &tag); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + switch (tag >> 3) { + // repeated .flyteidl.core.ExecutionMetricResult results = 1; + case 1: { + if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; + do { + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::flyteidl::core::ExecutionMetricResult::_InternalParse; + object = msg->add_results(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + if (ptr >= end) break; + } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 10 && (ptr += 1)); + break; + } + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->EndGroup(tag); + return ptr; + } + auto res = 
UnknownFieldParse(tag, {_InternalParse, msg}, + ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); + ptr = res.first; + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); + if (res.second) return ptr; + } + } // switch + } // while + return ptr; +len_delim_till_end: + return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, + {parser_till_end, object}, size); +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool GetTaskMetricsResponse::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:flyteidl.admin.GetTaskMetricsResponse) + for (;;) { + ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated .flyteidl.core.ExecutionMetricResult results = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, add_results())); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:flyteidl.admin.GetTaskMetricsResponse) + return true; +failure: + // @@protoc_insertion_point(parse_failure:flyteidl.admin.GetTaskMetricsResponse) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void GetTaskMetricsResponse::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:flyteidl.admin.GetTaskMetricsResponse) + 
::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // repeated .flyteidl.core.ExecutionMetricResult results = 1; + for (unsigned int i = 0, + n = static_cast(this->results_size()); i < n; i++) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 1, + this->results(static_cast(i)), + output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:flyteidl.admin.GetTaskMetricsResponse) +} + +::google::protobuf::uint8* GetTaskMetricsResponse::InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:flyteidl.admin.GetTaskMetricsResponse) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // repeated .flyteidl.core.ExecutionMetricResult results = 1; + for (unsigned int i = 0, + n = static_cast(this->results_size()); i < n; i++) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 1, this->results(static_cast(i)), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:flyteidl.admin.GetTaskMetricsResponse) + return target; +} + +size_t GetTaskMetricsResponse::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:flyteidl.admin.GetTaskMetricsResponse) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) 
cached_has_bits; + + // repeated .flyteidl.core.ExecutionMetricResult results = 1; + { + unsigned int count = static_cast(this->results_size()); + total_size += 1UL * count; + for (unsigned int i = 0; i < count; i++) { + total_size += + ::google::protobuf::internal::WireFormatLite::MessageSize( + this->results(static_cast(i))); + } + } + + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void GetTaskMetricsResponse::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.admin.GetTaskMetricsResponse) + GOOGLE_DCHECK_NE(&from, this); + const GetTaskMetricsResponse* source = + ::google::protobuf::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.admin.GetTaskMetricsResponse) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.admin.GetTaskMetricsResponse) + MergeFrom(*source); + } +} + +void GetTaskMetricsResponse::MergeFrom(const GetTaskMetricsResponse& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.admin.GetTaskMetricsResponse) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + results_.MergeFrom(from.results_); +} + +void GetTaskMetricsResponse::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.admin.GetTaskMetricsResponse) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void GetTaskMetricsResponse::CopyFrom(const GetTaskMetricsResponse& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.admin.GetTaskMetricsResponse) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + 
+bool GetTaskMetricsResponse::IsInitialized() const { + return true; +} + +void GetTaskMetricsResponse::Swap(GetTaskMetricsResponse* other) { + if (other == this) return; + InternalSwap(other); +} +void GetTaskMetricsResponse::InternalSwap(GetTaskMetricsResponse* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + CastToBase(&results_)->InternalSwap(CastToBase(&other->results_)); +} + +::google::protobuf::Metadata GetTaskMetricsResponse::GetMetadata() const { + ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fadmin_2fagent_2eproto); + return ::file_level_metadata_flyteidl_2fadmin_2fagent_2eproto[kIndexInFileMessages]; +} + + +// =================================================================== + +void GetTaskLogsRequest::InitAsDefaultInstance() { +} +class GetTaskLogsRequest::HasBitSetters { + public: +}; + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int GetTaskLogsRequest::kTaskTypeFieldNumber; +const int GetTaskLogsRequest::kResourceMetaFieldNumber; +const int GetTaskLogsRequest::kLinesFieldNumber; +const int GetTaskLogsRequest::kTokenFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +GetTaskLogsRequest::GetTaskLogsRequest() + : ::google::protobuf::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:flyteidl.admin.GetTaskLogsRequest) +} +GetTaskLogsRequest::GetTaskLogsRequest(const GetTaskLogsRequest& from) + : ::google::protobuf::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + task_type_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.task_type().size() > 0) { + task_type_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.task_type_); + } + resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.resource_meta().size() > 0) { + 
resource_meta_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.resource_meta_); + } + token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.token().size() > 0) { + token_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.token_); + } + lines_ = from.lines_; + // @@protoc_insertion_point(copy_constructor:flyteidl.admin.GetTaskLogsRequest) +} + +void GetTaskLogsRequest::SharedCtor() { + ::google::protobuf::internal::InitSCC( + &scc_info_GetTaskLogsRequest_flyteidl_2fadmin_2fagent_2eproto.base); + task_type_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + lines_ = PROTOBUF_ULONGLONG(0); +} + +GetTaskLogsRequest::~GetTaskLogsRequest() { + // @@protoc_insertion_point(destructor:flyteidl.admin.GetTaskLogsRequest) + SharedDtor(); +} + +void GetTaskLogsRequest::SharedDtor() { + task_type_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + resource_meta_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + token_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} + +void GetTaskLogsRequest::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const GetTaskLogsRequest& GetTaskLogsRequest::default_instance() { + ::google::protobuf::internal::InitSCC(&::scc_info_GetTaskLogsRequest_flyteidl_2fadmin_2fagent_2eproto.base); + return *internal_default_instance(); +} + + +void GetTaskLogsRequest::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.admin.GetTaskLogsRequest) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + 
task_type_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + resource_meta_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + lines_ = PROTOBUF_ULONGLONG(0); + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* GetTaskLogsRequest::_InternalParse(const char* begin, const char* end, void* object, + ::google::protobuf::internal::ParseContext* ctx) { + auto msg = static_cast(object); + ::google::protobuf::int32 size; (void)size; + int depth; (void)depth; + ::google::protobuf::uint32 tag; + ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; + auto ptr = begin; + while (ptr < end) { + ptr = ::google::protobuf::io::Parse32(ptr, &tag); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + switch (tag >> 3) { + // string task_type = 1; + case 1: { + if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.GetTaskLogsRequest.task_type"); + object = msg->mutable_task_type(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } + // bytes resource_meta = 2; + case 2: { + if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + object = msg->mutable_resource_meta(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + 
parser_till_end = ::google::protobuf::internal::GreedyStringParser; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheck(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } + // uint64 lines = 3; + case 3: { + if (static_cast<::google::protobuf::uint8>(tag) != 24) goto handle_unusual; + msg->set_lines(::google::protobuf::internal::ReadVarint(&ptr)); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + break; + } + // string token = 4; + case 4: { + if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.GetTaskLogsRequest.token"); + object = msg->mutable_token(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->EndGroup(tag); + return ptr; + } + auto res = UnknownFieldParse(tag, {_InternalParse, msg}, + ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); + ptr = res.first; + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); + if (res.second) return ptr; + } + } // switch + } // while + return ptr; +string_till_end: + static_cast<::std::string*>(object)->clear(); + static_cast<::std::string*>(object)->reserve(size); + goto len_delim_till_end; +len_delim_till_end: + return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, + {parser_till_end, object}, size); +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool GetTaskLogsRequest::MergePartialFromCodedStream( + 
::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:flyteidl.admin.GetTaskLogsRequest) + for (;;) { + ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // string task_type = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_task_type())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->task_type().data(), static_cast(this->task_type().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.GetTaskLogsRequest.task_type")); + } else { + goto handle_unusual; + } + break; + } + + // bytes resource_meta = 2; + case 2: { + if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadBytes( + input, this->mutable_resource_meta())); + } else { + goto handle_unusual; + } + break; + } + + // uint64 lines = 3; + case 3: { + if (static_cast< ::google::protobuf::uint8>(tag) == (24 & 0xFF)) { + + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + ::google::protobuf::uint64, ::google::protobuf::internal::WireFormatLite::TYPE_UINT64>( + input, &lines_))); + } else { + goto handle_unusual; + } + break; + } + + // string token = 4; + case 4: { + if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_token())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->token().data(), static_cast(this->token().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + 
"flyteidl.admin.GetTaskLogsRequest.token")); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:flyteidl.admin.GetTaskLogsRequest) + return true; +failure: + // @@protoc_insertion_point(parse_failure:flyteidl.admin.GetTaskLogsRequest) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void GetTaskLogsRequest::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:flyteidl.admin.GetTaskLogsRequest) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // string task_type = 1; + if (this->task_type().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->task_type().data(), static_cast(this->task_type().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskLogsRequest.task_type"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 1, this->task_type(), output); + } + + // bytes resource_meta = 2; + if (this->resource_meta().size() > 0) { + ::google::protobuf::internal::WireFormatLite::WriteBytesMaybeAliased( + 2, this->resource_meta(), output); + } + + // uint64 lines = 3; + if (this->lines() != 0) { + ::google::protobuf::internal::WireFormatLite::WriteUInt64(3, this->lines(), output); + } + + // string token = 4; + if (this->token().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->token().data(), static_cast(this->token().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskLogsRequest.token"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 4, this->token(), 
output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:flyteidl.admin.GetTaskLogsRequest) +} + +::google::protobuf::uint8* GetTaskLogsRequest::InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:flyteidl.admin.GetTaskLogsRequest) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // string task_type = 1; + if (this->task_type().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->task_type().data(), static_cast(this->task_type().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskLogsRequest.task_type"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->task_type(), target); + } + + // bytes resource_meta = 2; + if (this->resource_meta().size() > 0) { + target = + ::google::protobuf::internal::WireFormatLite::WriteBytesToArray( + 2, this->resource_meta(), target); + } + + // uint64 lines = 3; + if (this->lines() != 0) { + target = ::google::protobuf::internal::WireFormatLite::WriteUInt64ToArray(3, this->lines(), target); + } + + // string token = 4; + if (this->token().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->token().data(), static_cast(this->token().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskLogsRequest.token"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 4, this->token(), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // 
@@protoc_insertion_point(serialize_to_array_end:flyteidl.admin.GetTaskLogsRequest) + return target; +} + +size_t GetTaskLogsRequest::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:flyteidl.admin.GetTaskLogsRequest) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // string task_type = 1; + if (this->task_type().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->task_type()); + } + + // bytes resource_meta = 2; + if (this->resource_meta().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::BytesSize( + this->resource_meta()); + } + + // string token = 4; + if (this->token().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->token()); + } + + // uint64 lines = 3; + if (this->lines() != 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::UInt64Size( + this->lines()); + } + + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void GetTaskLogsRequest::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.admin.GetTaskLogsRequest) + GOOGLE_DCHECK_NE(&from, this); + const GetTaskLogsRequest* source = + ::google::protobuf::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.admin.GetTaskLogsRequest) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // 
@@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.admin.GetTaskLogsRequest) + MergeFrom(*source); + } +} + +void GetTaskLogsRequest::MergeFrom(const GetTaskLogsRequest& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.admin.GetTaskLogsRequest) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if (from.task_type().size() > 0) { + + task_type_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.task_type_); + } + if (from.resource_meta().size() > 0) { + + resource_meta_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.resource_meta_); + } + if (from.token().size() > 0) { + + token_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.token_); + } + if (from.lines() != 0) { + set_lines(from.lines()); + } +} + +void GetTaskLogsRequest::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.admin.GetTaskLogsRequest) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void GetTaskLogsRequest::CopyFrom(const GetTaskLogsRequest& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.admin.GetTaskLogsRequest) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool GetTaskLogsRequest::IsInitialized() const { + return true; +} + +void GetTaskLogsRequest::Swap(GetTaskLogsRequest* other) { + if (other == this) return; + InternalSwap(other); +} +void GetTaskLogsRequest::InternalSwap(GetTaskLogsRequest* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + task_type_.Swap(&other->task_type_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + resource_meta_.Swap(&other->resource_meta_, 
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + token_.Swap(&other->token_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + swap(lines_, other->lines_); +} + +::google::protobuf::Metadata GetTaskLogsRequest::GetMetadata() const { + ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fadmin_2fagent_2eproto); + return ::file_level_metadata_flyteidl_2fadmin_2fagent_2eproto[kIndexInFileMessages]; +} + + +// =================================================================== + +void GetTaskLogsResponse::InitAsDefaultInstance() { +} +class GetTaskLogsResponse::HasBitSetters { + public: +}; + +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int GetTaskLogsResponse::kResultsFieldNumber; +const int GetTaskLogsResponse::kTokenFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +GetTaskLogsResponse::GetTaskLogsResponse() + : ::google::protobuf::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:flyteidl.admin.GetTaskLogsResponse) +} +GetTaskLogsResponse::GetTaskLogsResponse(const GetTaskLogsResponse& from) + : ::google::protobuf::Message(), + _internal_metadata_(nullptr), + results_(from.results_) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.token().size() > 0) { + token_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.token_); + } + // @@protoc_insertion_point(copy_constructor:flyteidl.admin.GetTaskLogsResponse) +} + +void GetTaskLogsResponse::SharedCtor() { + ::google::protobuf::internal::InitSCC( + &scc_info_GetTaskLogsResponse_flyteidl_2fadmin_2fagent_2eproto.base); + token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} + +GetTaskLogsResponse::~GetTaskLogsResponse() { + // 
@@protoc_insertion_point(destructor:flyteidl.admin.GetTaskLogsResponse) + SharedDtor(); +} + +void GetTaskLogsResponse::SharedDtor() { + token_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} + +void GetTaskLogsResponse::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const GetTaskLogsResponse& GetTaskLogsResponse::default_instance() { + ::google::protobuf::internal::InitSCC(&::scc_info_GetTaskLogsResponse_flyteidl_2fadmin_2fagent_2eproto.base); + return *internal_default_instance(); +} + + +void GetTaskLogsResponse::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.admin.GetTaskLogsResponse) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + results_.Clear(); + token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* GetTaskLogsResponse::_InternalParse(const char* begin, const char* end, void* object, + ::google::protobuf::internal::ParseContext* ctx) { + auto msg = static_cast(object); + ::google::protobuf::int32 size; (void)size; + int depth; (void)depth; + ::google::protobuf::uint32 tag; + ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; + auto ptr = begin; + while (ptr < end) { + ptr = ::google::protobuf::io::Parse32(ptr, &tag); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + switch (tag >> 3) { + // repeated string results = 1; + case 1: { + if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; + do { + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.GetTaskLogsResponse.results"); + object = msg->add_results(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = 
::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + if (ptr >= end) break; + } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 10 && (ptr += 1)); + break; + } + // string token = 2; + case 2: { + if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.GetTaskLogsResponse.token"); + object = msg->mutable_token(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->EndGroup(tag); + return ptr; + } + auto res = UnknownFieldParse(tag, {_InternalParse, msg}, + ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); + ptr = res.first; + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); + if (res.second) return ptr; + } + } // switch + } // while + return ptr; +string_till_end: + static_cast<::std::string*>(object)->clear(); + static_cast<::std::string*>(object)->reserve(size); + goto len_delim_till_end; +len_delim_till_end: + return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, + {parser_till_end, object}, size); +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool GetTaskLogsResponse::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if 
(!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:flyteidl.admin.GetTaskLogsResponse) + for (;;) { + ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // repeated string results = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->add_results())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->results(this->results_size() - 1).data(), + static_cast(this->results(this->results_size() - 1).length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.GetTaskLogsResponse.results")); + } else { + goto handle_unusual; + } + break; + } + + // string token = 2; + case 2: { + if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_token())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->token().data(), static_cast(this->token().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.GetTaskLogsResponse.token")); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:flyteidl.admin.GetTaskLogsResponse) + return true; +failure: + // @@protoc_insertion_point(parse_failure:flyteidl.admin.GetTaskLogsResponse) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void GetTaskLogsResponse::SerializeWithCachedSizes( + 
::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:flyteidl.admin.GetTaskLogsResponse) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // repeated string results = 1; + for (int i = 0, n = this->results_size(); i < n; i++) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->results(i).data(), static_cast(this->results(i).length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskLogsResponse.results"); + ::google::protobuf::internal::WireFormatLite::WriteString( + 1, this->results(i), output); + } + + // string token = 2; + if (this->token().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->token().data(), static_cast(this->token().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskLogsResponse.token"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 2, this->token(), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:flyteidl.admin.GetTaskLogsResponse) +} + +::google::protobuf::uint8* GetTaskLogsResponse::InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:flyteidl.admin.GetTaskLogsResponse) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // repeated string results = 1; + for (int i = 0, n = this->results_size(); i < n; i++) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->results(i).data(), static_cast(this->results(i).length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskLogsResponse.results"); + target = 
::google::protobuf::internal::WireFormatLite:: + WriteStringToArray(1, this->results(i), target); + } + + // string token = 2; + if (this->token().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->token().data(), static_cast(this->token().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.GetTaskLogsResponse.token"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 2, this->token(), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:flyteidl.admin.GetTaskLogsResponse) + return target; +} + +size_t GetTaskLogsResponse::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:flyteidl.admin.GetTaskLogsResponse) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // repeated string results = 1; + total_size += 1 * + ::google::protobuf::internal::FromIntSize(this->results_size()); + for (int i = 0, n = this->results_size(); i < n; i++) { + total_size += ::google::protobuf::internal::WireFormatLite::StringSize( + this->results(i)); + } + + // string token = 2; + if (this->token().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->token()); + } + + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void GetTaskLogsResponse::MergeFrom(const ::google::protobuf::Message& from) { +// 
@@protoc_insertion_point(generalized_merge_from_start:flyteidl.admin.GetTaskLogsResponse) + GOOGLE_DCHECK_NE(&from, this); + const GetTaskLogsResponse* source = + ::google::protobuf::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.admin.GetTaskLogsResponse) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.admin.GetTaskLogsResponse) + MergeFrom(*source); + } +} + +void GetTaskLogsResponse::MergeFrom(const GetTaskLogsResponse& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.admin.GetTaskLogsResponse) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + results_.MergeFrom(from.results_); + if (from.token().size() > 0) { + + token_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.token_); + } +} + +void GetTaskLogsResponse::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.admin.GetTaskLogsResponse) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void GetTaskLogsResponse::CopyFrom(const GetTaskLogsResponse& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.admin.GetTaskLogsResponse) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool GetTaskLogsResponse::IsInitialized() const { + return true; +} + +void GetTaskLogsResponse::Swap(GetTaskLogsResponse* other) { + if (other == this) return; + InternalSwap(other); +} +void GetTaskLogsResponse::InternalSwap(GetTaskLogsResponse* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + results_.InternalSwap(CastToBase(&other->results_)); + token_.Swap(&other->token_, 
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); +} + +::google::protobuf::Metadata GetTaskLogsResponse::GetMetadata() const { + ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fadmin_2fagent_2eproto); + return ::file_level_metadata_flyteidl_2fadmin_2fagent_2eproto[kIndexInFileMessages]; +} + + +// @@protoc_insertion_point(namespace_scope) +} // namespace admin +} // namespace flyteidl +namespace google { +namespace protobuf { +template<> PROTOBUF_NOINLINE ::flyteidl::admin::TaskExecutionMetadata_LabelsEntry_DoNotUse* Arena::CreateMaybeMessage< ::flyteidl::admin::TaskExecutionMetadata_LabelsEntry_DoNotUse >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::TaskExecutionMetadata_LabelsEntry_DoNotUse >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::TaskExecutionMetadata_AnnotationsEntry_DoNotUse* Arena::CreateMaybeMessage< ::flyteidl::admin::TaskExecutionMetadata_AnnotationsEntry_DoNotUse >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::TaskExecutionMetadata_AnnotationsEntry_DoNotUse >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::TaskExecutionMetadata_EnvironmentVariablesEntry_DoNotUse* Arena::CreateMaybeMessage< ::flyteidl::admin::TaskExecutionMetadata_EnvironmentVariablesEntry_DoNotUse >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::TaskExecutionMetadata_EnvironmentVariablesEntry_DoNotUse >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::TaskExecutionMetadata* Arena::CreateMaybeMessage< ::flyteidl::admin::TaskExecutionMetadata >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::TaskExecutionMetadata >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::CreateTaskRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::CreateTaskRequest >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::CreateTaskRequest >(arena); +} +template<> PROTOBUF_NOINLINE 
::flyteidl::admin::CreateTaskResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::CreateTaskResponse >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::CreateTaskResponse >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetTaskRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::GetTaskRequest >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::GetTaskRequest >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetTaskResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::GetTaskResponse >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::GetTaskResponse >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::Resource* Arena::CreateMaybeMessage< ::flyteidl::admin::Resource >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::Resource >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::DeleteTaskRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::DeleteTaskRequest >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::DeleteTaskRequest >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::DeleteTaskResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::DeleteTaskResponse >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::DeleteTaskResponse >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::Agent* Arena::CreateMaybeMessage< ::flyteidl::admin::Agent >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::Agent >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetAgentRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::GetAgentRequest >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::GetAgentRequest >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetAgentResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::GetAgentResponse >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::GetAgentResponse >(arena); +} +template<> 
PROTOBUF_NOINLINE ::flyteidl::admin::ListAgentsRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::ListAgentsRequest >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::ListAgentsRequest >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::ListAgentsResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::ListAgentsResponse >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::ListAgentsResponse >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetTaskMetricsRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::GetTaskMetricsRequest >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::GetTaskMetricsRequest >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetTaskMetricsResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::GetTaskMetricsResponse >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::GetTaskMetricsResponse >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetTaskLogsRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::GetTaskLogsRequest >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::GetTaskLogsRequest >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetTaskLogsResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::GetTaskLogsResponse >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::GetTaskLogsResponse >(arena); } } // namespace protobuf } // namespace google diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/agent.pb.h b/flyteidl/gen/pb-cpp/flyteidl/admin/agent.pb.h index 86aeccc4b2..4912d899c8 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/agent.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/agent.pb.h @@ -37,9 +37,11 @@ #include #include "flyteidl/core/literals.pb.h" #include "flyteidl/core/tasks.pb.h" -#include "flyteidl/core/interface.pb.h" #include "flyteidl/core/identifier.pb.h" #include "flyteidl/core/execution.pb.h" +#include "flyteidl/core/metrics.pb.h" +#include +#include // 
@@protoc_insertion_point(includes) #include #define PROTOBUF_INTERNAL_EXPORT_flyteidl_2fadmin_2fagent_2eproto @@ -50,7 +52,7 @@ struct TableStruct_flyteidl_2fadmin_2fagent_2eproto { PROTOBUF_SECTION_VARIABLE(protodesc_cold); static const ::google::protobuf::internal::AuxillaryParseTableField aux[] PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::google::protobuf::internal::ParseTable schema[16] + static const ::google::protobuf::internal::ParseTable schema[20] PROTOBUF_SECTION_VARIABLE(protodesc_cold); static const ::google::protobuf::internal::FieldMetadata field_metadata[]; static const ::google::protobuf::internal::SerializationTable serialization_table[]; @@ -80,6 +82,18 @@ extern GetAgentRequestDefaultTypeInternal _GetAgentRequest_default_instance_; class GetAgentResponse; class GetAgentResponseDefaultTypeInternal; extern GetAgentResponseDefaultTypeInternal _GetAgentResponse_default_instance_; +class GetTaskLogsRequest; +class GetTaskLogsRequestDefaultTypeInternal; +extern GetTaskLogsRequestDefaultTypeInternal _GetTaskLogsRequest_default_instance_; +class GetTaskLogsResponse; +class GetTaskLogsResponseDefaultTypeInternal; +extern GetTaskLogsResponseDefaultTypeInternal _GetTaskLogsResponse_default_instance_; +class GetTaskMetricsRequest; +class GetTaskMetricsRequestDefaultTypeInternal; +extern GetTaskMetricsRequestDefaultTypeInternal _GetTaskMetricsRequest_default_instance_; +class GetTaskMetricsResponse; +class GetTaskMetricsResponseDefaultTypeInternal; +extern GetTaskMetricsResponseDefaultTypeInternal _GetTaskMetricsResponse_default_instance_; class GetTaskRequest; class GetTaskRequestDefaultTypeInternal; extern GetTaskRequestDefaultTypeInternal _GetTaskRequest_default_instance_; @@ -118,6 +132,10 @@ template<> ::flyteidl::admin::DeleteTaskRequest* Arena::CreateMaybeMessage<::fly template<> ::flyteidl::admin::DeleteTaskResponse* Arena::CreateMaybeMessage<::flyteidl::admin::DeleteTaskResponse>(Arena*); template<> ::flyteidl::admin::GetAgentRequest* 
Arena::CreateMaybeMessage<::flyteidl::admin::GetAgentRequest>(Arena*); template<> ::flyteidl::admin::GetAgentResponse* Arena::CreateMaybeMessage<::flyteidl::admin::GetAgentResponse>(Arena*); +template<> ::flyteidl::admin::GetTaskLogsRequest* Arena::CreateMaybeMessage<::flyteidl::admin::GetTaskLogsRequest>(Arena*); +template<> ::flyteidl::admin::GetTaskLogsResponse* Arena::CreateMaybeMessage<::flyteidl::admin::GetTaskLogsResponse>(Arena*); +template<> ::flyteidl::admin::GetTaskMetricsRequest* Arena::CreateMaybeMessage<::flyteidl::admin::GetTaskMetricsRequest>(Arena*); +template<> ::flyteidl::admin::GetTaskMetricsResponse* Arena::CreateMaybeMessage<::flyteidl::admin::GetTaskMetricsResponse>(Arena*); template<> ::flyteidl::admin::GetTaskRequest* Arena::CreateMaybeMessage<::flyteidl::admin::GetTaskRequest>(Arena*); template<> ::flyteidl::admin::GetTaskResponse* Arena::CreateMaybeMessage<::flyteidl::admin::GetTaskResponse>(Arena*); template<> ::flyteidl::admin::ListAgentsRequest* Arena::CreateMaybeMessage<::flyteidl::admin::ListAgentsRequest>(Arena*); @@ -1115,11 +1133,17 @@ class Resource final : ::flyteidl::core::LiteralMap* mutable_outputs(); void set_allocated_outputs(::flyteidl::core::LiteralMap* outputs); - // .flyteidl.admin.State state = 1; - void clear_state(); - static const int kStateFieldNumber = 1; - ::flyteidl::admin::State state() const; - void set_state(::flyteidl::admin::State value); + // .flyteidl.admin.State state = 1 [deprecated = true]; + PROTOBUF_DEPRECATED void clear_state(); + PROTOBUF_DEPRECATED static const int kStateFieldNumber = 1; + PROTOBUF_DEPRECATED ::flyteidl::admin::State state() const; + PROTOBUF_DEPRECATED void set_state(::flyteidl::admin::State value); + + // .flyteidl.core.TaskExecution.Phase phase = 5; + void clear_phase(); + static const int kPhaseFieldNumber = 5; + ::flyteidl::core::TaskExecution_Phase phase() const; + void set_phase(::flyteidl::core::TaskExecution_Phase value); // 
@@protoc_insertion_point(class_scope:flyteidl.admin.Resource) private: @@ -1130,6 +1154,7 @@ class Resource final : ::google::protobuf::internal::ArenaStringPtr message_; ::flyteidl::core::LiteralMap* outputs_; int state_; + int phase_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fagent_2eproto; }; @@ -1974,1300 +1999,2579 @@ class ListAgentsResponse final : mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fagent_2eproto; }; -// =================================================================== - - -// =================================================================== - -#ifdef __GNUC__ - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wstrict-aliasing" -#endif // __GNUC__ -// ------------------------------------------------------------------- - // ------------------------------------------------------------------- -// ------------------------------------------------------------------- +class GetTaskMetricsRequest final : + public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.admin.GetTaskMetricsRequest) */ { + public: + GetTaskMetricsRequest(); + virtual ~GetTaskMetricsRequest(); -// TaskExecutionMetadata + GetTaskMetricsRequest(const GetTaskMetricsRequest& from); -// .flyteidl.core.TaskExecutionIdentifier task_execution_id = 1; -inline bool TaskExecutionMetadata::has_task_execution_id() const { - return this != internal_default_instance() && task_execution_id_ != nullptr; -} -inline const ::flyteidl::core::TaskExecutionIdentifier& TaskExecutionMetadata::task_execution_id() const { - const ::flyteidl::core::TaskExecutionIdentifier* p = task_execution_id_; - // @@protoc_insertion_point(field_get:flyteidl.admin.TaskExecutionMetadata.task_execution_id) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_TaskExecutionIdentifier_default_instance_); -} -inline ::flyteidl::core::TaskExecutionIdentifier* TaskExecutionMetadata::release_task_execution_id() { - // @@protoc_insertion_point(field_release:flyteidl.admin.TaskExecutionMetadata.task_execution_id) - - ::flyteidl::core::TaskExecutionIdentifier* temp = task_execution_id_; - task_execution_id_ = nullptr; - return temp; -} -inline ::flyteidl::core::TaskExecutionIdentifier* TaskExecutionMetadata::mutable_task_execution_id() { - - if (task_execution_id_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::TaskExecutionIdentifier>(GetArenaNoVirtual()); - task_execution_id_ = p; + inline GetTaskMetricsRequest& operator=(const GetTaskMetricsRequest& from) { + CopyFrom(from); + return *this; } - // @@protoc_insertion_point(field_mutable:flyteidl.admin.TaskExecutionMetadata.task_execution_id) - return task_execution_id_; -} -inline void TaskExecutionMetadata::set_allocated_task_execution_id(::flyteidl::core::TaskExecutionIdentifier* task_execution_id) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(task_execution_id_); + #if LANG_CXX11 + GetTaskMetricsRequest(GetTaskMetricsRequest&& from) noexcept + : GetTaskMetricsRequest() { + *this = ::std::move(from); } - if (task_execution_id) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - task_execution_id = ::google::protobuf::internal::GetOwnedMessage( - message_arena, task_execution_id, submessage_arena); + + inline GetTaskMetricsRequest& operator=(GetTaskMetricsRequest&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); } - - } else { - + return *this; } - task_execution_id_ = task_execution_id; - // 
@@protoc_insertion_point(field_set_allocated:flyteidl.admin.TaskExecutionMetadata.task_execution_id) -} + #endif + static const ::google::protobuf::Descriptor* descriptor() { + return default_instance().GetDescriptor(); + } + static const GetTaskMetricsRequest& default_instance(); -// string namespace = 2; -inline void TaskExecutionMetadata::clear_namespace_() { - namespace__.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& TaskExecutionMetadata::namespace_() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.TaskExecutionMetadata.namespace) - return namespace__.GetNoArena(); -} -inline void TaskExecutionMetadata::set_namespace_(const ::std::string& value) { - - namespace__.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.admin.TaskExecutionMetadata.namespace) -} -#if LANG_CXX11 -inline void TaskExecutionMetadata::set_namespace_(::std::string&& value) { - - namespace__.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.TaskExecutionMetadata.namespace) -} -#endif -inline void TaskExecutionMetadata::set_namespace_(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - namespace__.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.admin.TaskExecutionMetadata.namespace) -} -inline void TaskExecutionMetadata::set_namespace_(const char* value, size_t size) { - - namespace__.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.TaskExecutionMetadata.namespace) -} -inline ::std::string* TaskExecutionMetadata::mutable_namespace_() { - - // 
@@protoc_insertion_point(field_mutable:flyteidl.admin.TaskExecutionMetadata.namespace) - return namespace__.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* TaskExecutionMetadata::release_namespace_() { - // @@protoc_insertion_point(field_release:flyteidl.admin.TaskExecutionMetadata.namespace) - - return namespace__.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void TaskExecutionMetadata::set_allocated_namespace_(::std::string* namespace_) { - if (namespace_ != nullptr) { - - } else { - + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const GetTaskMetricsRequest* internal_default_instance() { + return reinterpret_cast( + &_GetTaskMetricsRequest_default_instance_); } - namespace__.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), namespace_); - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.TaskExecutionMetadata.namespace) -} + static constexpr int kIndexInFileMessages = + 16; -// map labels = 3; -inline int TaskExecutionMetadata::labels_size() const { - return labels_.size(); -} -inline void TaskExecutionMetadata::clear_labels() { - labels_.Clear(); -} -inline const ::google::protobuf::Map< ::std::string, ::std::string >& -TaskExecutionMetadata::labels() const { - // @@protoc_insertion_point(field_map:flyteidl.admin.TaskExecutionMetadata.labels) - return labels_.GetMap(); -} -inline ::google::protobuf::Map< ::std::string, ::std::string >* -TaskExecutionMetadata::mutable_labels() { - // @@protoc_insertion_point(field_mutable_map:flyteidl.admin.TaskExecutionMetadata.labels) - return labels_.MutableMap(); -} + void Swap(GetTaskMetricsRequest* other); + friend void swap(GetTaskMetricsRequest& a, GetTaskMetricsRequest& b) { + a.Swap(&b); + } -// map annotations = 4; -inline int TaskExecutionMetadata::annotations_size() const { - return annotations_.size(); -} -inline void 
TaskExecutionMetadata::clear_annotations() { - annotations_.Clear(); -} -inline const ::google::protobuf::Map< ::std::string, ::std::string >& -TaskExecutionMetadata::annotations() const { - // @@protoc_insertion_point(field_map:flyteidl.admin.TaskExecutionMetadata.annotations) - return annotations_.GetMap(); -} -inline ::google::protobuf::Map< ::std::string, ::std::string >* -TaskExecutionMetadata::mutable_annotations() { - // @@protoc_insertion_point(field_mutable_map:flyteidl.admin.TaskExecutionMetadata.annotations) - return annotations_.MutableMap(); -} + // implements Message ---------------------------------------------- -// string k8s_service_account = 5; -inline void TaskExecutionMetadata::clear_k8s_service_account() { - k8s_service_account_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& TaskExecutionMetadata::k8s_service_account() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) - return k8s_service_account_.GetNoArena(); -} -inline void TaskExecutionMetadata::set_k8s_service_account(const ::std::string& value) { - - k8s_service_account_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) -} -#if LANG_CXX11 -inline void TaskExecutionMetadata::set_k8s_service_account(::std::string&& value) { - - k8s_service_account_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) -} -#endif -inline void TaskExecutionMetadata::set_k8s_service_account(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - k8s_service_account_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // 
@@protoc_insertion_point(field_set_char:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) -} -inline void TaskExecutionMetadata::set_k8s_service_account(const char* value, size_t size) { - - k8s_service_account_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) -} -inline ::std::string* TaskExecutionMetadata::mutable_k8s_service_account() { - - // @@protoc_insertion_point(field_mutable:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) - return k8s_service_account_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* TaskExecutionMetadata::release_k8s_service_account() { - // @@protoc_insertion_point(field_release:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) - - return k8s_service_account_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void TaskExecutionMetadata::set_allocated_k8s_service_account(::std::string* k8s_service_account) { - if (k8s_service_account != nullptr) { - - } else { - + inline GetTaskMetricsRequest* New() const final { + return CreateMaybeMessage(nullptr); } - k8s_service_account_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), k8s_service_account); - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) -} - -// map environment_variables = 6; -inline int TaskExecutionMetadata::environment_variables_size() const { - return environment_variables_.size(); -} -inline void TaskExecutionMetadata::clear_environment_variables() { - environment_variables_.Clear(); -} -inline const ::google::protobuf::Map< ::std::string, ::std::string >& -TaskExecutionMetadata::environment_variables() const { - // 
@@protoc_insertion_point(field_map:flyteidl.admin.TaskExecutionMetadata.environment_variables) - return environment_variables_.GetMap(); -} -inline ::google::protobuf::Map< ::std::string, ::std::string >* -TaskExecutionMetadata::mutable_environment_variables() { - // @@protoc_insertion_point(field_mutable_map:flyteidl.admin.TaskExecutionMetadata.environment_variables) - return environment_variables_.MutableMap(); -} -// ------------------------------------------------------------------- + GetTaskMetricsRequest* New(::google::protobuf::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::google::protobuf::Message& from) final; + void MergeFrom(const ::google::protobuf::Message& from) final; + void CopyFrom(const GetTaskMetricsRequest& from); + void MergeFrom(const GetTaskMetricsRequest& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; -// CreateTaskRequest + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); + ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } + #else + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const final; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } -// .flyteidl.core.LiteralMap inputs = 1; -inline bool CreateTaskRequest::has_inputs() const { - return this != internal_default_instance() && inputs_ != nullptr; -} -inline const ::flyteidl::core::LiteralMap& CreateTaskRequest::inputs() const { - const ::flyteidl::core::LiteralMap* p 
= inputs_; - // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskRequest.inputs) - return p != nullptr ? *p : *reinterpret_cast( + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(GetTaskMetricsRequest* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::google::protobuf::Metadata GetMetadata() const final; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated string queries = 3; + int queries_size() const; + void clear_queries(); + static const int kQueriesFieldNumber = 3; + const ::std::string& queries(int index) const; + ::std::string* mutable_queries(int index); + void set_queries(int index, const ::std::string& value); + #if LANG_CXX11 + void set_queries(int index, ::std::string&& value); + #endif + void set_queries(int index, const char* value); + void set_queries(int index, const char* value, size_t size); + ::std::string* add_queries(); + void add_queries(const ::std::string& value); + #if LANG_CXX11 + void add_queries(::std::string&& value); + #endif + void add_queries(const char* value); + void add_queries(const char* value, size_t size); + const ::google::protobuf::RepeatedPtrField<::std::string>& queries() const; + ::google::protobuf::RepeatedPtrField<::std::string>* mutable_queries(); + + // string task_type = 1; + void clear_task_type(); + static const int kTaskTypeFieldNumber = 1; + const ::std::string& task_type() const; + void set_task_type(const ::std::string& value); + #if LANG_CXX11 + void set_task_type(::std::string&& value); + #endif + void set_task_type(const char* value); + void set_task_type(const char* value, size_t size); + ::std::string* mutable_task_type(); + ::std::string* release_task_type(); + void 
set_allocated_task_type(::std::string* task_type); + + // bytes resource_meta = 2; + void clear_resource_meta(); + static const int kResourceMetaFieldNumber = 2; + const ::std::string& resource_meta() const; + void set_resource_meta(const ::std::string& value); + #if LANG_CXX11 + void set_resource_meta(::std::string&& value); + #endif + void set_resource_meta(const char* value); + void set_resource_meta(const void* value, size_t size); + ::std::string* mutable_resource_meta(); + ::std::string* release_resource_meta(); + void set_allocated_resource_meta(::std::string* resource_meta); + + // .google.protobuf.Timestamp start_time = 4; + bool has_start_time() const; + void clear_start_time(); + static const int kStartTimeFieldNumber = 4; + const ::google::protobuf::Timestamp& start_time() const; + ::google::protobuf::Timestamp* release_start_time(); + ::google::protobuf::Timestamp* mutable_start_time(); + void set_allocated_start_time(::google::protobuf::Timestamp* start_time); + + // .google.protobuf.Timestamp end_time = 5; + bool has_end_time() const; + void clear_end_time(); + static const int kEndTimeFieldNumber = 5; + const ::google::protobuf::Timestamp& end_time() const; + ::google::protobuf::Timestamp* release_end_time(); + ::google::protobuf::Timestamp* mutable_end_time(); + void set_allocated_end_time(::google::protobuf::Timestamp* end_time); + + // .google.protobuf.Duration step = 6; + bool has_step() const; + void clear_step(); + static const int kStepFieldNumber = 6; + const ::google::protobuf::Duration& step() const; + ::google::protobuf::Duration* release_step(); + ::google::protobuf::Duration* mutable_step(); + void set_allocated_step(::google::protobuf::Duration* step); + + // @@protoc_insertion_point(class_scope:flyteidl.admin.GetTaskMetricsRequest) + private: + class HasBitSetters; + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::RepeatedPtrField<::std::string> queries_; + 
::google::protobuf::internal::ArenaStringPtr task_type_; + ::google::protobuf::internal::ArenaStringPtr resource_meta_; + ::google::protobuf::Timestamp* start_time_; + ::google::protobuf::Timestamp* end_time_; + ::google::protobuf::Duration* step_; + mutable ::google::protobuf::internal::CachedSize _cached_size_; + friend struct ::TableStruct_flyteidl_2fadmin_2fagent_2eproto; +}; +// ------------------------------------------------------------------- + +class GetTaskMetricsResponse final : + public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.admin.GetTaskMetricsResponse) */ { + public: + GetTaskMetricsResponse(); + virtual ~GetTaskMetricsResponse(); + + GetTaskMetricsResponse(const GetTaskMetricsResponse& from); + + inline GetTaskMetricsResponse& operator=(const GetTaskMetricsResponse& from) { + CopyFrom(from); + return *this; + } + #if LANG_CXX11 + GetTaskMetricsResponse(GetTaskMetricsResponse&& from) noexcept + : GetTaskMetricsResponse() { + *this = ::std::move(from); + } + + inline GetTaskMetricsResponse& operator=(GetTaskMetricsResponse&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + #endif + static const ::google::protobuf::Descriptor* descriptor() { + return default_instance().GetDescriptor(); + } + static const GetTaskMetricsResponse& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const GetTaskMetricsResponse* internal_default_instance() { + return reinterpret_cast( + &_GetTaskMetricsResponse_default_instance_); + } + static constexpr int kIndexInFileMessages = + 17; + + void Swap(GetTaskMetricsResponse* other); + friend void swap(GetTaskMetricsResponse& a, GetTaskMetricsResponse& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline GetTaskMetricsResponse* New() const final { + return 
CreateMaybeMessage(nullptr); + } + + GetTaskMetricsResponse* New(::google::protobuf::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::google::protobuf::Message& from) final; + void MergeFrom(const ::google::protobuf::Message& from) final; + void CopyFrom(const GetTaskMetricsResponse& from); + void MergeFrom(const GetTaskMetricsResponse& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); + ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } + #else + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const final; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(GetTaskMetricsResponse* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::google::protobuf::Metadata GetMetadata() const final; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated .flyteidl.core.ExecutionMetricResult results = 1; + int results_size() const; + void clear_results(); + static const int kResultsFieldNumber = 1; + ::flyteidl::core::ExecutionMetricResult* mutable_results(int index); + 
::google::protobuf::RepeatedPtrField< ::flyteidl::core::ExecutionMetricResult >* + mutable_results(); + const ::flyteidl::core::ExecutionMetricResult& results(int index) const; + ::flyteidl::core::ExecutionMetricResult* add_results(); + const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ExecutionMetricResult >& + results() const; + + // @@protoc_insertion_point(class_scope:flyteidl.admin.GetTaskMetricsResponse) + private: + class HasBitSetters; + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ExecutionMetricResult > results_; + mutable ::google::protobuf::internal::CachedSize _cached_size_; + friend struct ::TableStruct_flyteidl_2fadmin_2fagent_2eproto; +}; +// ------------------------------------------------------------------- + +class GetTaskLogsRequest final : + public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.admin.GetTaskLogsRequest) */ { + public: + GetTaskLogsRequest(); + virtual ~GetTaskLogsRequest(); + + GetTaskLogsRequest(const GetTaskLogsRequest& from); + + inline GetTaskLogsRequest& operator=(const GetTaskLogsRequest& from) { + CopyFrom(from); + return *this; + } + #if LANG_CXX11 + GetTaskLogsRequest(GetTaskLogsRequest&& from) noexcept + : GetTaskLogsRequest() { + *this = ::std::move(from); + } + + inline GetTaskLogsRequest& operator=(GetTaskLogsRequest&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + #endif + static const ::google::protobuf::Descriptor* descriptor() { + return default_instance().GetDescriptor(); + } + static const GetTaskLogsRequest& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const GetTaskLogsRequest* internal_default_instance() { + return reinterpret_cast( + &_GetTaskLogsRequest_default_instance_); + } + static 
constexpr int kIndexInFileMessages = + 18; + + void Swap(GetTaskLogsRequest* other); + friend void swap(GetTaskLogsRequest& a, GetTaskLogsRequest& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline GetTaskLogsRequest* New() const final { + return CreateMaybeMessage(nullptr); + } + + GetTaskLogsRequest* New(::google::protobuf::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::google::protobuf::Message& from) final; + void MergeFrom(const ::google::protobuf::Message& from) final; + void CopyFrom(const GetTaskLogsRequest& from); + void MergeFrom(const GetTaskLogsRequest& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); + ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } + #else + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const final; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(GetTaskLogsRequest* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::google::protobuf::Metadata GetMetadata() const final; + + // nested types ---------------------------------------------------- + + // accessors 
------------------------------------------------------- + + // string task_type = 1; + void clear_task_type(); + static const int kTaskTypeFieldNumber = 1; + const ::std::string& task_type() const; + void set_task_type(const ::std::string& value); + #if LANG_CXX11 + void set_task_type(::std::string&& value); + #endif + void set_task_type(const char* value); + void set_task_type(const char* value, size_t size); + ::std::string* mutable_task_type(); + ::std::string* release_task_type(); + void set_allocated_task_type(::std::string* task_type); + + // bytes resource_meta = 2; + void clear_resource_meta(); + static const int kResourceMetaFieldNumber = 2; + const ::std::string& resource_meta() const; + void set_resource_meta(const ::std::string& value); + #if LANG_CXX11 + void set_resource_meta(::std::string&& value); + #endif + void set_resource_meta(const char* value); + void set_resource_meta(const void* value, size_t size); + ::std::string* mutable_resource_meta(); + ::std::string* release_resource_meta(); + void set_allocated_resource_meta(::std::string* resource_meta); + + // string token = 4; + void clear_token(); + static const int kTokenFieldNumber = 4; + const ::std::string& token() const; + void set_token(const ::std::string& value); + #if LANG_CXX11 + void set_token(::std::string&& value); + #endif + void set_token(const char* value); + void set_token(const char* value, size_t size); + ::std::string* mutable_token(); + ::std::string* release_token(); + void set_allocated_token(::std::string* token); + + // uint64 lines = 3; + void clear_lines(); + static const int kLinesFieldNumber = 3; + ::google::protobuf::uint64 lines() const; + void set_lines(::google::protobuf::uint64 value); + + // @@protoc_insertion_point(class_scope:flyteidl.admin.GetTaskLogsRequest) + private: + class HasBitSetters; + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::internal::ArenaStringPtr task_type_; + 
::google::protobuf::internal::ArenaStringPtr resource_meta_; + ::google::protobuf::internal::ArenaStringPtr token_; + ::google::protobuf::uint64 lines_; + mutable ::google::protobuf::internal::CachedSize _cached_size_; + friend struct ::TableStruct_flyteidl_2fadmin_2fagent_2eproto; +}; +// ------------------------------------------------------------------- + +class GetTaskLogsResponse final : + public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.admin.GetTaskLogsResponse) */ { + public: + GetTaskLogsResponse(); + virtual ~GetTaskLogsResponse(); + + GetTaskLogsResponse(const GetTaskLogsResponse& from); + + inline GetTaskLogsResponse& operator=(const GetTaskLogsResponse& from) { + CopyFrom(from); + return *this; + } + #if LANG_CXX11 + GetTaskLogsResponse(GetTaskLogsResponse&& from) noexcept + : GetTaskLogsResponse() { + *this = ::std::move(from); + } + + inline GetTaskLogsResponse& operator=(GetTaskLogsResponse&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + #endif + static const ::google::protobuf::Descriptor* descriptor() { + return default_instance().GetDescriptor(); + } + static const GetTaskLogsResponse& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const GetTaskLogsResponse* internal_default_instance() { + return reinterpret_cast( + &_GetTaskLogsResponse_default_instance_); + } + static constexpr int kIndexInFileMessages = + 19; + + void Swap(GetTaskLogsResponse* other); + friend void swap(GetTaskLogsResponse& a, GetTaskLogsResponse& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline GetTaskLogsResponse* New() const final { + return CreateMaybeMessage(nullptr); + } + + GetTaskLogsResponse* New(::google::protobuf::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void 
CopyFrom(const ::google::protobuf::Message& from) final; + void MergeFrom(const ::google::protobuf::Message& from) final; + void CopyFrom(const GetTaskLogsResponse& from); + void MergeFrom(const GetTaskLogsResponse& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); + ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } + #else + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const final; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(GetTaskLogsResponse* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::google::protobuf::Metadata GetMetadata() const final; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // repeated string results = 1; + int results_size() const; + void clear_results(); + static const int kResultsFieldNumber = 1; + const ::std::string& results(int index) const; + ::std::string* mutable_results(int index); + void set_results(int index, const ::std::string& value); + #if LANG_CXX11 + void set_results(int index, ::std::string&& value); + #endif + void set_results(int index, const 
char* value); + void set_results(int index, const char* value, size_t size); + ::std::string* add_results(); + void add_results(const ::std::string& value); + #if LANG_CXX11 + void add_results(::std::string&& value); + #endif + void add_results(const char* value); + void add_results(const char* value, size_t size); + const ::google::protobuf::RepeatedPtrField<::std::string>& results() const; + ::google::protobuf::RepeatedPtrField<::std::string>* mutable_results(); + + // string token = 2; + void clear_token(); + static const int kTokenFieldNumber = 2; + const ::std::string& token() const; + void set_token(const ::std::string& value); + #if LANG_CXX11 + void set_token(::std::string&& value); + #endif + void set_token(const char* value); + void set_token(const char* value, size_t size); + ::std::string* mutable_token(); + ::std::string* release_token(); + void set_allocated_token(::std::string* token); + + // @@protoc_insertion_point(class_scope:flyteidl.admin.GetTaskLogsResponse) + private: + class HasBitSetters; + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::RepeatedPtrField<::std::string> results_; + ::google::protobuf::internal::ArenaStringPtr token_; + mutable ::google::protobuf::internal::CachedSize _cached_size_; + friend struct ::TableStruct_flyteidl_2fadmin_2fagent_2eproto; +}; +// =================================================================== + + +// =================================================================== + +#ifdef __GNUC__ + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wstrict-aliasing" +#endif // __GNUC__ +// ------------------------------------------------------------------- + +// ------------------------------------------------------------------- + +// ------------------------------------------------------------------- + +// TaskExecutionMetadata + +// .flyteidl.core.TaskExecutionIdentifier task_execution_id = 1; +inline bool 
TaskExecutionMetadata::has_task_execution_id() const { + return this != internal_default_instance() && task_execution_id_ != nullptr; +} +inline const ::flyteidl::core::TaskExecutionIdentifier& TaskExecutionMetadata::task_execution_id() const { + const ::flyteidl::core::TaskExecutionIdentifier* p = task_execution_id_; + // @@protoc_insertion_point(field_get:flyteidl.admin.TaskExecutionMetadata.task_execution_id) + return p != nullptr ? *p : *reinterpret_cast( + &::flyteidl::core::_TaskExecutionIdentifier_default_instance_); +} +inline ::flyteidl::core::TaskExecutionIdentifier* TaskExecutionMetadata::release_task_execution_id() { + // @@protoc_insertion_point(field_release:flyteidl.admin.TaskExecutionMetadata.task_execution_id) + + ::flyteidl::core::TaskExecutionIdentifier* temp = task_execution_id_; + task_execution_id_ = nullptr; + return temp; +} +inline ::flyteidl::core::TaskExecutionIdentifier* TaskExecutionMetadata::mutable_task_execution_id() { + + if (task_execution_id_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::core::TaskExecutionIdentifier>(GetArenaNoVirtual()); + task_execution_id_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.TaskExecutionMetadata.task_execution_id) + return task_execution_id_; +} +inline void TaskExecutionMetadata::set_allocated_task_execution_id(::flyteidl::core::TaskExecutionIdentifier* task_execution_id) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(task_execution_id_); + } + if (task_execution_id) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + task_execution_id = ::google::protobuf::internal::GetOwnedMessage( + message_arena, task_execution_id, submessage_arena); + } + + } else { + + } + task_execution_id_ = task_execution_id; + // 
@@protoc_insertion_point(field_set_allocated:flyteidl.admin.TaskExecutionMetadata.task_execution_id) +} + +// string namespace = 2; +inline void TaskExecutionMetadata::clear_namespace_() { + namespace__.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& TaskExecutionMetadata::namespace_() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.TaskExecutionMetadata.namespace) + return namespace__.GetNoArena(); +} +inline void TaskExecutionMetadata::set_namespace_(const ::std::string& value) { + + namespace__.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.TaskExecutionMetadata.namespace) +} +#if LANG_CXX11 +inline void TaskExecutionMetadata::set_namespace_(::std::string&& value) { + + namespace__.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.TaskExecutionMetadata.namespace) +} +#endif +inline void TaskExecutionMetadata::set_namespace_(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + namespace__.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.TaskExecutionMetadata.namespace) +} +inline void TaskExecutionMetadata::set_namespace_(const char* value, size_t size) { + + namespace__.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.TaskExecutionMetadata.namespace) +} +inline ::std::string* TaskExecutionMetadata::mutable_namespace_() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.TaskExecutionMetadata.namespace) + return namespace__.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* 
TaskExecutionMetadata::release_namespace_() { + // @@protoc_insertion_point(field_release:flyteidl.admin.TaskExecutionMetadata.namespace) + + return namespace__.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void TaskExecutionMetadata::set_allocated_namespace_(::std::string* namespace_) { + if (namespace_ != nullptr) { + + } else { + + } + namespace__.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), namespace_); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.TaskExecutionMetadata.namespace) +} + +// map labels = 3; +inline int TaskExecutionMetadata::labels_size() const { + return labels_.size(); +} +inline void TaskExecutionMetadata::clear_labels() { + labels_.Clear(); +} +inline const ::google::protobuf::Map< ::std::string, ::std::string >& +TaskExecutionMetadata::labels() const { + // @@protoc_insertion_point(field_map:flyteidl.admin.TaskExecutionMetadata.labels) + return labels_.GetMap(); +} +inline ::google::protobuf::Map< ::std::string, ::std::string >* +TaskExecutionMetadata::mutable_labels() { + // @@protoc_insertion_point(field_mutable_map:flyteidl.admin.TaskExecutionMetadata.labels) + return labels_.MutableMap(); +} + +// map annotations = 4; +inline int TaskExecutionMetadata::annotations_size() const { + return annotations_.size(); +} +inline void TaskExecutionMetadata::clear_annotations() { + annotations_.Clear(); +} +inline const ::google::protobuf::Map< ::std::string, ::std::string >& +TaskExecutionMetadata::annotations() const { + // @@protoc_insertion_point(field_map:flyteidl.admin.TaskExecutionMetadata.annotations) + return annotations_.GetMap(); +} +inline ::google::protobuf::Map< ::std::string, ::std::string >* +TaskExecutionMetadata::mutable_annotations() { + // @@protoc_insertion_point(field_mutable_map:flyteidl.admin.TaskExecutionMetadata.annotations) + return annotations_.MutableMap(); +} + +// string k8s_service_account = 5; +inline void 
TaskExecutionMetadata::clear_k8s_service_account() { + k8s_service_account_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& TaskExecutionMetadata::k8s_service_account() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) + return k8s_service_account_.GetNoArena(); +} +inline void TaskExecutionMetadata::set_k8s_service_account(const ::std::string& value) { + + k8s_service_account_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) +} +#if LANG_CXX11 +inline void TaskExecutionMetadata::set_k8s_service_account(::std::string&& value) { + + k8s_service_account_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) +} +#endif +inline void TaskExecutionMetadata::set_k8s_service_account(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + k8s_service_account_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) +} +inline void TaskExecutionMetadata::set_k8s_service_account(const char* value, size_t size) { + + k8s_service_account_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) +} +inline ::std::string* TaskExecutionMetadata::mutable_k8s_service_account() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) + return 
k8s_service_account_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* TaskExecutionMetadata::release_k8s_service_account() { + // @@protoc_insertion_point(field_release:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) + + return k8s_service_account_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void TaskExecutionMetadata::set_allocated_k8s_service_account(::std::string* k8s_service_account) { + if (k8s_service_account != nullptr) { + + } else { + + } + k8s_service_account_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), k8s_service_account); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.TaskExecutionMetadata.k8s_service_account) +} + +// map environment_variables = 6; +inline int TaskExecutionMetadata::environment_variables_size() const { + return environment_variables_.size(); +} +inline void TaskExecutionMetadata::clear_environment_variables() { + environment_variables_.Clear(); +} +inline const ::google::protobuf::Map< ::std::string, ::std::string >& +TaskExecutionMetadata::environment_variables() const { + // @@protoc_insertion_point(field_map:flyteidl.admin.TaskExecutionMetadata.environment_variables) + return environment_variables_.GetMap(); +} +inline ::google::protobuf::Map< ::std::string, ::std::string >* +TaskExecutionMetadata::mutable_environment_variables() { + // @@protoc_insertion_point(field_mutable_map:flyteidl.admin.TaskExecutionMetadata.environment_variables) + return environment_variables_.MutableMap(); +} + +// ------------------------------------------------------------------- + +// CreateTaskRequest + +// .flyteidl.core.LiteralMap inputs = 1; +inline bool CreateTaskRequest::has_inputs() const { + return this != internal_default_instance() && inputs_ != nullptr; +} +inline const ::flyteidl::core::LiteralMap& CreateTaskRequest::inputs() const { + const ::flyteidl::core::LiteralMap* p = 
inputs_; + // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskRequest.inputs) + return p != nullptr ? *p : *reinterpret_cast( + &::flyteidl::core::_LiteralMap_default_instance_); +} +inline ::flyteidl::core::LiteralMap* CreateTaskRequest::release_inputs() { + // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskRequest.inputs) + + ::flyteidl::core::LiteralMap* temp = inputs_; + inputs_ = nullptr; + return temp; +} +inline ::flyteidl::core::LiteralMap* CreateTaskRequest::mutable_inputs() { + + if (inputs_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::core::LiteralMap>(GetArenaNoVirtual()); + inputs_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskRequest.inputs) + return inputs_; +} +inline void CreateTaskRequest::set_allocated_inputs(::flyteidl::core::LiteralMap* inputs) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(inputs_); + } + if (inputs) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + inputs = ::google::protobuf::internal::GetOwnedMessage( + message_arena, inputs, submessage_arena); + } + + } else { + + } + inputs_ = inputs; + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.CreateTaskRequest.inputs) +} + +// .flyteidl.core.TaskTemplate template = 2; +inline bool CreateTaskRequest::has_template_() const { + return this != internal_default_instance() && template__ != nullptr; +} +inline const ::flyteidl::core::TaskTemplate& CreateTaskRequest::template_() const { + const ::flyteidl::core::TaskTemplate* p = template__; + // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskRequest.template) + return p != nullptr ? 
*p : *reinterpret_cast( + &::flyteidl::core::_TaskTemplate_default_instance_); +} +inline ::flyteidl::core::TaskTemplate* CreateTaskRequest::release_template_() { + // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskRequest.template) + + ::flyteidl::core::TaskTemplate* temp = template__; + template__ = nullptr; + return temp; +} +inline ::flyteidl::core::TaskTemplate* CreateTaskRequest::mutable_template_() { + + if (template__ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::core::TaskTemplate>(GetArenaNoVirtual()); + template__ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskRequest.template) + return template__; +} +inline void CreateTaskRequest::set_allocated_template_(::flyteidl::core::TaskTemplate* template_) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(template__); + } + if (template_) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + template_ = ::google::protobuf::internal::GetOwnedMessage( + message_arena, template_, submessage_arena); + } + + } else { + + } + template__ = template_; + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.CreateTaskRequest.template) +} + +// string output_prefix = 3; +inline void CreateTaskRequest::clear_output_prefix() { + output_prefix_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& CreateTaskRequest::output_prefix() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskRequest.output_prefix) + return output_prefix_.GetNoArena(); +} +inline void CreateTaskRequest::set_output_prefix(const ::std::string& value) { + + output_prefix_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.CreateTaskRequest.output_prefix) +} +#if 
LANG_CXX11 +inline void CreateTaskRequest::set_output_prefix(::std::string&& value) { + + output_prefix_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.CreateTaskRequest.output_prefix) +} +#endif +inline void CreateTaskRequest::set_output_prefix(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + output_prefix_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.CreateTaskRequest.output_prefix) +} +inline void CreateTaskRequest::set_output_prefix(const char* value, size_t size) { + + output_prefix_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.CreateTaskRequest.output_prefix) +} +inline ::std::string* CreateTaskRequest::mutable_output_prefix() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskRequest.output_prefix) + return output_prefix_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* CreateTaskRequest::release_output_prefix() { + // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskRequest.output_prefix) + + return output_prefix_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void CreateTaskRequest::set_allocated_output_prefix(::std::string* output_prefix) { + if (output_prefix != nullptr) { + + } else { + + } + output_prefix_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), output_prefix); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.CreateTaskRequest.output_prefix) +} + +// .flyteidl.admin.TaskExecutionMetadata task_execution_metadata = 4; +inline bool CreateTaskRequest::has_task_execution_metadata() const { + return 
this != internal_default_instance() && task_execution_metadata_ != nullptr; +} +inline void CreateTaskRequest::clear_task_execution_metadata() { + if (GetArenaNoVirtual() == nullptr && task_execution_metadata_ != nullptr) { + delete task_execution_metadata_; + } + task_execution_metadata_ = nullptr; +} +inline const ::flyteidl::admin::TaskExecutionMetadata& CreateTaskRequest::task_execution_metadata() const { + const ::flyteidl::admin::TaskExecutionMetadata* p = task_execution_metadata_; + // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskRequest.task_execution_metadata) + return p != nullptr ? *p : *reinterpret_cast( + &::flyteidl::admin::_TaskExecutionMetadata_default_instance_); +} +inline ::flyteidl::admin::TaskExecutionMetadata* CreateTaskRequest::release_task_execution_metadata() { + // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskRequest.task_execution_metadata) + + ::flyteidl::admin::TaskExecutionMetadata* temp = task_execution_metadata_; + task_execution_metadata_ = nullptr; + return temp; +} +inline ::flyteidl::admin::TaskExecutionMetadata* CreateTaskRequest::mutable_task_execution_metadata() { + + if (task_execution_metadata_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::admin::TaskExecutionMetadata>(GetArenaNoVirtual()); + task_execution_metadata_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskRequest.task_execution_metadata) + return task_execution_metadata_; +} +inline void CreateTaskRequest::set_allocated_task_execution_metadata(::flyteidl::admin::TaskExecutionMetadata* task_execution_metadata) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete task_execution_metadata_; + } + if (task_execution_metadata) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + task_execution_metadata = ::google::protobuf::internal::GetOwnedMessage( + message_arena, task_execution_metadata, 
submessage_arena); + } + + } else { + + } + task_execution_metadata_ = task_execution_metadata; + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.CreateTaskRequest.task_execution_metadata) +} + +// ------------------------------------------------------------------- + +// CreateTaskResponse + +// bytes resource_meta = 1; +inline bool CreateTaskResponse::has_resource_meta() const { + return res_case() == kResourceMeta; +} +inline void CreateTaskResponse::set_has_resource_meta() { + _oneof_case_[0] = kResourceMeta; +} +inline void CreateTaskResponse::clear_resource_meta() { + if (has_resource_meta()) { + res_.resource_meta_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_res(); + } +} +inline const ::std::string& CreateTaskResponse::resource_meta() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskResponse.resource_meta) + if (has_resource_meta()) { + return res_.resource_meta_.GetNoArena(); + } + return *&::google::protobuf::internal::GetEmptyStringAlreadyInited(); +} +inline void CreateTaskResponse::set_resource_meta(const ::std::string& value) { + // @@protoc_insertion_point(field_set:flyteidl.admin.CreateTaskResponse.resource_meta) + if (!has_resource_meta()) { + clear_res(); + set_has_resource_meta(); + res_.resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + res_.resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.CreateTaskResponse.resource_meta) +} +#if LANG_CXX11 +inline void CreateTaskResponse::set_resource_meta(::std::string&& value) { + // @@protoc_insertion_point(field_set:flyteidl.admin.CreateTaskResponse.resource_meta) + if (!has_resource_meta()) { + clear_res(); + set_has_resource_meta(); + res_.resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + 
res_.resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.CreateTaskResponse.resource_meta) +} +#endif +inline void CreateTaskResponse::set_resource_meta(const char* value) { + GOOGLE_DCHECK(value != nullptr); + if (!has_resource_meta()) { + clear_res(); + set_has_resource_meta(); + res_.resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + res_.resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.CreateTaskResponse.resource_meta) +} +inline void CreateTaskResponse::set_resource_meta(const void* value, size_t size) { + if (!has_resource_meta()) { + clear_res(); + set_has_resource_meta(); + res_.resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + res_.resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string( + reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.CreateTaskResponse.resource_meta) +} +inline ::std::string* CreateTaskResponse::mutable_resource_meta() { + if (!has_resource_meta()) { + clear_res(); + set_has_resource_meta(); + res_.resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskResponse.resource_meta) + return res_.resource_meta_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* CreateTaskResponse::release_resource_meta() { + // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskResponse.resource_meta) + if (has_resource_meta()) { + clear_has_res(); + return res_.resource_meta_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + 
} else { + return nullptr; + } +} +inline void CreateTaskResponse::set_allocated_resource_meta(::std::string* resource_meta) { + if (has_res()) { + clear_res(); + } + if (resource_meta != nullptr) { + set_has_resource_meta(); + res_.resource_meta_.UnsafeSetDefault(resource_meta); + } + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.CreateTaskResponse.resource_meta) +} + +// .flyteidl.admin.Resource resource = 2; +inline bool CreateTaskResponse::has_resource() const { + return res_case() == kResource; +} +inline void CreateTaskResponse::set_has_resource() { + _oneof_case_[0] = kResource; +} +inline void CreateTaskResponse::clear_resource() { + if (has_resource()) { + delete res_.resource_; + clear_has_res(); + } +} +inline ::flyteidl::admin::Resource* CreateTaskResponse::release_resource() { + // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskResponse.resource) + if (has_resource()) { + clear_has_res(); + ::flyteidl::admin::Resource* temp = res_.resource_; + res_.resource_ = nullptr; + return temp; + } else { + return nullptr; + } +} +inline const ::flyteidl::admin::Resource& CreateTaskResponse::resource() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskResponse.resource) + return has_resource() + ? 
*res_.resource_ + : *reinterpret_cast< ::flyteidl::admin::Resource*>(&::flyteidl::admin::_Resource_default_instance_); +} +inline ::flyteidl::admin::Resource* CreateTaskResponse::mutable_resource() { + if (!has_resource()) { + clear_res(); + set_has_resource(); + res_.resource_ = CreateMaybeMessage< ::flyteidl::admin::Resource >( + GetArenaNoVirtual()); + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskResponse.resource) + return res_.resource_; +} + +inline bool CreateTaskResponse::has_res() const { + return res_case() != RES_NOT_SET; +} +inline void CreateTaskResponse::clear_has_res() { + _oneof_case_[0] = RES_NOT_SET; +} +inline CreateTaskResponse::ResCase CreateTaskResponse::res_case() const { + return CreateTaskResponse::ResCase(_oneof_case_[0]); +} +// ------------------------------------------------------------------- + +// GetTaskRequest + +// string task_type = 1; +inline void GetTaskRequest::clear_task_type() { + task_type_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& GetTaskRequest::task_type() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskRequest.task_type) + return task_type_.GetNoArena(); +} +inline void GetTaskRequest::set_task_type(const ::std::string& value) { + + task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskRequest.task_type) +} +#if LANG_CXX11 +inline void GetTaskRequest::set_task_type(::std::string&& value) { + + task_type_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetTaskRequest.task_type) +} +#endif +inline void GetTaskRequest::set_task_type(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + 
// @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskRequest.task_type) +} +inline void GetTaskRequest::set_task_type(const char* value, size_t size) { + + task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskRequest.task_type) +} +inline ::std::string* GetTaskRequest::mutable_task_type() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskRequest.task_type) + return task_type_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* GetTaskRequest::release_task_type() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskRequest.task_type) + + return task_type_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void GetTaskRequest::set_allocated_task_type(::std::string* task_type) { + if (task_type != nullptr) { + + } else { + + } + task_type_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), task_type); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskRequest.task_type) +} + +// bytes resource_meta = 2; +inline void GetTaskRequest::clear_resource_meta() { + resource_meta_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& GetTaskRequest::resource_meta() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskRequest.resource_meta) + return resource_meta_.GetNoArena(); +} +inline void GetTaskRequest::set_resource_meta(const ::std::string& value) { + + resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskRequest.resource_meta) +} +#if LANG_CXX11 +inline void GetTaskRequest::set_resource_meta(::std::string&& value) { + + resource_meta_.SetNoArena( + 
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetTaskRequest.resource_meta) +} +#endif +inline void GetTaskRequest::set_resource_meta(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskRequest.resource_meta) +} +inline void GetTaskRequest::set_resource_meta(const void* value, size_t size) { + + resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskRequest.resource_meta) +} +inline ::std::string* GetTaskRequest::mutable_resource_meta() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskRequest.resource_meta) + return resource_meta_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* GetTaskRequest::release_resource_meta() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskRequest.resource_meta) + + return resource_meta_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void GetTaskRequest::set_allocated_resource_meta(::std::string* resource_meta) { + if (resource_meta != nullptr) { + + } else { + + } + resource_meta_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), resource_meta); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskRequest.resource_meta) +} + +// ------------------------------------------------------------------- + +// GetTaskResponse + +// .flyteidl.admin.Resource resource = 1; +inline bool GetTaskResponse::has_resource() const { + return this != internal_default_instance() && resource_ != nullptr; +} +inline void 
GetTaskResponse::clear_resource() { + if (GetArenaNoVirtual() == nullptr && resource_ != nullptr) { + delete resource_; + } + resource_ = nullptr; +} +inline const ::flyteidl::admin::Resource& GetTaskResponse::resource() const { + const ::flyteidl::admin::Resource* p = resource_; + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskResponse.resource) + return p != nullptr ? *p : *reinterpret_cast( + &::flyteidl::admin::_Resource_default_instance_); +} +inline ::flyteidl::admin::Resource* GetTaskResponse::release_resource() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskResponse.resource) + + ::flyteidl::admin::Resource* temp = resource_; + resource_ = nullptr; + return temp; +} +inline ::flyteidl::admin::Resource* GetTaskResponse::mutable_resource() { + + if (resource_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::admin::Resource>(GetArenaNoVirtual()); + resource_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskResponse.resource) + return resource_; +} +inline void GetTaskResponse::set_allocated_resource(::flyteidl::admin::Resource* resource) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete resource_; + } + if (resource) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + resource = ::google::protobuf::internal::GetOwnedMessage( + message_arena, resource, submessage_arena); + } + + } else { + + } + resource_ = resource; + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskResponse.resource) +} + +// repeated .flyteidl.core.TaskLog log_links = 2; +inline int GetTaskResponse::log_links_size() const { + return log_links_.size(); +} +inline ::flyteidl::core::TaskLog* GetTaskResponse::mutable_log_links(int index) { + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskResponse.log_links) + return log_links_.Mutable(index); +} +inline 
::google::protobuf::RepeatedPtrField< ::flyteidl::core::TaskLog >* +GetTaskResponse::mutable_log_links() { + // @@protoc_insertion_point(field_mutable_list:flyteidl.admin.GetTaskResponse.log_links) + return &log_links_; +} +inline const ::flyteidl::core::TaskLog& GetTaskResponse::log_links(int index) const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskResponse.log_links) + return log_links_.Get(index); +} +inline ::flyteidl::core::TaskLog* GetTaskResponse::add_log_links() { + // @@protoc_insertion_point(field_add:flyteidl.admin.GetTaskResponse.log_links) + return log_links_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::TaskLog >& +GetTaskResponse::log_links() const { + // @@protoc_insertion_point(field_list:flyteidl.admin.GetTaskResponse.log_links) + return log_links_; +} + +// ------------------------------------------------------------------- + +// Resource + +// .flyteidl.admin.State state = 1 [deprecated = true]; +inline void Resource::clear_state() { + state_ = 0; +} +inline ::flyteidl::admin::State Resource::state() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.Resource.state) + return static_cast< ::flyteidl::admin::State >(state_); +} +inline void Resource::set_state(::flyteidl::admin::State value) { + + state_ = value; + // @@protoc_insertion_point(field_set:flyteidl.admin.Resource.state) +} + +// .flyteidl.core.LiteralMap outputs = 2; +inline bool Resource::has_outputs() const { + return this != internal_default_instance() && outputs_ != nullptr; +} +inline const ::flyteidl::core::LiteralMap& Resource::outputs() const { + const ::flyteidl::core::LiteralMap* p = outputs_; + // @@protoc_insertion_point(field_get:flyteidl.admin.Resource.outputs) + return p != nullptr ? 
*p : *reinterpret_cast( &::flyteidl::core::_LiteralMap_default_instance_); } -inline ::flyteidl::core::LiteralMap* CreateTaskRequest::release_inputs() { - // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskRequest.inputs) +inline ::flyteidl::core::LiteralMap* Resource::release_outputs() { + // @@protoc_insertion_point(field_release:flyteidl.admin.Resource.outputs) + + ::flyteidl::core::LiteralMap* temp = outputs_; + outputs_ = nullptr; + return temp; +} +inline ::flyteidl::core::LiteralMap* Resource::mutable_outputs() { + + if (outputs_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::core::LiteralMap>(GetArenaNoVirtual()); + outputs_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.Resource.outputs) + return outputs_; +} +inline void Resource::set_allocated_outputs(::flyteidl::core::LiteralMap* outputs) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(outputs_); + } + if (outputs) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + outputs = ::google::protobuf::internal::GetOwnedMessage( + message_arena, outputs, submessage_arena); + } + + } else { + + } + outputs_ = outputs; + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.Resource.outputs) +} + +// string message = 3; +inline void Resource::clear_message() { + message_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& Resource::message() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.Resource.message) + return message_.GetNoArena(); +} +inline void Resource::set_message(const ::std::string& value) { + + message_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.Resource.message) +} +#if LANG_CXX11 +inline void 
Resource::set_message(::std::string&& value) { + + message_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.Resource.message) +} +#endif +inline void Resource::set_message(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + message_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.Resource.message) +} +inline void Resource::set_message(const char* value, size_t size) { + + message_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.Resource.message) +} +inline ::std::string* Resource::mutable_message() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.Resource.message) + return message_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* Resource::release_message() { + // @@protoc_insertion_point(field_release:flyteidl.admin.Resource.message) + + return message_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void Resource::set_allocated_message(::std::string* message) { + if (message != nullptr) { + + } else { + + } + message_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), message); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.Resource.message) +} + +// repeated .flyteidl.core.TaskLog log_links = 4; +inline int Resource::log_links_size() const { + return log_links_.size(); +} +inline ::flyteidl::core::TaskLog* Resource::mutable_log_links(int index) { + // @@protoc_insertion_point(field_mutable:flyteidl.admin.Resource.log_links) + return log_links_.Mutable(index); +} +inline ::google::protobuf::RepeatedPtrField< ::flyteidl::core::TaskLog >* 
+Resource::mutable_log_links() { + // @@protoc_insertion_point(field_mutable_list:flyteidl.admin.Resource.log_links) + return &log_links_; +} +inline const ::flyteidl::core::TaskLog& Resource::log_links(int index) const { + // @@protoc_insertion_point(field_get:flyteidl.admin.Resource.log_links) + return log_links_.Get(index); +} +inline ::flyteidl::core::TaskLog* Resource::add_log_links() { + // @@protoc_insertion_point(field_add:flyteidl.admin.Resource.log_links) + return log_links_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::TaskLog >& +Resource::log_links() const { + // @@protoc_insertion_point(field_list:flyteidl.admin.Resource.log_links) + return log_links_; +} + +// .flyteidl.core.TaskExecution.Phase phase = 5; +inline void Resource::clear_phase() { + phase_ = 0; +} +inline ::flyteidl::core::TaskExecution_Phase Resource::phase() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.Resource.phase) + return static_cast< ::flyteidl::core::TaskExecution_Phase >(phase_); +} +inline void Resource::set_phase(::flyteidl::core::TaskExecution_Phase value) { + + phase_ = value; + // @@protoc_insertion_point(field_set:flyteidl.admin.Resource.phase) +} + +// ------------------------------------------------------------------- + +// DeleteTaskRequest + +// string task_type = 1; +inline void DeleteTaskRequest::clear_task_type() { + task_type_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& DeleteTaskRequest::task_type() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.DeleteTaskRequest.task_type) + return task_type_.GetNoArena(); +} +inline void DeleteTaskRequest::set_task_type(const ::std::string& value) { + + task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.DeleteTaskRequest.task_type) +} +#if LANG_CXX11 +inline void 
DeleteTaskRequest::set_task_type(::std::string&& value) { - ::flyteidl::core::LiteralMap* temp = inputs_; - inputs_ = nullptr; - return temp; + task_type_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.DeleteTaskRequest.task_type) } -inline ::flyteidl::core::LiteralMap* CreateTaskRequest::mutable_inputs() { +#endif +inline void DeleteTaskRequest::set_task_type(const char* value) { + GOOGLE_DCHECK(value != nullptr); - if (inputs_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::LiteralMap>(GetArenaNoVirtual()); - inputs_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskRequest.inputs) - return inputs_; + task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.DeleteTaskRequest.task_type) } -inline void CreateTaskRequest::set_allocated_inputs(::flyteidl::core::LiteralMap* inputs) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(inputs_); - } - if (inputs) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - inputs = ::google::protobuf::internal::GetOwnedMessage( - message_arena, inputs, submessage_arena); - } +inline void DeleteTaskRequest::set_task_type(const char* value, size_t size) { + + task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.DeleteTaskRequest.task_type) +} +inline ::std::string* DeleteTaskRequest::mutable_task_type() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.DeleteTaskRequest.task_type) + return 
task_type_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* DeleteTaskRequest::release_task_type() { + // @@protoc_insertion_point(field_release:flyteidl.admin.DeleteTaskRequest.task_type) + + return task_type_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void DeleteTaskRequest::set_allocated_task_type(::std::string* task_type) { + if (task_type != nullptr) { } else { } - inputs_ = inputs; - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.CreateTaskRequest.inputs) + task_type_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), task_type); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.DeleteTaskRequest.task_type) } -// .flyteidl.core.TaskTemplate template = 2; -inline bool CreateTaskRequest::has_template_() const { - return this != internal_default_instance() && template__ != nullptr; +// bytes resource_meta = 2; +inline void DeleteTaskRequest::clear_resource_meta() { + resource_meta_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline const ::flyteidl::core::TaskTemplate& CreateTaskRequest::template_() const { - const ::flyteidl::core::TaskTemplate* p = template__; - // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskRequest.template) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_TaskTemplate_default_instance_); +inline const ::std::string& DeleteTaskRequest::resource_meta() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.DeleteTaskRequest.resource_meta) + return resource_meta_.GetNoArena(); } -inline ::flyteidl::core::TaskTemplate* CreateTaskRequest::release_template_() { - // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskRequest.template) +inline void DeleteTaskRequest::set_resource_meta(const ::std::string& value) { - ::flyteidl::core::TaskTemplate* temp = template__; - template__ = nullptr; - return temp; + resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.DeleteTaskRequest.resource_meta) } -inline ::flyteidl::core::TaskTemplate* CreateTaskRequest::mutable_template_() { +#if LANG_CXX11 +inline void DeleteTaskRequest::set_resource_meta(::std::string&& value) { - if (template__ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::TaskTemplate>(GetArenaNoVirtual()); - template__ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskRequest.template) - return template__; + resource_meta_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.DeleteTaskRequest.resource_meta) } -inline void CreateTaskRequest::set_allocated_template_(::flyteidl::core::TaskTemplate* template_) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(template__); +#endif +inline void DeleteTaskRequest::set_resource_meta(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // 
@@protoc_insertion_point(field_set_char:flyteidl.admin.DeleteTaskRequest.resource_meta) +} +inline void DeleteTaskRequest::set_resource_meta(const void* value, size_t size) { + + resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.DeleteTaskRequest.resource_meta) +} +inline ::std::string* DeleteTaskRequest::mutable_resource_meta() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.DeleteTaskRequest.resource_meta) + return resource_meta_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* DeleteTaskRequest::release_resource_meta() { + // @@protoc_insertion_point(field_release:flyteidl.admin.DeleteTaskRequest.resource_meta) + + return resource_meta_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void DeleteTaskRequest::set_allocated_resource_meta(::std::string* resource_meta) { + if (resource_meta != nullptr) { + + } else { + } - if (template_) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - template_ = ::google::protobuf::internal::GetOwnedMessage( - message_arena, template_, submessage_arena); - } + resource_meta_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), resource_meta); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.DeleteTaskRequest.resource_meta) +} + +// ------------------------------------------------------------------- + +// DeleteTaskResponse + +// ------------------------------------------------------------------- + +// Agent + +// string name = 1; +inline void Agent::clear_name() { + name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& Agent::name() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.Agent.name) + 
return name_.GetNoArena(); +} +inline void Agent::set_name(const ::std::string& value) { + + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.Agent.name) +} +#if LANG_CXX11 +inline void Agent::set_name(::std::string&& value) { + + name_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.Agent.name) +} +#endif +inline void Agent::set_name(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.Agent.name) +} +inline void Agent::set_name(const char* value, size_t size) { + + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.Agent.name) +} +inline ::std::string* Agent::mutable_name() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.Agent.name) + return name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* Agent::release_name() { + // @@protoc_insertion_point(field_release:flyteidl.admin.Agent.name) + + return name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void Agent::set_allocated_name(::std::string* name) { + if (name != nullptr) { } else { } - template__ = template_; - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.CreateTaskRequest.template) + name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), name); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.Agent.name) +} + +// repeated string supported_task_types = 2; +inline int Agent::supported_task_types_size() const { + return 
supported_task_types_.size(); +} +inline void Agent::clear_supported_task_types() { + supported_task_types_.Clear(); +} +inline const ::std::string& Agent::supported_task_types(int index) const { + // @@protoc_insertion_point(field_get:flyteidl.admin.Agent.supported_task_types) + return supported_task_types_.Get(index); +} +inline ::std::string* Agent::mutable_supported_task_types(int index) { + // @@protoc_insertion_point(field_mutable:flyteidl.admin.Agent.supported_task_types) + return supported_task_types_.Mutable(index); +} +inline void Agent::set_supported_task_types(int index, const ::std::string& value) { + // @@protoc_insertion_point(field_set:flyteidl.admin.Agent.supported_task_types) + supported_task_types_.Mutable(index)->assign(value); +} +#if LANG_CXX11 +inline void Agent::set_supported_task_types(int index, ::std::string&& value) { + // @@protoc_insertion_point(field_set:flyteidl.admin.Agent.supported_task_types) + supported_task_types_.Mutable(index)->assign(std::move(value)); +} +#endif +inline void Agent::set_supported_task_types(int index, const char* value) { + GOOGLE_DCHECK(value != nullptr); + supported_task_types_.Mutable(index)->assign(value); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.Agent.supported_task_types) +} +inline void Agent::set_supported_task_types(int index, const char* value, size_t size) { + supported_task_types_.Mutable(index)->assign( + reinterpret_cast(value), size); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.Agent.supported_task_types) +} +inline ::std::string* Agent::add_supported_task_types() { + // @@protoc_insertion_point(field_add_mutable:flyteidl.admin.Agent.supported_task_types) + return supported_task_types_.Add(); +} +inline void Agent::add_supported_task_types(const ::std::string& value) { + supported_task_types_.Add()->assign(value); + // @@protoc_insertion_point(field_add:flyteidl.admin.Agent.supported_task_types) +} +#if LANG_CXX11 +inline void 
Agent::add_supported_task_types(::std::string&& value) { + supported_task_types_.Add(std::move(value)); + // @@protoc_insertion_point(field_add:flyteidl.admin.Agent.supported_task_types) +} +#endif +inline void Agent::add_supported_task_types(const char* value) { + GOOGLE_DCHECK(value != nullptr); + supported_task_types_.Add()->assign(value); + // @@protoc_insertion_point(field_add_char:flyteidl.admin.Agent.supported_task_types) +} +inline void Agent::add_supported_task_types(const char* value, size_t size) { + supported_task_types_.Add()->assign(reinterpret_cast(value), size); + // @@protoc_insertion_point(field_add_pointer:flyteidl.admin.Agent.supported_task_types) +} +inline const ::google::protobuf::RepeatedPtrField<::std::string>& +Agent::supported_task_types() const { + // @@protoc_insertion_point(field_list:flyteidl.admin.Agent.supported_task_types) + return supported_task_types_; +} +inline ::google::protobuf::RepeatedPtrField<::std::string>* +Agent::mutable_supported_task_types() { + // @@protoc_insertion_point(field_mutable_list:flyteidl.admin.Agent.supported_task_types) + return &supported_task_types_; } -// string output_prefix = 3; -inline void CreateTaskRequest::clear_output_prefix() { - output_prefix_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +// ------------------------------------------------------------------- + +// GetAgentRequest + +// string name = 1; +inline void GetAgentRequest::clear_name() { + name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline const ::std::string& CreateTaskRequest::output_prefix() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskRequest.output_prefix) - return output_prefix_.GetNoArena(); +inline const ::std::string& GetAgentRequest::name() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetAgentRequest.name) + return name_.GetNoArena(); } -inline void CreateTaskRequest::set_output_prefix(const 
::std::string& value) { +inline void GetAgentRequest::set_name(const ::std::string& value) { - output_prefix_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.admin.CreateTaskRequest.output_prefix) + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.GetAgentRequest.name) } #if LANG_CXX11 -inline void CreateTaskRequest::set_output_prefix(::std::string&& value) { +inline void GetAgentRequest::set_name(::std::string&& value) { - output_prefix_.SetNoArena( + name_.SetNoArena( &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.CreateTaskRequest.output_prefix) + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetAgentRequest.name) } #endif -inline void CreateTaskRequest::set_output_prefix(const char* value) { +inline void GetAgentRequest::set_name(const char* value) { GOOGLE_DCHECK(value != nullptr); - output_prefix_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.admin.CreateTaskRequest.output_prefix) + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetAgentRequest.name) } -inline void CreateTaskRequest::set_output_prefix(const char* value, size_t size) { +inline void GetAgentRequest::set_name(const char* value, size_t size) { - output_prefix_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.CreateTaskRequest.output_prefix) + // 
@@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetAgentRequest.name) } -inline ::std::string* CreateTaskRequest::mutable_output_prefix() { +inline ::std::string* GetAgentRequest::mutable_name() { - // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskRequest.output_prefix) - return output_prefix_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetAgentRequest.name) + return name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline ::std::string* CreateTaskRequest::release_output_prefix() { - // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskRequest.output_prefix) +inline ::std::string* GetAgentRequest::release_name() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetAgentRequest.name) - return output_prefix_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + return name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline void CreateTaskRequest::set_allocated_output_prefix(::std::string* output_prefix) { - if (output_prefix != nullptr) { +inline void GetAgentRequest::set_allocated_name(::std::string* name) { + if (name != nullptr) { } else { } - output_prefix_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), output_prefix); - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.CreateTaskRequest.output_prefix) + name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), name); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetAgentRequest.name) } -// .flyteidl.admin.TaskExecutionMetadata task_execution_metadata = 4; -inline bool CreateTaskRequest::has_task_execution_metadata() const { - return this != internal_default_instance() && task_execution_metadata_ != nullptr; +// 
------------------------------------------------------------------- + +// GetAgentResponse + +// .flyteidl.admin.Agent agent = 1; +inline bool GetAgentResponse::has_agent() const { + return this != internal_default_instance() && agent_ != nullptr; } -inline void CreateTaskRequest::clear_task_execution_metadata() { - if (GetArenaNoVirtual() == nullptr && task_execution_metadata_ != nullptr) { - delete task_execution_metadata_; +inline void GetAgentResponse::clear_agent() { + if (GetArenaNoVirtual() == nullptr && agent_ != nullptr) { + delete agent_; } - task_execution_metadata_ = nullptr; + agent_ = nullptr; } -inline const ::flyteidl::admin::TaskExecutionMetadata& CreateTaskRequest::task_execution_metadata() const { - const ::flyteidl::admin::TaskExecutionMetadata* p = task_execution_metadata_; - // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskRequest.task_execution_metadata) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::admin::_TaskExecutionMetadata_default_instance_); +inline const ::flyteidl::admin::Agent& GetAgentResponse::agent() const { + const ::flyteidl::admin::Agent* p = agent_; + // @@protoc_insertion_point(field_get:flyteidl.admin.GetAgentResponse.agent) + return p != nullptr ? 
*p : *reinterpret_cast( + &::flyteidl::admin::_Agent_default_instance_); } -inline ::flyteidl::admin::TaskExecutionMetadata* CreateTaskRequest::release_task_execution_metadata() { - // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskRequest.task_execution_metadata) +inline ::flyteidl::admin::Agent* GetAgentResponse::release_agent() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetAgentResponse.agent) - ::flyteidl::admin::TaskExecutionMetadata* temp = task_execution_metadata_; - task_execution_metadata_ = nullptr; + ::flyteidl::admin::Agent* temp = agent_; + agent_ = nullptr; return temp; } -inline ::flyteidl::admin::TaskExecutionMetadata* CreateTaskRequest::mutable_task_execution_metadata() { +inline ::flyteidl::admin::Agent* GetAgentResponse::mutable_agent() { - if (task_execution_metadata_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::admin::TaskExecutionMetadata>(GetArenaNoVirtual()); - task_execution_metadata_ = p; + if (agent_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::admin::Agent>(GetArenaNoVirtual()); + agent_ = p; } - // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskRequest.task_execution_metadata) - return task_execution_metadata_; + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetAgentResponse.agent) + return agent_; } -inline void CreateTaskRequest::set_allocated_task_execution_metadata(::flyteidl::admin::TaskExecutionMetadata* task_execution_metadata) { +inline void GetAgentResponse::set_allocated_agent(::flyteidl::admin::Agent* agent) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); if (message_arena == nullptr) { - delete task_execution_metadata_; + delete agent_; } - if (task_execution_metadata) { + if (agent) { ::google::protobuf::Arena* submessage_arena = nullptr; if (message_arena != submessage_arena) { - task_execution_metadata = ::google::protobuf::internal::GetOwnedMessage( - message_arena, task_execution_metadata, submessage_arena); + agent 
= ::google::protobuf::internal::GetOwnedMessage( + message_arena, agent, submessage_arena); } } else { } - task_execution_metadata_ = task_execution_metadata; - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.CreateTaskRequest.task_execution_metadata) + agent_ = agent; + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetAgentResponse.agent) } // ------------------------------------------------------------------- -// CreateTaskResponse +// ListAgentsRequest -// bytes resource_meta = 1; -inline bool CreateTaskResponse::has_resource_meta() const { - return res_case() == kResourceMeta; -} -inline void CreateTaskResponse::set_has_resource_meta() { - _oneof_case_[0] = kResourceMeta; -} -inline void CreateTaskResponse::clear_resource_meta() { - if (has_resource_meta()) { - res_.resource_meta_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - clear_has_res(); - } -} -inline const ::std::string& CreateTaskResponse::resource_meta() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskResponse.resource_meta) - if (has_resource_meta()) { - return res_.resource_meta_.GetNoArena(); - } - return *&::google::protobuf::internal::GetEmptyStringAlreadyInited(); -} -inline void CreateTaskResponse::set_resource_meta(const ::std::string& value) { - // @@protoc_insertion_point(field_set:flyteidl.admin.CreateTaskResponse.resource_meta) - if (!has_resource_meta()) { - clear_res(); - set_has_resource_meta(); - res_.resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - } - res_.resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.admin.CreateTaskResponse.resource_meta) -} -#if LANG_CXX11 -inline void CreateTaskResponse::set_resource_meta(::std::string&& value) { - // @@protoc_insertion_point(field_set:flyteidl.admin.CreateTaskResponse.resource_meta) - if (!has_resource_meta()) { 
- clear_res(); - set_has_resource_meta(); - res_.resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - } - res_.resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.CreateTaskResponse.resource_meta) -} -#endif -inline void CreateTaskResponse::set_resource_meta(const char* value) { - GOOGLE_DCHECK(value != nullptr); - if (!has_resource_meta()) { - clear_res(); - set_has_resource_meta(); - res_.resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - } - res_.resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.admin.CreateTaskResponse.resource_meta) -} -inline void CreateTaskResponse::set_resource_meta(const void* value, size_t size) { - if (!has_resource_meta()) { - clear_res(); - set_has_resource_meta(); - res_.resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - } - res_.resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string( - reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.CreateTaskResponse.resource_meta) -} -inline ::std::string* CreateTaskResponse::mutable_resource_meta() { - if (!has_resource_meta()) { - clear_res(); - set_has_resource_meta(); - res_.resource_meta_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - } - // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskResponse.resource_meta) - return res_.resource_meta_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* CreateTaskResponse::release_resource_meta() { - // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskResponse.resource_meta) - if 
(has_resource_meta()) { - clear_has_res(); - return res_.resource_meta_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - } else { - return nullptr; - } -} -inline void CreateTaskResponse::set_allocated_resource_meta(::std::string* resource_meta) { - if (has_res()) { - clear_res(); - } - if (resource_meta != nullptr) { - set_has_resource_meta(); - res_.resource_meta_.UnsafeSetDefault(resource_meta); - } - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.CreateTaskResponse.resource_meta) -} +// ------------------------------------------------------------------- -// .flyteidl.admin.Resource resource = 2; -inline bool CreateTaskResponse::has_resource() const { - return res_case() == kResource; -} -inline void CreateTaskResponse::set_has_resource() { - _oneof_case_[0] = kResource; -} -inline void CreateTaskResponse::clear_resource() { - if (has_resource()) { - delete res_.resource_; - clear_has_res(); - } -} -inline ::flyteidl::admin::Resource* CreateTaskResponse::release_resource() { - // @@protoc_insertion_point(field_release:flyteidl.admin.CreateTaskResponse.resource) - if (has_resource()) { - clear_has_res(); - ::flyteidl::admin::Resource* temp = res_.resource_; - res_.resource_ = nullptr; - return temp; - } else { - return nullptr; - } +// ListAgentsResponse + +// repeated .flyteidl.admin.Agent agents = 1; +inline int ListAgentsResponse::agents_size() const { + return agents_.size(); } -inline const ::flyteidl::admin::Resource& CreateTaskResponse::resource() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.CreateTaskResponse.resource) - return has_resource() - ? 
*res_.resource_ - : *reinterpret_cast< ::flyteidl::admin::Resource*>(&::flyteidl::admin::_Resource_default_instance_); +inline void ListAgentsResponse::clear_agents() { + agents_.Clear(); } -inline ::flyteidl::admin::Resource* CreateTaskResponse::mutable_resource() { - if (!has_resource()) { - clear_res(); - set_has_resource(); - res_.resource_ = CreateMaybeMessage< ::flyteidl::admin::Resource >( - GetArenaNoVirtual()); - } - // @@protoc_insertion_point(field_mutable:flyteidl.admin.CreateTaskResponse.resource) - return res_.resource_; +inline ::flyteidl::admin::Agent* ListAgentsResponse::mutable_agents(int index) { + // @@protoc_insertion_point(field_mutable:flyteidl.admin.ListAgentsResponse.agents) + return agents_.Mutable(index); } - -inline bool CreateTaskResponse::has_res() const { - return res_case() != RES_NOT_SET; +inline ::google::protobuf::RepeatedPtrField< ::flyteidl::admin::Agent >* +ListAgentsResponse::mutable_agents() { + // @@protoc_insertion_point(field_mutable_list:flyteidl.admin.ListAgentsResponse.agents) + return &agents_; } -inline void CreateTaskResponse::clear_has_res() { - _oneof_case_[0] = RES_NOT_SET; +inline const ::flyteidl::admin::Agent& ListAgentsResponse::agents(int index) const { + // @@protoc_insertion_point(field_get:flyteidl.admin.ListAgentsResponse.agents) + return agents_.Get(index); } -inline CreateTaskResponse::ResCase CreateTaskResponse::res_case() const { - return CreateTaskResponse::ResCase(_oneof_case_[0]); +inline ::flyteidl::admin::Agent* ListAgentsResponse::add_agents() { + // @@protoc_insertion_point(field_add:flyteidl.admin.ListAgentsResponse.agents) + return agents_.Add(); +} +inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::admin::Agent >& +ListAgentsResponse::agents() const { + // @@protoc_insertion_point(field_list:flyteidl.admin.ListAgentsResponse.agents) + return agents_; } + // ------------------------------------------------------------------- -// GetTaskRequest +// GetTaskMetricsRequest // string 
task_type = 1; -inline void GetTaskRequest::clear_task_type() { +inline void GetTaskMetricsRequest::clear_task_type() { task_type_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline const ::std::string& GetTaskRequest::task_type() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskRequest.task_type) +inline const ::std::string& GetTaskMetricsRequest::task_type() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskMetricsRequest.task_type) return task_type_.GetNoArena(); } -inline void GetTaskRequest::set_task_type(const ::std::string& value) { +inline void GetTaskMetricsRequest::set_task_type(const ::std::string& value) { task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskRequest.task_type) + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskMetricsRequest.task_type) } #if LANG_CXX11 -inline void GetTaskRequest::set_task_type(::std::string&& value) { +inline void GetTaskMetricsRequest::set_task_type(::std::string&& value) { task_type_.SetNoArena( &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetTaskRequest.task_type) + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetTaskMetricsRequest.task_type) } #endif -inline void GetTaskRequest::set_task_type(const char* value) { +inline void GetTaskMetricsRequest::set_task_type(const char* value) { GOOGLE_DCHECK(value != nullptr); task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskRequest.task_type) + // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskMetricsRequest.task_type) } -inline void GetTaskRequest::set_task_type(const char* value, size_t size) { +inline void 
GetTaskMetricsRequest::set_task_type(const char* value, size_t size) { task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskRequest.task_type) + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskMetricsRequest.task_type) } -inline ::std::string* GetTaskRequest::mutable_task_type() { +inline ::std::string* GetTaskMetricsRequest::mutable_task_type() { - // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskRequest.task_type) + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskMetricsRequest.task_type) return task_type_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline ::std::string* GetTaskRequest::release_task_type() { - // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskRequest.task_type) +inline ::std::string* GetTaskMetricsRequest::release_task_type() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskMetricsRequest.task_type) return task_type_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline void GetTaskRequest::set_allocated_task_type(::std::string* task_type) { +inline void GetTaskMetricsRequest::set_allocated_task_type(::std::string* task_type) { if (task_type != nullptr) { } else { } task_type_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), task_type); - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskRequest.task_type) + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskMetricsRequest.task_type) } // bytes resource_meta = 2; -inline void GetTaskRequest::clear_resource_meta() { +inline void GetTaskMetricsRequest::clear_resource_meta() { resource_meta_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline const ::std::string& 
GetTaskRequest::resource_meta() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskRequest.resource_meta) +inline const ::std::string& GetTaskMetricsRequest::resource_meta() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskMetricsRequest.resource_meta) return resource_meta_.GetNoArena(); } -inline void GetTaskRequest::set_resource_meta(const ::std::string& value) { +inline void GetTaskMetricsRequest::set_resource_meta(const ::std::string& value) { resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskRequest.resource_meta) + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskMetricsRequest.resource_meta) } #if LANG_CXX11 -inline void GetTaskRequest::set_resource_meta(::std::string&& value) { +inline void GetTaskMetricsRequest::set_resource_meta(::std::string&& value) { resource_meta_.SetNoArena( &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetTaskRequest.resource_meta) + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetTaskMetricsRequest.resource_meta) } #endif -inline void GetTaskRequest::set_resource_meta(const char* value) { +inline void GetTaskMetricsRequest::set_resource_meta(const char* value) { GOOGLE_DCHECK(value != nullptr); resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskRequest.resource_meta) + // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskMetricsRequest.resource_meta) } -inline void GetTaskRequest::set_resource_meta(const void* value, size_t size) { +inline void GetTaskMetricsRequest::set_resource_meta(const void* value, size_t size) { resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), 
::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskRequest.resource_meta) + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskMetricsRequest.resource_meta) } -inline ::std::string* GetTaskRequest::mutable_resource_meta() { +inline ::std::string* GetTaskMetricsRequest::mutable_resource_meta() { - // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskRequest.resource_meta) + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskMetricsRequest.resource_meta) return resource_meta_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline ::std::string* GetTaskRequest::release_resource_meta() { - // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskRequest.resource_meta) +inline ::std::string* GetTaskMetricsRequest::release_resource_meta() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskMetricsRequest.resource_meta) return resource_meta_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline void GetTaskRequest::set_allocated_resource_meta(::std::string* resource_meta) { +inline void GetTaskMetricsRequest::set_allocated_resource_meta(::std::string* resource_meta) { if (resource_meta != nullptr) { } else { } resource_meta_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), resource_meta); - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskRequest.resource_meta) + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskMetricsRequest.resource_meta) } -// ------------------------------------------------------------------- - -// GetTaskResponse - -// .flyteidl.admin.Resource resource = 1; -inline bool GetTaskResponse::has_resource() const { - return this != internal_default_instance() && resource_ != nullptr; +// repeated string queries = 3; +inline int GetTaskMetricsRequest::queries_size() const { + return 
queries_.size(); } -inline void GetTaskResponse::clear_resource() { - if (GetArenaNoVirtual() == nullptr && resource_ != nullptr) { - delete resource_; - } - resource_ = nullptr; +inline void GetTaskMetricsRequest::clear_queries() { + queries_.Clear(); } -inline const ::flyteidl::admin::Resource& GetTaskResponse::resource() const { - const ::flyteidl::admin::Resource* p = resource_; - // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskResponse.resource) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::admin::_Resource_default_instance_); +inline const ::std::string& GetTaskMetricsRequest::queries(int index) const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskMetricsRequest.queries) + return queries_.Get(index); } -inline ::flyteidl::admin::Resource* GetTaskResponse::release_resource() { - // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskResponse.resource) - - ::flyteidl::admin::Resource* temp = resource_; - resource_ = nullptr; - return temp; +inline ::std::string* GetTaskMetricsRequest::mutable_queries(int index) { + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskMetricsRequest.queries) + return queries_.Mutable(index); } -inline ::flyteidl::admin::Resource* GetTaskResponse::mutable_resource() { - - if (resource_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::admin::Resource>(GetArenaNoVirtual()); - resource_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskResponse.resource) - return resource_; +inline void GetTaskMetricsRequest::set_queries(int index, const ::std::string& value) { + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskMetricsRequest.queries) + queries_.Mutable(index)->assign(value); } -inline void GetTaskResponse::set_allocated_resource(::flyteidl::admin::Resource* resource) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete resource_; - } - if (resource) { - 
::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - resource = ::google::protobuf::internal::GetOwnedMessage( - message_arena, resource, submessage_arena); - } - - } else { - - } - resource_ = resource; - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskResponse.resource) +#if LANG_CXX11 +inline void GetTaskMetricsRequest::set_queries(int index, ::std::string&& value) { + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskMetricsRequest.queries) + queries_.Mutable(index)->assign(std::move(value)); } - -// repeated .flyteidl.core.TaskLog log_links = 2; -inline int GetTaskResponse::log_links_size() const { - return log_links_.size(); +#endif +inline void GetTaskMetricsRequest::set_queries(int index, const char* value) { + GOOGLE_DCHECK(value != nullptr); + queries_.Mutable(index)->assign(value); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskMetricsRequest.queries) } -inline ::flyteidl::core::TaskLog* GetTaskResponse::mutable_log_links(int index) { - // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskResponse.log_links) - return log_links_.Mutable(index); +inline void GetTaskMetricsRequest::set_queries(int index, const char* value, size_t size) { + queries_.Mutable(index)->assign( + reinterpret_cast(value), size); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskMetricsRequest.queries) } -inline ::google::protobuf::RepeatedPtrField< ::flyteidl::core::TaskLog >* -GetTaskResponse::mutable_log_links() { - // @@protoc_insertion_point(field_mutable_list:flyteidl.admin.GetTaskResponse.log_links) - return &log_links_; +inline ::std::string* GetTaskMetricsRequest::add_queries() { + // @@protoc_insertion_point(field_add_mutable:flyteidl.admin.GetTaskMetricsRequest.queries) + return queries_.Add(); } -inline const ::flyteidl::core::TaskLog& GetTaskResponse::log_links(int index) const { - // 
@@protoc_insertion_point(field_get:flyteidl.admin.GetTaskResponse.log_links) - return log_links_.Get(index); +inline void GetTaskMetricsRequest::add_queries(const ::std::string& value) { + queries_.Add()->assign(value); + // @@protoc_insertion_point(field_add:flyteidl.admin.GetTaskMetricsRequest.queries) } -inline ::flyteidl::core::TaskLog* GetTaskResponse::add_log_links() { - // @@protoc_insertion_point(field_add:flyteidl.admin.GetTaskResponse.log_links) - return log_links_.Add(); +#if LANG_CXX11 +inline void GetTaskMetricsRequest::add_queries(::std::string&& value) { + queries_.Add(std::move(value)); + // @@protoc_insertion_point(field_add:flyteidl.admin.GetTaskMetricsRequest.queries) } -inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::TaskLog >& -GetTaskResponse::log_links() const { - // @@protoc_insertion_point(field_list:flyteidl.admin.GetTaskResponse.log_links) - return log_links_; +#endif +inline void GetTaskMetricsRequest::add_queries(const char* value) { + GOOGLE_DCHECK(value != nullptr); + queries_.Add()->assign(value); + // @@protoc_insertion_point(field_add_char:flyteidl.admin.GetTaskMetricsRequest.queries) } - -// ------------------------------------------------------------------- - -// Resource - -// .flyteidl.admin.State state = 1; -inline void Resource::clear_state() { - state_ = 0; +inline void GetTaskMetricsRequest::add_queries(const char* value, size_t size) { + queries_.Add()->assign(reinterpret_cast(value), size); + // @@protoc_insertion_point(field_add_pointer:flyteidl.admin.GetTaskMetricsRequest.queries) } -inline ::flyteidl::admin::State Resource::state() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.Resource.state) - return static_cast< ::flyteidl::admin::State >(state_); +inline const ::google::protobuf::RepeatedPtrField<::std::string>& +GetTaskMetricsRequest::queries() const { + // @@protoc_insertion_point(field_list:flyteidl.admin.GetTaskMetricsRequest.queries) + return queries_; } -inline void 
Resource::set_state(::flyteidl::admin::State value) { - - state_ = value; - // @@protoc_insertion_point(field_set:flyteidl.admin.Resource.state) +inline ::google::protobuf::RepeatedPtrField<::std::string>* +GetTaskMetricsRequest::mutable_queries() { + // @@protoc_insertion_point(field_mutable_list:flyteidl.admin.GetTaskMetricsRequest.queries) + return &queries_; } -// .flyteidl.core.LiteralMap outputs = 2; -inline bool Resource::has_outputs() const { - return this != internal_default_instance() && outputs_ != nullptr; +// .google.protobuf.Timestamp start_time = 4; +inline bool GetTaskMetricsRequest::has_start_time() const { + return this != internal_default_instance() && start_time_ != nullptr; } -inline const ::flyteidl::core::LiteralMap& Resource::outputs() const { - const ::flyteidl::core::LiteralMap* p = outputs_; - // @@protoc_insertion_point(field_get:flyteidl.admin.Resource.outputs) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_LiteralMap_default_instance_); +inline const ::google::protobuf::Timestamp& GetTaskMetricsRequest::start_time() const { + const ::google::protobuf::Timestamp* p = start_time_; + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskMetricsRequest.start_time) + return p != nullptr ? 
*p : *reinterpret_cast( + &::google::protobuf::_Timestamp_default_instance_); } -inline ::flyteidl::core::LiteralMap* Resource::release_outputs() { - // @@protoc_insertion_point(field_release:flyteidl.admin.Resource.outputs) +inline ::google::protobuf::Timestamp* GetTaskMetricsRequest::release_start_time() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskMetricsRequest.start_time) - ::flyteidl::core::LiteralMap* temp = outputs_; - outputs_ = nullptr; + ::google::protobuf::Timestamp* temp = start_time_; + start_time_ = nullptr; return temp; } -inline ::flyteidl::core::LiteralMap* Resource::mutable_outputs() { +inline ::google::protobuf::Timestamp* GetTaskMetricsRequest::mutable_start_time() { - if (outputs_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::LiteralMap>(GetArenaNoVirtual()); - outputs_ = p; + if (start_time_ == nullptr) { + auto* p = CreateMaybeMessage<::google::protobuf::Timestamp>(GetArenaNoVirtual()); + start_time_ = p; } - // @@protoc_insertion_point(field_mutable:flyteidl.admin.Resource.outputs) - return outputs_; + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskMetricsRequest.start_time) + return start_time_; } -inline void Resource::set_allocated_outputs(::flyteidl::core::LiteralMap* outputs) { +inline void GetTaskMetricsRequest::set_allocated_start_time(::google::protobuf::Timestamp* start_time) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(outputs_); + delete reinterpret_cast< ::google::protobuf::MessageLite*>(start_time_); } - if (outputs) { - ::google::protobuf::Arena* submessage_arena = nullptr; + if (start_time) { + ::google::protobuf::Arena* submessage_arena = + reinterpret_cast<::google::protobuf::MessageLite*>(start_time)->GetArena(); if (message_arena != submessage_arena) { - outputs = ::google::protobuf::internal::GetOwnedMessage( - message_arena, outputs, submessage_arena); 
+ start_time = ::google::protobuf::internal::GetOwnedMessage( + message_arena, start_time, submessage_arena); } } else { } - outputs_ = outputs; - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.Resource.outputs) + start_time_ = start_time; + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskMetricsRequest.start_time) } -// string message = 3; -inline void Resource::clear_message() { - message_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +// .google.protobuf.Timestamp end_time = 5; +inline bool GetTaskMetricsRequest::has_end_time() const { + return this != internal_default_instance() && end_time_ != nullptr; } -inline const ::std::string& Resource::message() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.Resource.message) - return message_.GetNoArena(); +inline const ::google::protobuf::Timestamp& GetTaskMetricsRequest::end_time() const { + const ::google::protobuf::Timestamp* p = end_time_; + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskMetricsRequest.end_time) + return p != nullptr ? 
*p : *reinterpret_cast( + &::google::protobuf::_Timestamp_default_instance_); } -inline void Resource::set_message(const ::std::string& value) { +inline ::google::protobuf::Timestamp* GetTaskMetricsRequest::release_end_time() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskMetricsRequest.end_time) - message_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.admin.Resource.message) + ::google::protobuf::Timestamp* temp = end_time_; + end_time_ = nullptr; + return temp; } -#if LANG_CXX11 -inline void Resource::set_message(::std::string&& value) { +inline ::google::protobuf::Timestamp* GetTaskMetricsRequest::mutable_end_time() { - message_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.Resource.message) + if (end_time_ == nullptr) { + auto* p = CreateMaybeMessage<::google::protobuf::Timestamp>(GetArenaNoVirtual()); + end_time_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskMetricsRequest.end_time) + return end_time_; } -#endif -inline void Resource::set_message(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - message_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.admin.Resource.message) +inline void GetTaskMetricsRequest::set_allocated_end_time(::google::protobuf::Timestamp* end_time) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(end_time_); + } + if (end_time) { + ::google::protobuf::Arena* submessage_arena = + reinterpret_cast<::google::protobuf::MessageLite*>(end_time)->GetArena(); + if (message_arena != submessage_arena) { + end_time = ::google::protobuf::internal::GetOwnedMessage( + message_arena, 
end_time, submessage_arena); + } + + } else { + + } + end_time_ = end_time; + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskMetricsRequest.end_time) } -inline void Resource::set_message(const char* value, size_t size) { - - message_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.Resource.message) + +// .google.protobuf.Duration step = 6; +inline bool GetTaskMetricsRequest::has_step() const { + return this != internal_default_instance() && step_ != nullptr; } -inline ::std::string* Resource::mutable_message() { +inline const ::google::protobuf::Duration& GetTaskMetricsRequest::step() const { + const ::google::protobuf::Duration* p = step_; + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskMetricsRequest.step) + return p != nullptr ? *p : *reinterpret_cast( + &::google::protobuf::_Duration_default_instance_); +} +inline ::google::protobuf::Duration* GetTaskMetricsRequest::release_step() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskMetricsRequest.step) - // @@protoc_insertion_point(field_mutable:flyteidl.admin.Resource.message) - return message_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + ::google::protobuf::Duration* temp = step_; + step_ = nullptr; + return temp; } -inline ::std::string* Resource::release_message() { - // @@protoc_insertion_point(field_release:flyteidl.admin.Resource.message) +inline ::google::protobuf::Duration* GetTaskMetricsRequest::mutable_step() { - return message_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (step_ == nullptr) { + auto* p = CreateMaybeMessage<::google::protobuf::Duration>(GetArenaNoVirtual()); + step_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskMetricsRequest.step) + return step_; } -inline void 
Resource::set_allocated_message(::std::string* message) { - if (message != nullptr) { +inline void GetTaskMetricsRequest::set_allocated_step(::google::protobuf::Duration* step) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(step_); + } + if (step) { + ::google::protobuf::Arena* submessage_arena = + reinterpret_cast<::google::protobuf::MessageLite*>(step)->GetArena(); + if (message_arena != submessage_arena) { + step = ::google::protobuf::internal::GetOwnedMessage( + message_arena, step, submessage_arena); + } } else { } - message_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), message); - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.Resource.message) + step_ = step; + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskMetricsRequest.step) } -// repeated .flyteidl.core.TaskLog log_links = 4; -inline int Resource::log_links_size() const { - return log_links_.size(); +// ------------------------------------------------------------------- + +// GetTaskMetricsResponse + +// repeated .flyteidl.core.ExecutionMetricResult results = 1; +inline int GetTaskMetricsResponse::results_size() const { + return results_.size(); } -inline ::flyteidl::core::TaskLog* Resource::mutable_log_links(int index) { - // @@protoc_insertion_point(field_mutable:flyteidl.admin.Resource.log_links) - return log_links_.Mutable(index); +inline ::flyteidl::core::ExecutionMetricResult* GetTaskMetricsResponse::mutable_results(int index) { + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskMetricsResponse.results) + return results_.Mutable(index); } -inline ::google::protobuf::RepeatedPtrField< ::flyteidl::core::TaskLog >* -Resource::mutable_log_links() { - // @@protoc_insertion_point(field_mutable_list:flyteidl.admin.Resource.log_links) - return &log_links_; +inline ::google::protobuf::RepeatedPtrField< 
::flyteidl::core::ExecutionMetricResult >* +GetTaskMetricsResponse::mutable_results() { + // @@protoc_insertion_point(field_mutable_list:flyteidl.admin.GetTaskMetricsResponse.results) + return &results_; } -inline const ::flyteidl::core::TaskLog& Resource::log_links(int index) const { - // @@protoc_insertion_point(field_get:flyteidl.admin.Resource.log_links) - return log_links_.Get(index); +inline const ::flyteidl::core::ExecutionMetricResult& GetTaskMetricsResponse::results(int index) const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskMetricsResponse.results) + return results_.Get(index); } -inline ::flyteidl::core::TaskLog* Resource::add_log_links() { - // @@protoc_insertion_point(field_add:flyteidl.admin.Resource.log_links) - return log_links_.Add(); +inline ::flyteidl::core::ExecutionMetricResult* GetTaskMetricsResponse::add_results() { + // @@protoc_insertion_point(field_add:flyteidl.admin.GetTaskMetricsResponse.results) + return results_.Add(); } -inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::TaskLog >& -Resource::log_links() const { - // @@protoc_insertion_point(field_list:flyteidl.admin.Resource.log_links) - return log_links_; +inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ExecutionMetricResult >& +GetTaskMetricsResponse::results() const { + // @@protoc_insertion_point(field_list:flyteidl.admin.GetTaskMetricsResponse.results) + return results_; } // ------------------------------------------------------------------- -// DeleteTaskRequest +// GetTaskLogsRequest // string task_type = 1; -inline void DeleteTaskRequest::clear_task_type() { +inline void GetTaskLogsRequest::clear_task_type() { task_type_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline const ::std::string& DeleteTaskRequest::task_type() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.DeleteTaskRequest.task_type) +inline const ::std::string& GetTaskLogsRequest::task_type() 
const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskLogsRequest.task_type) return task_type_.GetNoArena(); } -inline void DeleteTaskRequest::set_task_type(const ::std::string& value) { +inline void GetTaskLogsRequest::set_task_type(const ::std::string& value) { task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.admin.DeleteTaskRequest.task_type) + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskLogsRequest.task_type) } #if LANG_CXX11 -inline void DeleteTaskRequest::set_task_type(::std::string&& value) { +inline void GetTaskLogsRequest::set_task_type(::std::string&& value) { task_type_.SetNoArena( &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.DeleteTaskRequest.task_type) + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetTaskLogsRequest.task_type) } #endif -inline void DeleteTaskRequest::set_task_type(const char* value) { +inline void GetTaskLogsRequest::set_task_type(const char* value) { GOOGLE_DCHECK(value != nullptr); task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.admin.DeleteTaskRequest.task_type) + // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskLogsRequest.task_type) } -inline void DeleteTaskRequest::set_task_type(const char* value, size_t size) { +inline void GetTaskLogsRequest::set_task_type(const char* value, size_t size) { task_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.DeleteTaskRequest.task_type) + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskLogsRequest.task_type) } -inline ::std::string* DeleteTaskRequest::mutable_task_type() { 
+inline ::std::string* GetTaskLogsRequest::mutable_task_type() { - // @@protoc_insertion_point(field_mutable:flyteidl.admin.DeleteTaskRequest.task_type) + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskLogsRequest.task_type) return task_type_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline ::std::string* DeleteTaskRequest::release_task_type() { - // @@protoc_insertion_point(field_release:flyteidl.admin.DeleteTaskRequest.task_type) +inline ::std::string* GetTaskLogsRequest::release_task_type() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskLogsRequest.task_type) return task_type_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline void DeleteTaskRequest::set_allocated_task_type(::std::string* task_type) { +inline void GetTaskLogsRequest::set_allocated_task_type(::std::string* task_type) { if (task_type != nullptr) { } else { } task_type_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), task_type); - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.DeleteTaskRequest.task_type) + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskLogsRequest.task_type) } // bytes resource_meta = 2; -inline void DeleteTaskRequest::clear_resource_meta() { +inline void GetTaskLogsRequest::clear_resource_meta() { resource_meta_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline const ::std::string& DeleteTaskRequest::resource_meta() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.DeleteTaskRequest.resource_meta) +inline const ::std::string& GetTaskLogsRequest::resource_meta() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskLogsRequest.resource_meta) return resource_meta_.GetNoArena(); } -inline void DeleteTaskRequest::set_resource_meta(const ::std::string& value) { +inline void GetTaskLogsRequest::set_resource_meta(const 
::std::string& value) { resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.admin.DeleteTaskRequest.resource_meta) + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskLogsRequest.resource_meta) } #if LANG_CXX11 -inline void DeleteTaskRequest::set_resource_meta(::std::string&& value) { +inline void GetTaskLogsRequest::set_resource_meta(::std::string&& value) { resource_meta_.SetNoArena( &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.DeleteTaskRequest.resource_meta) + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetTaskLogsRequest.resource_meta) } #endif -inline void DeleteTaskRequest::set_resource_meta(const char* value) { +inline void GetTaskLogsRequest::set_resource_meta(const char* value) { GOOGLE_DCHECK(value != nullptr); resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.admin.DeleteTaskRequest.resource_meta) + // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskLogsRequest.resource_meta) } -inline void DeleteTaskRequest::set_resource_meta(const void* value, size_t size) { +inline void GetTaskLogsRequest::set_resource_meta(const void* value, size_t size) { resource_meta_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.DeleteTaskRequest.resource_meta) + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskLogsRequest.resource_meta) } -inline ::std::string* DeleteTaskRequest::mutable_resource_meta() { +inline ::std::string* GetTaskLogsRequest::mutable_resource_meta() { - // @@protoc_insertion_point(field_mutable:flyteidl.admin.DeleteTaskRequest.resource_meta) + // 
@@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskLogsRequest.resource_meta) return resource_meta_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline ::std::string* DeleteTaskRequest::release_resource_meta() { - // @@protoc_insertion_point(field_release:flyteidl.admin.DeleteTaskRequest.resource_meta) +inline ::std::string* GetTaskLogsRequest::release_resource_meta() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskLogsRequest.resource_meta) return resource_meta_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline void DeleteTaskRequest::set_allocated_resource_meta(::std::string* resource_meta) { +inline void GetTaskLogsRequest::set_allocated_resource_meta(::std::string* resource_meta) { if (resource_meta != nullptr) { } else { } resource_meta_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), resource_meta); - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.DeleteTaskRequest.resource_meta) + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskLogsRequest.resource_meta) } -// ------------------------------------------------------------------- - -// DeleteTaskResponse - -// ------------------------------------------------------------------- - -// Agent +// uint64 lines = 3; +inline void GetTaskLogsRequest::clear_lines() { + lines_ = PROTOBUF_ULONGLONG(0); +} +inline ::google::protobuf::uint64 GetTaskLogsRequest::lines() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskLogsRequest.lines) + return lines_; +} +inline void GetTaskLogsRequest::set_lines(::google::protobuf::uint64 value) { + + lines_ = value; + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskLogsRequest.lines) +} -// string name = 1; -inline void Agent::clear_name() { - name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +// string token = 4; +inline void 
GetTaskLogsRequest::clear_token() { + token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline const ::std::string& Agent::name() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.Agent.name) - return name_.GetNoArena(); +inline const ::std::string& GetTaskLogsRequest::token() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskLogsRequest.token) + return token_.GetNoArena(); } -inline void Agent::set_name(const ::std::string& value) { +inline void GetTaskLogsRequest::set_token(const ::std::string& value) { - name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.admin.Agent.name) + token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskLogsRequest.token) } #if LANG_CXX11 -inline void Agent::set_name(::std::string&& value) { +inline void GetTaskLogsRequest::set_token(::std::string&& value) { - name_.SetNoArena( + token_.SetNoArena( &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.Agent.name) + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetTaskLogsRequest.token) } #endif -inline void Agent::set_name(const char* value) { +inline void GetTaskLogsRequest::set_token(const char* value) { GOOGLE_DCHECK(value != nullptr); - name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.admin.Agent.name) + token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskLogsRequest.token) } -inline void Agent::set_name(const char* value, size_t size) { +inline void GetTaskLogsRequest::set_token(const char* value, size_t 
size) { - name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.Agent.name) + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskLogsRequest.token) } -inline ::std::string* Agent::mutable_name() { +inline ::std::string* GetTaskLogsRequest::mutable_token() { - // @@protoc_insertion_point(field_mutable:flyteidl.admin.Agent.name) - return name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskLogsRequest.token) + return token_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline ::std::string* Agent::release_name() { - // @@protoc_insertion_point(field_release:flyteidl.admin.Agent.name) +inline ::std::string* GetTaskLogsRequest::release_token() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskLogsRequest.token) - return name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + return token_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline void Agent::set_allocated_name(::std::string* name) { - if (name != nullptr) { +inline void GetTaskLogsRequest::set_allocated_token(::std::string* token) { + if (token != nullptr) { } else { } - name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), name); - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.Agent.name) + token_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), token); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskLogsRequest.token) } -// repeated string supported_task_types = 2; -inline int Agent::supported_task_types_size() const { - return supported_task_types_.size(); 
+// ------------------------------------------------------------------- + +// GetTaskLogsResponse + +// repeated string results = 1; +inline int GetTaskLogsResponse::results_size() const { + return results_.size(); } -inline void Agent::clear_supported_task_types() { - supported_task_types_.Clear(); +inline void GetTaskLogsResponse::clear_results() { + results_.Clear(); } -inline const ::std::string& Agent::supported_task_types(int index) const { - // @@protoc_insertion_point(field_get:flyteidl.admin.Agent.supported_task_types) - return supported_task_types_.Get(index); +inline const ::std::string& GetTaskLogsResponse::results(int index) const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskLogsResponse.results) + return results_.Get(index); } -inline ::std::string* Agent::mutable_supported_task_types(int index) { - // @@protoc_insertion_point(field_mutable:flyteidl.admin.Agent.supported_task_types) - return supported_task_types_.Mutable(index); +inline ::std::string* GetTaskLogsResponse::mutable_results(int index) { + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskLogsResponse.results) + return results_.Mutable(index); } -inline void Agent::set_supported_task_types(int index, const ::std::string& value) { - // @@protoc_insertion_point(field_set:flyteidl.admin.Agent.supported_task_types) - supported_task_types_.Mutable(index)->assign(value); +inline void GetTaskLogsResponse::set_results(int index, const ::std::string& value) { + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskLogsResponse.results) + results_.Mutable(index)->assign(value); } #if LANG_CXX11 -inline void Agent::set_supported_task_types(int index, ::std::string&& value) { - // @@protoc_insertion_point(field_set:flyteidl.admin.Agent.supported_task_types) - supported_task_types_.Mutable(index)->assign(std::move(value)); +inline void GetTaskLogsResponse::set_results(int index, ::std::string&& value) { + // 
@@protoc_insertion_point(field_set:flyteidl.admin.GetTaskLogsResponse.results) + results_.Mutable(index)->assign(std::move(value)); } #endif -inline void Agent::set_supported_task_types(int index, const char* value) { +inline void GetTaskLogsResponse::set_results(int index, const char* value) { GOOGLE_DCHECK(value != nullptr); - supported_task_types_.Mutable(index)->assign(value); - // @@protoc_insertion_point(field_set_char:flyteidl.admin.Agent.supported_task_types) + results_.Mutable(index)->assign(value); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskLogsResponse.results) } -inline void Agent::set_supported_task_types(int index, const char* value, size_t size) { - supported_task_types_.Mutable(index)->assign( +inline void GetTaskLogsResponse::set_results(int index, const char* value, size_t size) { + results_.Mutable(index)->assign( reinterpret_cast(value), size); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.Agent.supported_task_types) + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskLogsResponse.results) } -inline ::std::string* Agent::add_supported_task_types() { - // @@protoc_insertion_point(field_add_mutable:flyteidl.admin.Agent.supported_task_types) - return supported_task_types_.Add(); +inline ::std::string* GetTaskLogsResponse::add_results() { + // @@protoc_insertion_point(field_add_mutable:flyteidl.admin.GetTaskLogsResponse.results) + return results_.Add(); } -inline void Agent::add_supported_task_types(const ::std::string& value) { - supported_task_types_.Add()->assign(value); - // @@protoc_insertion_point(field_add:flyteidl.admin.Agent.supported_task_types) +inline void GetTaskLogsResponse::add_results(const ::std::string& value) { + results_.Add()->assign(value); + // @@protoc_insertion_point(field_add:flyteidl.admin.GetTaskLogsResponse.results) } #if LANG_CXX11 -inline void Agent::add_supported_task_types(::std::string&& value) { - supported_task_types_.Add(std::move(value)); - // 
@@protoc_insertion_point(field_add:flyteidl.admin.Agent.supported_task_types) +inline void GetTaskLogsResponse::add_results(::std::string&& value) { + results_.Add(std::move(value)); + // @@protoc_insertion_point(field_add:flyteidl.admin.GetTaskLogsResponse.results) } #endif -inline void Agent::add_supported_task_types(const char* value) { +inline void GetTaskLogsResponse::add_results(const char* value) { GOOGLE_DCHECK(value != nullptr); - supported_task_types_.Add()->assign(value); - // @@protoc_insertion_point(field_add_char:flyteidl.admin.Agent.supported_task_types) + results_.Add()->assign(value); + // @@protoc_insertion_point(field_add_char:flyteidl.admin.GetTaskLogsResponse.results) } -inline void Agent::add_supported_task_types(const char* value, size_t size) { - supported_task_types_.Add()->assign(reinterpret_cast(value), size); - // @@protoc_insertion_point(field_add_pointer:flyteidl.admin.Agent.supported_task_types) +inline void GetTaskLogsResponse::add_results(const char* value, size_t size) { + results_.Add()->assign(reinterpret_cast(value), size); + // @@protoc_insertion_point(field_add_pointer:flyteidl.admin.GetTaskLogsResponse.results) } inline const ::google::protobuf::RepeatedPtrField<::std::string>& -Agent::supported_task_types() const { - // @@protoc_insertion_point(field_list:flyteidl.admin.Agent.supported_task_types) - return supported_task_types_; +GetTaskLogsResponse::results() const { + // @@protoc_insertion_point(field_list:flyteidl.admin.GetTaskLogsResponse.results) + return results_; } inline ::google::protobuf::RepeatedPtrField<::std::string>* -Agent::mutable_supported_task_types() { - // @@protoc_insertion_point(field_mutable_list:flyteidl.admin.Agent.supported_task_types) - return &supported_task_types_; +GetTaskLogsResponse::mutable_results() { + // @@protoc_insertion_point(field_mutable_list:flyteidl.admin.GetTaskLogsResponse.results) + return &results_; } -// ------------------------------------------------------------------- - -// 
GetAgentRequest - -// string name = 1; -inline void GetAgentRequest::clear_name() { - name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +// string token = 2; +inline void GetTaskLogsResponse::clear_token() { + token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline const ::std::string& GetAgentRequest::name() const { - // @@protoc_insertion_point(field_get:flyteidl.admin.GetAgentRequest.name) - return name_.GetNoArena(); +inline const ::std::string& GetTaskLogsResponse::token() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.GetTaskLogsResponse.token) + return token_.GetNoArena(); } -inline void GetAgentRequest::set_name(const ::std::string& value) { +inline void GetTaskLogsResponse::set_token(const ::std::string& value) { - name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.admin.GetAgentRequest.name) + token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.GetTaskLogsResponse.token) } #if LANG_CXX11 -inline void GetAgentRequest::set_name(::std::string&& value) { +inline void GetTaskLogsResponse::set_token(::std::string&& value) { - name_.SetNoArena( + token_.SetNoArena( &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetAgentRequest.name) + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.GetTaskLogsResponse.token) } #endif -inline void GetAgentRequest::set_name(const char* value) { +inline void GetTaskLogsResponse::set_token(const char* value) { GOOGLE_DCHECK(value != nullptr); - name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetAgentRequest.name) + 
token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.GetTaskLogsResponse.token) } -inline void GetAgentRequest::set_name(const char* value, size_t size) { +inline void GetTaskLogsResponse::set_token(const char* value, size_t size) { - name_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetAgentRequest.name) + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.GetTaskLogsResponse.token) } -inline ::std::string* GetAgentRequest::mutable_name() { +inline ::std::string* GetTaskLogsResponse::mutable_token() { - // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetAgentRequest.name) - return name_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetTaskLogsResponse.token) + return token_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline ::std::string* GetAgentRequest::release_name() { - // @@protoc_insertion_point(field_release:flyteidl.admin.GetAgentRequest.name) +inline ::std::string* GetTaskLogsResponse::release_token() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetTaskLogsResponse.token) - return name_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + return token_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } -inline void GetAgentRequest::set_allocated_name(::std::string* name) { - if (name != nullptr) { +inline void GetTaskLogsResponse::set_allocated_token(::std::string* token) { + if (token != nullptr) { } else { } - name_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), name); - // 
@@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetAgentRequest.name) + token_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), token); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetTaskLogsResponse.token) } +#ifdef __GNUC__ + #pragma GCC diagnostic pop +#endif // __GNUC__ // ------------------------------------------------------------------- -// GetAgentResponse - -// .flyteidl.admin.Agent agent = 1; -inline bool GetAgentResponse::has_agent() const { - return this != internal_default_instance() && agent_ != nullptr; -} -inline void GetAgentResponse::clear_agent() { - if (GetArenaNoVirtual() == nullptr && agent_ != nullptr) { - delete agent_; - } - agent_ = nullptr; -} -inline const ::flyteidl::admin::Agent& GetAgentResponse::agent() const { - const ::flyteidl::admin::Agent* p = agent_; - // @@protoc_insertion_point(field_get:flyteidl.admin.GetAgentResponse.agent) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::admin::_Agent_default_instance_); -} -inline ::flyteidl::admin::Agent* GetAgentResponse::release_agent() { - // @@protoc_insertion_point(field_release:flyteidl.admin.GetAgentResponse.agent) - - ::flyteidl::admin::Agent* temp = agent_; - agent_ = nullptr; - return temp; -} -inline ::flyteidl::admin::Agent* GetAgentResponse::mutable_agent() { - - if (agent_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::admin::Agent>(GetArenaNoVirtual()); - agent_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetAgentResponse.agent) - return agent_; -} -inline void GetAgentResponse::set_allocated_agent(::flyteidl::admin::Agent* agent) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete agent_; - } - if (agent) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - agent = ::google::protobuf::internal::GetOwnedMessage( - message_arena, agent, 
submessage_arena); - } - - } else { - - } - agent_ = agent; - // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetAgentResponse.agent) -} - // ------------------------------------------------------------------- -// ListAgentsRequest - // ------------------------------------------------------------------- -// ListAgentsResponse - -// repeated .flyteidl.admin.Agent agents = 1; -inline int ListAgentsResponse::agents_size() const { - return agents_.size(); -} -inline void ListAgentsResponse::clear_agents() { - agents_.Clear(); -} -inline ::flyteidl::admin::Agent* ListAgentsResponse::mutable_agents(int index) { - // @@protoc_insertion_point(field_mutable:flyteidl.admin.ListAgentsResponse.agents) - return agents_.Mutable(index); -} -inline ::google::protobuf::RepeatedPtrField< ::flyteidl::admin::Agent >* -ListAgentsResponse::mutable_agents() { - // @@protoc_insertion_point(field_mutable_list:flyteidl.admin.ListAgentsResponse.agents) - return &agents_; -} -inline const ::flyteidl::admin::Agent& ListAgentsResponse::agents(int index) const { - // @@protoc_insertion_point(field_get:flyteidl.admin.ListAgentsResponse.agents) - return agents_.Get(index); -} -inline ::flyteidl::admin::Agent* ListAgentsResponse::add_agents() { - // @@protoc_insertion_point(field_add:flyteidl.admin.ListAgentsResponse.agents) - return agents_.Add(); -} -inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::admin::Agent >& -ListAgentsResponse::agents() const { - // @@protoc_insertion_point(field_list:flyteidl.admin.ListAgentsResponse.agents) - return agents_; -} +// ------------------------------------------------------------------- -#ifdef __GNUC__ - #pragma GCC diagnostic pop -#endif // __GNUC__ // ------------------------------------------------------------------- // ------------------------------------------------------------------- diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/common.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/admin/common.pb.cc index 982f3fb6b7..60e9f6862c 
100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/common.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/common.pb.cc @@ -560,6 +560,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fcommon_2eproto:: PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityIdentifier, project_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityIdentifier, domain_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityIdentifier, name_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityIdentifier, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityMetadata, _internal_metadata_), ~0u, // no _extensions_ @@ -593,6 +594,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fcommon_2eproto:: PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityIdentifierListRequest, token_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityIdentifierListRequest, sort_by_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityIdentifierListRequest, filters_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityIdentifierListRequest, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityListRequest, _internal_metadata_), ~0u, // no _extensions_ @@ -605,6 +607,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fcommon_2eproto:: PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityListRequest, token_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityListRequest, sort_by_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityListRequest, filters_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityListRequest, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NamedEntityIdentifierList, _internal_metadata_), ~0u, // no _extensions_ @@ -750,31 +753,31 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fcommon_2eproto:: }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, 
sizeof(::flyteidl::admin::NamedEntityIdentifier)}, - { 8, -1, sizeof(::flyteidl::admin::NamedEntityMetadata)}, - { 15, -1, sizeof(::flyteidl::admin::NamedEntity)}, - { 23, -1, sizeof(::flyteidl::admin::Sort)}, - { 30, -1, sizeof(::flyteidl::admin::NamedEntityIdentifierListRequest)}, - { 41, -1, sizeof(::flyteidl::admin::NamedEntityListRequest)}, - { 53, -1, sizeof(::flyteidl::admin::NamedEntityIdentifierList)}, - { 60, -1, sizeof(::flyteidl::admin::NamedEntityList)}, - { 67, -1, sizeof(::flyteidl::admin::NamedEntityGetRequest)}, - { 74, -1, sizeof(::flyteidl::admin::NamedEntityUpdateRequest)}, - { 82, -1, sizeof(::flyteidl::admin::NamedEntityUpdateResponse)}, - { 87, -1, sizeof(::flyteidl::admin::ObjectGetRequest)}, - { 93, -1, sizeof(::flyteidl::admin::ResourceListRequest)}, - { 103, -1, sizeof(::flyteidl::admin::EmailNotification)}, - { 109, -1, sizeof(::flyteidl::admin::PagerDutyNotification)}, - { 115, -1, sizeof(::flyteidl::admin::SlackNotification)}, - { 121, -1, sizeof(::flyteidl::admin::Notification)}, - { 131, -1, sizeof(::flyteidl::admin::UrlBlob)}, - { 138, 145, sizeof(::flyteidl::admin::Labels_ValuesEntry_DoNotUse)}, - { 147, -1, sizeof(::flyteidl::admin::Labels)}, - { 153, 160, sizeof(::flyteidl::admin::Annotations_ValuesEntry_DoNotUse)}, - { 162, -1, sizeof(::flyteidl::admin::Annotations)}, - { 168, -1, sizeof(::flyteidl::admin::Envs)}, - { 174, -1, sizeof(::flyteidl::admin::AuthRole)}, - { 181, -1, sizeof(::flyteidl::admin::RawOutputDataConfig)}, - { 187, -1, sizeof(::flyteidl::admin::FlyteURLs)}, + { 9, -1, sizeof(::flyteidl::admin::NamedEntityMetadata)}, + { 16, -1, sizeof(::flyteidl::admin::NamedEntity)}, + { 24, -1, sizeof(::flyteidl::admin::Sort)}, + { 31, -1, sizeof(::flyteidl::admin::NamedEntityIdentifierListRequest)}, + { 43, -1, sizeof(::flyteidl::admin::NamedEntityListRequest)}, + { 56, -1, sizeof(::flyteidl::admin::NamedEntityIdentifierList)}, + { 63, -1, sizeof(::flyteidl::admin::NamedEntityList)}, + { 70, -1, 
sizeof(::flyteidl::admin::NamedEntityGetRequest)}, + { 77, -1, sizeof(::flyteidl::admin::NamedEntityUpdateRequest)}, + { 85, -1, sizeof(::flyteidl::admin::NamedEntityUpdateResponse)}, + { 90, -1, sizeof(::flyteidl::admin::ObjectGetRequest)}, + { 96, -1, sizeof(::flyteidl::admin::ResourceListRequest)}, + { 106, -1, sizeof(::flyteidl::admin::EmailNotification)}, + { 112, -1, sizeof(::flyteidl::admin::PagerDutyNotification)}, + { 118, -1, sizeof(::flyteidl::admin::SlackNotification)}, + { 124, -1, sizeof(::flyteidl::admin::Notification)}, + { 134, -1, sizeof(::flyteidl::admin::UrlBlob)}, + { 141, 148, sizeof(::flyteidl::admin::Labels_ValuesEntry_DoNotUse)}, + { 150, -1, sizeof(::flyteidl::admin::Labels)}, + { 156, 163, sizeof(::flyteidl::admin::Annotations_ValuesEntry_DoNotUse)}, + { 165, -1, sizeof(::flyteidl::admin::Annotations)}, + { 171, -1, sizeof(::flyteidl::admin::Envs)}, + { 177, -1, sizeof(::flyteidl::admin::AuthRole)}, + { 184, -1, sizeof(::flyteidl::admin::RawOutputDataConfig)}, + { 190, -1, sizeof(::flyteidl::admin::FlyteURLs)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -817,78 +820,78 @@ const char descriptor_table_protodef_flyteidl_2fadmin_2fcommon_2eproto[] = "admin\032\035flyteidl/core/execution.proto\032\036fl" "yteidl/core/identifier.proto\032\034flyteidl/c" "ore/literals.proto\032\037google/protobuf/time" - "stamp.proto\"F\n\025NamedEntityIdentifier\022\017\n\007" + "stamp.proto\"S\n\025NamedEntityIdentifier\022\017\n\007" "project\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\022\014\n\004name\030\003 " - "\001(\t\"[\n\023NamedEntityMetadata\022\023\n\013descriptio" - "n\030\001 \001(\t\022/\n\005state\030\002 \001(\0162 .flyteidl.admin." 
- "NamedEntityState\"\253\001\n\013NamedEntity\0222\n\rreso" - "urce_type\030\001 \001(\0162\033.flyteidl.core.Resource" - "Type\0221\n\002id\030\002 \001(\0132%.flyteidl.admin.NamedE" - "ntityIdentifier\0225\n\010metadata\030\003 \001(\0132#.flyt" - "eidl.admin.NamedEntityMetadata\"r\n\004Sort\022\013" - "\n\003key\030\001 \001(\t\0221\n\tdirection\030\002 \001(\0162\036.flyteid" - "l.admin.Sort.Direction\"*\n\tDirection\022\016\n\nD" - "ESCENDING\020\000\022\r\n\tASCENDING\020\001\"\231\001\n NamedEnti" - "tyIdentifierListRequest\022\017\n\007project\030\001 \001(\t" - "\022\016\n\006domain\030\002 \001(\t\022\r\n\005limit\030\003 \001(\r\022\r\n\005token" + "\001(\t\022\013\n\003org\030\004 \001(\t\"[\n\023NamedEntityMetadata\022" + "\023\n\013description\030\001 \001(\t\022/\n\005state\030\002 \001(\0162 .fl" + "yteidl.admin.NamedEntityState\"\253\001\n\013NamedE" + "ntity\0222\n\rresource_type\030\001 \001(\0162\033.flyteidl." + "core.ResourceType\0221\n\002id\030\002 \001(\0132%.flyteidl" + ".admin.NamedEntityIdentifier\0225\n\010metadata" + "\030\003 \001(\0132#.flyteidl.admin.NamedEntityMetad" + "ata\"r\n\004Sort\022\013\n\003key\030\001 \001(\t\0221\n\tdirection\030\002 " + "\001(\0162\036.flyteidl.admin.Sort.Direction\"*\n\tD" + "irection\022\016\n\nDESCENDING\020\000\022\r\n\tASCENDING\020\001\"" + "\246\001\n NamedEntityIdentifierListRequest\022\017\n\007" + "project\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\022\r\n\005limit\030\003" + " \001(\r\022\r\n\005token\030\004 \001(\t\022%\n\007sort_by\030\005 \001(\0132\024.f" + "lyteidl.admin.Sort\022\017\n\007filters\030\006 \001(\t\022\013\n\003o" + "rg\030\007 \001(\t\"\320\001\n\026NamedEntityListRequest\0222\n\rr" + "esource_type\030\001 \001(\0162\033.flyteidl.core.Resou" + "rceType\022\017\n\007project\030\002 \001(\t\022\016\n\006domain\030\003 \001(\t" + "\022\r\n\005limit\030\004 \001(\r\022\r\n\005token\030\005 \001(\t\022%\n\007sort_b" + "y\030\006 
\001(\0132\024.flyteidl.admin.Sort\022\017\n\007filters" + "\030\007 \001(\t\022\013\n\003org\030\010 \001(\t\"c\n\031NamedEntityIdenti" + "fierList\0227\n\010entities\030\001 \003(\0132%.flyteidl.ad" + "min.NamedEntityIdentifier\022\r\n\005token\030\002 \001(\t" + "\"O\n\017NamedEntityList\022-\n\010entities\030\001 \003(\0132\033." + "flyteidl.admin.NamedEntity\022\r\n\005token\030\002 \001(" + "\t\"~\n\025NamedEntityGetRequest\0222\n\rresource_t" + "ype\030\001 \001(\0162\033.flyteidl.core.ResourceType\0221" + "\n\002id\030\002 \001(\0132%.flyteidl.admin.NamedEntityI" + "dentifier\"\270\001\n\030NamedEntityUpdateRequest\0222" + "\n\rresource_type\030\001 \001(\0162\033.flyteidl.core.Re" + "sourceType\0221\n\002id\030\002 \001(\0132%.flyteidl.admin." + "NamedEntityIdentifier\0225\n\010metadata\030\003 \001(\0132" + "#.flyteidl.admin.NamedEntityMetadata\"\033\n\031" + "NamedEntityUpdateResponse\"9\n\020ObjectGetRe" + "quest\022%\n\002id\030\001 \001(\0132\031.flyteidl.core.Identi" + "fier\"\236\001\n\023ResourceListRequest\0221\n\002id\030\001 \001(\013" + "2%.flyteidl.admin.NamedEntityIdentifier\022" + "\r\n\005limit\030\002 \001(\r\022\r\n\005token\030\003 \001(\t\022\017\n\007filters" "\030\004 \001(\t\022%\n\007sort_by\030\005 \001(\0132\024.flyteidl.admin" - ".Sort\022\017\n\007filters\030\006 \001(\t\"\303\001\n\026NamedEntityLi" - "stRequest\0222\n\rresource_type\030\001 \001(\0162\033.flyte" - "idl.core.ResourceType\022\017\n\007project\030\002 \001(\t\022\016" - "\n\006domain\030\003 \001(\t\022\r\n\005limit\030\004 \001(\r\022\r\n\005token\030\005" - " \001(\t\022%\n\007sort_by\030\006 \001(\0132\024.flyteidl.admin.S" - "ort\022\017\n\007filters\030\007 \001(\t\"c\n\031NamedEntityIdent" - "ifierList\0227\n\010entities\030\001 \003(\0132%.flyteidl.a" - "dmin.NamedEntityIdentifier\022\r\n\005token\030\002 \001(" - "\t\"O\n\017NamedEntityList\022-\n\010entities\030\001 \003(\0132\033" - ".flyteidl.admin.NamedEntity\022\r\n\005token\030\002 \001" - 
"(\t\"~\n\025NamedEntityGetRequest\0222\n\rresource_" - "type\030\001 \001(\0162\033.flyteidl.core.ResourceType\022" - "1\n\002id\030\002 \001(\0132%.flyteidl.admin.NamedEntity" - "Identifier\"\270\001\n\030NamedEntityUpdateRequest\022" - "2\n\rresource_type\030\001 \001(\0162\033.flyteidl.core.R" - "esourceType\0221\n\002id\030\002 \001(\0132%.flyteidl.admin" - ".NamedEntityIdentifier\0225\n\010metadata\030\003 \001(\013" - "2#.flyteidl.admin.NamedEntityMetadata\"\033\n" - "\031NamedEntityUpdateResponse\"9\n\020ObjectGetR" - "equest\022%\n\002id\030\001 \001(\0132\031.flyteidl.core.Ident" - "ifier\"\236\001\n\023ResourceListRequest\0221\n\002id\030\001 \001(" - "\0132%.flyteidl.admin.NamedEntityIdentifier" - "\022\r\n\005limit\030\002 \001(\r\022\r\n\005token\030\003 \001(\t\022\017\n\007filter" - "s\030\004 \001(\t\022%\n\007sort_by\030\005 \001(\0132\024.flyteidl.admi" - "n.Sort\"-\n\021EmailNotification\022\030\n\020recipient" - "s_email\030\001 \003(\t\"1\n\025PagerDutyNotification\022\030" - "\n\020recipients_email\030\001 \003(\t\"-\n\021SlackNotific" - "ation\022\030\n\020recipients_email\030\001 \003(\t\"\363\001\n\014Noti" - "fication\0226\n\006phases\030\001 \003(\0162&.flyteidl.core" - ".WorkflowExecution.Phase\0222\n\005email\030\002 \001(\0132" - "!.flyteidl.admin.EmailNotificationH\000\022;\n\n" - "pager_duty\030\003 \001(\0132%.flyteidl.admin.PagerD" - "utyNotificationH\000\0222\n\005slack\030\004 \001(\0132!.flyte" - "idl.admin.SlackNotificationH\000B\006\n\004type\")\n" - "\007UrlBlob\022\013\n\003url\030\001 \001(\t\022\r\n\005bytes\030\002 \001(\003:\002\030\001" - "\"k\n\006Labels\0222\n\006values\030\001 \003(\0132\".flyteidl.ad" - "min.Labels.ValuesEntry\032-\n\013ValuesEntry\022\013\n" - "\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"u\n\013Annotat" - "ions\0227\n\006values\030\001 \003(\0132\'.flyteidl.admin.An" - "notations.ValuesEntry\032-\n\013ValuesEntry\022\013\n\003" - "key\030\001 
\001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"3\n\004Envs\022+\n\006" - "values\030\001 \003(\0132\033.flyteidl.core.KeyValuePai" - "r\"N\n\010AuthRole\022\032\n\022assumable_iam_role\030\001 \001(" - "\t\022\"\n\032kubernetes_service_account\030\002 \001(\t:\002\030" - "\001\"5\n\023RawOutputDataConfig\022\036\n\026output_locat" - "ion_prefix\030\001 \001(\t\":\n\tFlyteURLs\022\016\n\006inputs\030" - "\001 \001(\t\022\017\n\007outputs\030\002 \001(\t\022\014\n\004deck\030\003 \001(\t*\\\n\020" - "NamedEntityState\022\027\n\023NAMED_ENTITY_ACTIVE\020" - "\000\022\031\n\025NAMED_ENTITY_ARCHIVED\020\001\022\024\n\020SYSTEM_G" - "ENERATED\020\002B=Z;github.com/flyteorg/flyte/" - "flyteidl/gen/pb-go/flyteidl/adminb\006proto" - "3" + ".Sort\"-\n\021EmailNotification\022\030\n\020recipients" + "_email\030\001 \003(\t\"1\n\025PagerDutyNotification\022\030\n" + "\020recipients_email\030\001 \003(\t\"-\n\021SlackNotifica" + "tion\022\030\n\020recipients_email\030\001 \003(\t\"\363\001\n\014Notif" + "ication\0226\n\006phases\030\001 \003(\0162&.flyteidl.core." + "WorkflowExecution.Phase\0222\n\005email\030\002 \001(\0132!" 
+ ".flyteidl.admin.EmailNotificationH\000\022;\n\np" + "ager_duty\030\003 \001(\0132%.flyteidl.admin.PagerDu" + "tyNotificationH\000\0222\n\005slack\030\004 \001(\0132!.flytei" + "dl.admin.SlackNotificationH\000B\006\n\004type\")\n\007" + "UrlBlob\022\013\n\003url\030\001 \001(\t\022\r\n\005bytes\030\002 \001(\003:\002\030\001\"" + "k\n\006Labels\0222\n\006values\030\001 \003(\0132\".flyteidl.adm" + "in.Labels.ValuesEntry\032-\n\013ValuesEntry\022\013\n\003" + "key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"u\n\013Annotati" + "ons\0227\n\006values\030\001 \003(\0132\'.flyteidl.admin.Ann" + "otations.ValuesEntry\032-\n\013ValuesEntry\022\013\n\003k" + "ey\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"3\n\004Envs\022+\n\006v" + "alues\030\001 \003(\0132\033.flyteidl.core.KeyValuePair" + "\"N\n\010AuthRole\022\032\n\022assumable_iam_role\030\001 \001(\t" + "\022\"\n\032kubernetes_service_account\030\002 \001(\t:\002\030\001" + "\"5\n\023RawOutputDataConfig\022\036\n\026output_locati" + "on_prefix\030\001 \001(\t\":\n\tFlyteURLs\022\016\n\006inputs\030\001" + " \001(\t\022\017\n\007outputs\030\002 \001(\t\022\014\n\004deck\030\003 \001(\t*\\\n\020N" + "amedEntityState\022\027\n\023NAMED_ENTITY_ACTIVE\020\000" + "\022\031\n\025NAMED_ENTITY_ARCHIVED\020\001\022\024\n\020SYSTEM_GE" + "NERATED\020\002B=Z;github.com/flyteorg/flyte/f" + "lyteidl/gen/pb-go/flyteidl/adminb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fadmin_2fcommon_2eproto = { false, InitDefaults_flyteidl_2fadmin_2fcommon_2eproto, descriptor_table_protodef_flyteidl_2fadmin_2fcommon_2eproto, - "flyteidl/admin/common.proto", &assign_descriptors_table_flyteidl_2fadmin_2fcommon_2eproto, 2801, + "flyteidl/admin/common.proto", &assign_descriptors_table_flyteidl_2fadmin_2fcommon_2eproto, 2840, }; void AddDescriptors_flyteidl_2fadmin_2fcommon_2eproto() { @@ -955,6 +958,7 @@ class NamedEntityIdentifier::HasBitSetters { const int 
NamedEntityIdentifier::kProjectFieldNumber; const int NamedEntityIdentifier::kDomainFieldNumber; const int NamedEntityIdentifier::kNameFieldNumber; +const int NamedEntityIdentifier::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 NamedEntityIdentifier::NamedEntityIdentifier() @@ -978,6 +982,10 @@ NamedEntityIdentifier::NamedEntityIdentifier(const NamedEntityIdentifier& from) if (from.name().size() > 0) { name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } // @@protoc_insertion_point(copy_constructor:flyteidl.admin.NamedEntityIdentifier) } @@ -987,6 +995,7 @@ void NamedEntityIdentifier::SharedCtor() { project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } NamedEntityIdentifier::~NamedEntityIdentifier() { @@ -998,6 +1007,7 @@ void NamedEntityIdentifier::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } void NamedEntityIdentifier::SetCachedSize(int size) const { @@ -1018,6 +1028,7 @@ void NamedEntityIdentifier::Clear() { project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); 
domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); _internal_metadata_.Clear(); } @@ -1082,6 +1093,22 @@ const char* NamedEntityIdentifier::_InternalParse(const char* begin, const char* ptr += size; break; } + // string org = 4; + case 4: { + if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.NamedEntityIdentifier.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -1161,6 +1188,21 @@ bool NamedEntityIdentifier::MergePartialFromCodedStream( break; } + // string org = 4; + case 4: { + if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.NamedEntityIdentifier.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -1218,6 +1260,16 @@ void NamedEntityIdentifier::SerializeWithCachedSizes( 3, this->name(), output); } + // string org = 4; + if (this->org().size() > 0) { + 
::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.NamedEntityIdentifier.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 4, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -1264,6 +1316,17 @@ ::google::protobuf::uint8* NamedEntityIdentifier::InternalSerializeWithCachedSiz 3, this->name(), target); } + // string org = 4; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.NamedEntityIdentifier.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 4, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -1306,6 +1369,13 @@ size_t NamedEntityIdentifier::ByteSizeLong() const { this->name()); } + // string org = 4; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; @@ -1345,6 +1415,10 @@ void NamedEntityIdentifier::MergeFrom(const NamedEntityIdentifier& from) { name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } } void NamedEntityIdentifier::CopyFrom(const ::google::protobuf::Message& from) { @@ -1378,6 
+1452,8 @@ void NamedEntityIdentifier::InternalSwap(NamedEntityIdentifier* other) { GetArenaNoVirtual()); name_.Swap(&other->name_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); } ::google::protobuf::Metadata NamedEntityIdentifier::GetMetadata() const { @@ -2500,6 +2576,7 @@ const int NamedEntityIdentifierListRequest::kLimitFieldNumber; const int NamedEntityIdentifierListRequest::kTokenFieldNumber; const int NamedEntityIdentifierListRequest::kSortByFieldNumber; const int NamedEntityIdentifierListRequest::kFiltersFieldNumber; +const int NamedEntityIdentifierListRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 NamedEntityIdentifierListRequest::NamedEntityIdentifierListRequest() @@ -2527,6 +2604,10 @@ NamedEntityIdentifierListRequest::NamedEntityIdentifierListRequest(const NamedEn if (from.filters().size() > 0) { filters_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.filters_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_sort_by()) { sort_by_ = new ::flyteidl::admin::Sort(*from.sort_by_); } else { @@ -2543,6 +2624,7 @@ void NamedEntityIdentifierListRequest::SharedCtor() { domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); filters_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); ::memset(&sort_by_, 0, static_cast( reinterpret_cast(&limit_) - reinterpret_cast(&sort_by_)) + sizeof(limit_)); @@ -2558,6 +2640,7 @@ void 
NamedEntityIdentifierListRequest::SharedDtor() { domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); token_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); filters_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete sort_by_; } @@ -2580,6 +2663,7 @@ void NamedEntityIdentifierListRequest::Clear() { domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); filters_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && sort_by_ != nullptr) { delete sort_by_; } @@ -2685,6 +2769,22 @@ const char* NamedEntityIdentifierListRequest::_InternalParse(const char* begin, ptr += size; break; } + // string org = 7; + case 7: { + if (static_cast<::google::protobuf::uint8>(tag) != 58) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.NamedEntityIdentifierListRequest.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -2803,6 +2903,21 @@ bool NamedEntityIdentifierListRequest::MergePartialFromCodedStream( break; } + // string org = 7; + case 
7: { + if (static_cast< ::google::protobuf::uint8>(tag) == (58 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.NamedEntityIdentifierListRequest.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -2881,6 +2996,16 @@ void NamedEntityIdentifierListRequest::SerializeWithCachedSizes( 6, this->filters(), output); } + // string org = 7; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.NamedEntityIdentifierListRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 7, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -2950,6 +3075,17 @@ ::google::protobuf::uint8* NamedEntityIdentifierListRequest::InternalSerializeWi 6, this->filters(), target); } + // string org = 7; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.NamedEntityIdentifierListRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 7, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -2999,6 +3135,13 @@ size_t NamedEntityIdentifierListRequest::ByteSizeLong() const { 
this->filters()); } + // string org = 7; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.Sort sort_by = 5; if (this->has_sort_by()) { total_size += 1 + @@ -3056,6 +3199,10 @@ void NamedEntityIdentifierListRequest::MergeFrom(const NamedEntityIdentifierList filters_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.filters_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_sort_by()) { mutable_sort_by()->::flyteidl::admin::Sort::MergeFrom(from.sort_by()); } @@ -3097,6 +3244,8 @@ void NamedEntityIdentifierListRequest::InternalSwap(NamedEntityIdentifierListReq GetArenaNoVirtual()); filters_.Swap(&other->filters_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(sort_by_, other->sort_by_); swap(limit_, other->limit_); } @@ -3130,6 +3279,7 @@ const int NamedEntityListRequest::kLimitFieldNumber; const int NamedEntityListRequest::kTokenFieldNumber; const int NamedEntityListRequest::kSortByFieldNumber; const int NamedEntityListRequest::kFiltersFieldNumber; +const int NamedEntityListRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 NamedEntityListRequest::NamedEntityListRequest() @@ -3157,6 +3307,10 @@ NamedEntityListRequest::NamedEntityListRequest(const NamedEntityListRequest& fro if (from.filters().size() > 0) { filters_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.filters_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_sort_by()) { 
sort_by_ = new ::flyteidl::admin::Sort(*from.sort_by_); } else { @@ -3175,6 +3329,7 @@ void NamedEntityListRequest::SharedCtor() { domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); filters_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); ::memset(&sort_by_, 0, static_cast( reinterpret_cast(&limit_) - reinterpret_cast(&sort_by_)) + sizeof(limit_)); @@ -3190,6 +3345,7 @@ void NamedEntityListRequest::SharedDtor() { domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); token_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); filters_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete sort_by_; } @@ -3212,6 +3368,7 @@ void NamedEntityListRequest::Clear() { domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); filters_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && sort_by_ != nullptr) { delete sort_by_; } @@ -3327,6 +3484,22 @@ const char* NamedEntityListRequest::_InternalParse(const char* begin, const char ptr += size; break; } + // string org = 8; + case 8: { + if (static_cast<::google::protobuf::uint8>(tag) != 66) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.NamedEntityListRequest.org"); + object = 
msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -3459,6 +3632,21 @@ bool NamedEntityListRequest::MergePartialFromCodedStream( break; } + // string org = 8; + case 8: { + if (static_cast< ::google::protobuf::uint8>(tag) == (66 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.NamedEntityListRequest.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -3543,6 +3731,16 @@ void NamedEntityListRequest::SerializeWithCachedSizes( 7, this->filters(), output); } + // string org = 8; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.NamedEntityListRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 8, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -3618,6 +3816,17 @@ ::google::protobuf::uint8* NamedEntityListRequest::InternalSerializeWithCachedSi 7, this->filters(), target); } + // string org = 8; + if (this->org().size() > 0) { + 
::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.NamedEntityListRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 8, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -3667,6 +3876,13 @@ size_t NamedEntityListRequest::ByteSizeLong() const { this->filters()); } + // string org = 8; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.Sort sort_by = 6; if (this->has_sort_by()) { total_size += 1 + @@ -3730,6 +3946,10 @@ void NamedEntityListRequest::MergeFrom(const NamedEntityListRequest& from) { filters_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.filters_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_sort_by()) { mutable_sort_by()->::flyteidl::admin::Sort::MergeFrom(from.sort_by()); } @@ -3774,6 +3994,8 @@ void NamedEntityListRequest::InternalSwap(NamedEntityListRequest* other) { GetArenaNoVirtual()); filters_.Swap(&other->filters_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(sort_by_, other->sort_by_); swap(resource_type_, other->resource_type_); swap(limit_, other->limit_); diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/common.pb.h b/flyteidl/gen/pb-cpp/flyteidl/admin/common.pb.h index b2eb1cb4e5..a553cfad4a 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/common.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/common.pb.h 
@@ -353,6 +353,20 @@ class NamedEntityIdentifier final : ::std::string* release_name(); void set_allocated_name(::std::string* name); + // string org = 4; + void clear_org(); + static const int kOrgFieldNumber = 4; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // @@protoc_insertion_point(class_scope:flyteidl.admin.NamedEntityIdentifier) private: class HasBitSetters; @@ -361,6 +375,7 @@ class NamedEntityIdentifier final : ::google::protobuf::internal::ArenaStringPtr project_; ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr name_; + ::google::protobuf::internal::ArenaStringPtr org_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fcommon_2eproto; }; @@ -929,6 +944,20 @@ class NamedEntityIdentifierListRequest final : ::std::string* release_filters(); void set_allocated_filters(::std::string* filters); + // string org = 7; + void clear_org(); + static const int kOrgFieldNumber = 7; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.Sort sort_by = 5; bool has_sort_by() const; void clear_sort_by(); @@ -953,6 +982,7 @@ class NamedEntityIdentifierListRequest final : ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr token_; ::google::protobuf::internal::ArenaStringPtr filters_; + ::google::protobuf::internal::ArenaStringPtr 
org_; ::flyteidl::admin::Sort* sort_by_; ::google::protobuf::uint32 limit_; mutable ::google::protobuf::internal::CachedSize _cached_size_; @@ -1111,6 +1141,20 @@ class NamedEntityListRequest final : ::std::string* release_filters(); void set_allocated_filters(::std::string* filters); + // string org = 8; + void clear_org(); + static const int kOrgFieldNumber = 8; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.Sort sort_by = 6; bool has_sort_by() const; void clear_sort_by(); @@ -1141,6 +1185,7 @@ class NamedEntityListRequest final : ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr token_; ::google::protobuf::internal::ArenaStringPtr filters_; + ::google::protobuf::internal::ArenaStringPtr org_; ::flyteidl::admin::Sort* sort_by_; int resource_type_; ::google::protobuf::uint32 limit_; @@ -3710,6 +3755,59 @@ inline void NamedEntityIdentifier::set_allocated_name(::std::string* name) { // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.NamedEntityIdentifier.name) } +// string org = 4; +inline void NamedEntityIdentifier::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& NamedEntityIdentifier::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.NamedEntityIdentifier.org) + return org_.GetNoArena(); +} +inline void NamedEntityIdentifier::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.NamedEntityIdentifier.org) +} +#if LANG_CXX11 +inline void 
NamedEntityIdentifier::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.NamedEntityIdentifier.org) +} +#endif +inline void NamedEntityIdentifier::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.NamedEntityIdentifier.org) +} +inline void NamedEntityIdentifier::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.NamedEntityIdentifier.org) +} +inline ::std::string* NamedEntityIdentifier::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.NamedEntityIdentifier.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* NamedEntityIdentifier::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.NamedEntityIdentifier.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void NamedEntityIdentifier::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.NamedEntityIdentifier.org) +} + // ------------------------------------------------------------------- // NamedEntityMetadata @@ -4253,6 +4351,59 @@ inline void NamedEntityIdentifierListRequest::set_allocated_filters(::std::strin // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.NamedEntityIdentifierListRequest.filters) } +// string org = 7; +inline void 
NamedEntityIdentifierListRequest::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& NamedEntityIdentifierListRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.NamedEntityIdentifierListRequest.org) + return org_.GetNoArena(); +} +inline void NamedEntityIdentifierListRequest::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.NamedEntityIdentifierListRequest.org) +} +#if LANG_CXX11 +inline void NamedEntityIdentifierListRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.NamedEntityIdentifierListRequest.org) +} +#endif +inline void NamedEntityIdentifierListRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.NamedEntityIdentifierListRequest.org) +} +inline void NamedEntityIdentifierListRequest::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.NamedEntityIdentifierListRequest.org) +} +inline ::std::string* NamedEntityIdentifierListRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.NamedEntityIdentifierListRequest.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* NamedEntityIdentifierListRequest::release_org() { + // 
@@protoc_insertion_point(field_release:flyteidl.admin.NamedEntityIdentifierListRequest.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void NamedEntityIdentifierListRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.NamedEntityIdentifierListRequest.org) +} + // ------------------------------------------------------------------- // NamedEntityListRequest @@ -4548,6 +4699,59 @@ inline void NamedEntityListRequest::set_allocated_filters(::std::string* filters // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.NamedEntityListRequest.filters) } +// string org = 8; +inline void NamedEntityListRequest::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& NamedEntityListRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.NamedEntityListRequest.org) + return org_.GetNoArena(); +} +inline void NamedEntityListRequest::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.NamedEntityListRequest.org) +} +#if LANG_CXX11 +inline void NamedEntityListRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.NamedEntityListRequest.org) +} +#endif +inline void NamedEntityListRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // 
@@protoc_insertion_point(field_set_char:flyteidl.admin.NamedEntityListRequest.org) +} +inline void NamedEntityListRequest::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.NamedEntityListRequest.org) +} +inline ::std::string* NamedEntityListRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.NamedEntityListRequest.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* NamedEntityListRequest::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.NamedEntityListRequest.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void NamedEntityListRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.NamedEntityListRequest.org) +} + // ------------------------------------------------------------------- // NamedEntityIdentifierList diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/execution.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/admin/execution.pb.cc index 6165cc0849..f090a2d7f4 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/execution.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/execution.pb.cc @@ -33,7 +33,7 @@ extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fadmin_2fexecution_2eproto ::google::p extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fadmin_2fexecution_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_Execution_flyteidl_2fadmin_2fexecution_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fadmin_2fexecution_2eproto ::google::protobuf::internal::SCCInfo<5> 
scc_info_ExecutionMetadata_flyteidl_2fadmin_2fexecution_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fadmin_2fexecution_2eproto ::google::protobuf::internal::SCCInfo<9> scc_info_ExecutionClosure_flyteidl_2fadmin_2fexecution_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fexecution_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_ExecutionError_flyteidl_2fcore_2fexecution_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fexecution_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_QualityOfService_flyteidl_2fcore_2fexecution_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fidentifier_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_Identifier_flyteidl_2fcore_2fidentifier_2eproto; @@ -561,6 +561,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fexecution_2eprot PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ExecutionCreateRequest, name_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ExecutionCreateRequest, spec_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ExecutionCreateRequest, inputs_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ExecutionCreateRequest, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ExecutionRelaunchRequest, _internal_metadata_), ~0u, // no _extensions_ @@ -751,28 +752,28 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fexecution_2eprot }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::admin::ExecutionCreateRequest)}, - { 10, -1, sizeof(::flyteidl::admin::ExecutionRelaunchRequest)}, - { 18, -1, 
sizeof(::flyteidl::admin::ExecutionRecoverRequest)}, - { 26, -1, sizeof(::flyteidl::admin::ExecutionCreateResponse)}, - { 32, -1, sizeof(::flyteidl::admin::WorkflowExecutionGetRequest)}, - { 38, -1, sizeof(::flyteidl::admin::Execution)}, - { 46, -1, sizeof(::flyteidl::admin::ExecutionList)}, - { 53, -1, sizeof(::flyteidl::admin::LiteralMapBlob)}, - { 61, -1, sizeof(::flyteidl::admin::AbortMetadata)}, - { 68, -1, sizeof(::flyteidl::admin::ExecutionClosure)}, - { 88, -1, sizeof(::flyteidl::admin::SystemMetadata)}, - { 95, -1, sizeof(::flyteidl::admin::ExecutionMetadata)}, - { 108, -1, sizeof(::flyteidl::admin::NotificationList)}, - { 114, -1, sizeof(::flyteidl::admin::ExecutionSpec)}, - { 137, -1, sizeof(::flyteidl::admin::ExecutionTerminateRequest)}, - { 144, -1, sizeof(::flyteidl::admin::ExecutionTerminateResponse)}, - { 149, -1, sizeof(::flyteidl::admin::WorkflowExecutionGetDataRequest)}, - { 155, -1, sizeof(::flyteidl::admin::WorkflowExecutionGetDataResponse)}, - { 164, -1, sizeof(::flyteidl::admin::ExecutionUpdateRequest)}, - { 171, -1, sizeof(::flyteidl::admin::ExecutionStateChangeDetails)}, - { 179, -1, sizeof(::flyteidl::admin::ExecutionUpdateResponse)}, - { 184, -1, sizeof(::flyteidl::admin::WorkflowExecutionGetMetricsRequest)}, - { 191, -1, sizeof(::flyteidl::admin::WorkflowExecutionGetMetricsResponse)}, + { 11, -1, sizeof(::flyteidl::admin::ExecutionRelaunchRequest)}, + { 19, -1, sizeof(::flyteidl::admin::ExecutionRecoverRequest)}, + { 27, -1, sizeof(::flyteidl::admin::ExecutionCreateResponse)}, + { 33, -1, sizeof(::flyteidl::admin::WorkflowExecutionGetRequest)}, + { 39, -1, sizeof(::flyteidl::admin::Execution)}, + { 47, -1, sizeof(::flyteidl::admin::ExecutionList)}, + { 54, -1, sizeof(::flyteidl::admin::LiteralMapBlob)}, + { 62, -1, sizeof(::flyteidl::admin::AbortMetadata)}, + { 69, -1, sizeof(::flyteidl::admin::ExecutionClosure)}, + { 89, -1, sizeof(::flyteidl::admin::SystemMetadata)}, + { 96, -1, sizeof(::flyteidl::admin::ExecutionMetadata)}, + { 109, 
-1, sizeof(::flyteidl::admin::NotificationList)}, + { 115, -1, sizeof(::flyteidl::admin::ExecutionSpec)}, + { 138, -1, sizeof(::flyteidl::admin::ExecutionTerminateRequest)}, + { 145, -1, sizeof(::flyteidl::admin::ExecutionTerminateResponse)}, + { 150, -1, sizeof(::flyteidl::admin::WorkflowExecutionGetDataRequest)}, + { 156, -1, sizeof(::flyteidl::admin::WorkflowExecutionGetDataResponse)}, + { 165, -1, sizeof(::flyteidl::admin::ExecutionUpdateRequest)}, + { 172, -1, sizeof(::flyteidl::admin::ExecutionStateChangeDetails)}, + { 180, -1, sizeof(::flyteidl::admin::ExecutionUpdateResponse)}, + { 185, -1, sizeof(::flyteidl::admin::WorkflowExecutionGetMetricsRequest)}, + { 192, -1, sizeof(::flyteidl::admin::WorkflowExecutionGetMetricsResponse)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -818,119 +819,119 @@ const char descriptor_table_protodef_flyteidl_2fadmin_2fexecution_2eproto[] = "idl/core/security.proto\032\036google/protobuf" "/duration.proto\032\037google/protobuf/timesta" "mp.proto\032\036google/protobuf/wrappers.proto" - "\"\237\001\n\026ExecutionCreateRequest\022\017\n\007project\030\001" + "\"\254\001\n\026ExecutionCreateRequest\022\017\n\007project\030\001" " \001(\t\022\016\n\006domain\030\002 \001(\t\022\014\n\004name\030\003 \001(\t\022+\n\004sp" "ec\030\004 \001(\0132\035.flyteidl.admin.ExecutionSpec\022" ")\n\006inputs\030\005 \001(\0132\031.flyteidl.core.LiteralM" - "ap\"\177\n\030ExecutionRelaunchRequest\0226\n\002id\030\001 \001" - "(\0132*.flyteidl.core.WorkflowExecutionIden" - "tifier\022\014\n\004name\030\003 \001(\t\022\027\n\017overwrite_cache\030" - "\004 \001(\010J\004\010\002\020\003\"\224\001\n\027ExecutionRecoverRequest\022" + "ap\022\013\n\003org\030\006 \001(\t\"\177\n\030ExecutionRelaunchRequ" + "est\0226\n\002id\030\001 \001(\0132*.flyteidl.core.Workflow" + "ExecutionIdentifier\022\014\n\004name\030\003 \001(\t\022\027\n\017ove" + "rwrite_cache\030\004 
\001(\010J\004\010\002\020\003\"\224\001\n\027ExecutionRe" + "coverRequest\0226\n\002id\030\001 \001(\0132*.flyteidl.core" + ".WorkflowExecutionIdentifier\022\014\n\004name\030\002 \001" + "(\t\0223\n\010metadata\030\003 \001(\0132!.flyteidl.admin.Ex" + "ecutionMetadata\"Q\n\027ExecutionCreateRespon" + "se\0226\n\002id\030\001 \001(\0132*.flyteidl.core.WorkflowE" + "xecutionIdentifier\"U\n\033WorkflowExecutionG" + "etRequest\0226\n\002id\030\001 \001(\0132*.flyteidl.core.Wo" + "rkflowExecutionIdentifier\"\243\001\n\tExecution\022" "6\n\002id\030\001 \001(\0132*.flyteidl.core.WorkflowExec" - "utionIdentifier\022\014\n\004name\030\002 \001(\t\0223\n\010metadat" - "a\030\003 \001(\0132!.flyteidl.admin.ExecutionMetada" - "ta\"Q\n\027ExecutionCreateResponse\0226\n\002id\030\001 \001(" - "\0132*.flyteidl.core.WorkflowExecutionIdent" - "ifier\"U\n\033WorkflowExecutionGetRequest\0226\n\002" - "id\030\001 \001(\0132*.flyteidl.core.WorkflowExecuti" - "onIdentifier\"\243\001\n\tExecution\0226\n\002id\030\001 \001(\0132*" - ".flyteidl.core.WorkflowExecutionIdentifi" - "er\022+\n\004spec\030\002 \001(\0132\035.flyteidl.admin.Execut" - "ionSpec\0221\n\007closure\030\003 \001(\0132 .flyteidl.admi" - "n.ExecutionClosure\"M\n\rExecutionList\022-\n\ne" - "xecutions\030\001 \003(\0132\031.flyteidl.admin.Executi" - "on\022\r\n\005token\030\002 \001(\t\"X\n\016LiteralMapBlob\022/\n\006v" - "alues\030\001 \001(\0132\031.flyteidl.core.LiteralMapB\002" - "\030\001H\000\022\r\n\003uri\030\002 \001(\tH\000B\006\n\004data\"1\n\rAbortMeta" - "data\022\r\n\005cause\030\001 \001(\t\022\021\n\tprincipal\030\002 \001(\t\"\360" - "\005\n\020ExecutionClosure\0225\n\007outputs\030\001 \001(\0132\036.f" - "lyteidl.admin.LiteralMapBlobB\002\030\001H\000\022.\n\005er" - "ror\030\002 \001(\0132\035.flyteidl.core.ExecutionError" - "H\000\022\031\n\013abort_cause\030\n \001(\tB\002\030\001H\000\0227\n\016abort_m" - "etadata\030\014 \001(\0132\035.flyteidl.admin.AbortMeta" - 
"dataH\000\0224\n\013output_data\030\r \001(\0132\031.flyteidl.c" - "ore.LiteralMapB\002\030\001H\000\0226\n\017computed_inputs\030" - "\003 \001(\0132\031.flyteidl.core.LiteralMapB\002\030\001\0225\n\005" - "phase\030\004 \001(\0162&.flyteidl.core.WorkflowExec" - "ution.Phase\022.\n\nstarted_at\030\005 \001(\0132\032.google" - ".protobuf.Timestamp\022+\n\010duration\030\006 \001(\0132\031." - "google.protobuf.Duration\022.\n\ncreated_at\030\007" - " \001(\0132\032.google.protobuf.Timestamp\022.\n\nupda" - "ted_at\030\010 \001(\0132\032.google.protobuf.Timestamp" - "\0223\n\rnotifications\030\t \003(\0132\034.flyteidl.admin" - ".Notification\022.\n\013workflow_id\030\013 \001(\0132\031.fly" - "teidl.core.Identifier\022I\n\024state_change_de" - "tails\030\016 \001(\0132+.flyteidl.admin.ExecutionSt" - "ateChangeDetailsB\017\n\routput_result\">\n\016Sys" - "temMetadata\022\031\n\021execution_cluster\030\001 \001(\t\022\021" - "\n\tnamespace\030\002 \001(\t\"\213\004\n\021ExecutionMetadata\022" - "=\n\004mode\030\001 \001(\0162/.flyteidl.admin.Execution" - "Metadata.ExecutionMode\022\021\n\tprincipal\030\002 \001(" - "\t\022\017\n\007nesting\030\003 \001(\r\0220\n\014scheduled_at\030\004 \001(\013" - "2\032.google.protobuf.Timestamp\022E\n\025parent_n" - "ode_execution\030\005 \001(\0132&.flyteidl.core.Node" - "ExecutionIdentifier\022G\n\023reference_executi" - "on\030\020 \001(\0132*.flyteidl.core.WorkflowExecuti" - "onIdentifier\0227\n\017system_metadata\030\021 \001(\0132\036." 
- "flyteidl.admin.SystemMetadata\022/\n\014artifac" - "t_ids\030\022 \003(\0132\031.flyteidl.core.ArtifactID\"g" - "\n\rExecutionMode\022\n\n\006MANUAL\020\000\022\r\n\tSCHEDULED" - "\020\001\022\n\n\006SYSTEM\020\002\022\014\n\010RELAUNCH\020\003\022\022\n\016CHILD_WO" - "RKFLOW\020\004\022\r\n\tRECOVERED\020\005\"G\n\020NotificationL" - "ist\0223\n\rnotifications\030\001 \003(\0132\034.flyteidl.ad" - "min.Notification\"\262\006\n\rExecutionSpec\022.\n\013la" - "unch_plan\030\001 \001(\0132\031.flyteidl.core.Identifi" - "er\022-\n\006inputs\030\002 \001(\0132\031.flyteidl.core.Liter" - "alMapB\002\030\001\0223\n\010metadata\030\003 \001(\0132!.flyteidl.a" - "dmin.ExecutionMetadata\0229\n\rnotifications\030" - "\005 \001(\0132 .flyteidl.admin.NotificationListH" - "\000\022\025\n\013disable_all\030\006 \001(\010H\000\022&\n\006labels\030\007 \001(\013" - "2\026.flyteidl.admin.Labels\0220\n\013annotations\030" - "\010 \001(\0132\033.flyteidl.admin.Annotations\0228\n\020se" - "curity_context\030\n \001(\0132\036.flyteidl.core.Sec" - "urityContext\022/\n\tauth_role\030\020 \001(\0132\030.flytei" - "dl.admin.AuthRoleB\002\030\001\022;\n\022quality_of_serv" - "ice\030\021 \001(\0132\037.flyteidl.core.QualityOfServi" - "ce\022\027\n\017max_parallelism\030\022 \001(\005\022C\n\026raw_outpu" - "t_data_config\030\023 \001(\0132#.flyteidl.admin.Raw" - "OutputDataConfig\022=\n\022cluster_assignment\030\024" - " \001(\0132!.flyteidl.admin.ClusterAssignment\022" - "1\n\rinterruptible\030\025 \001(\0132\032.google.protobuf" - ".BoolValue\022\027\n\017overwrite_cache\030\026 \001(\010\022\"\n\004e" - "nvs\030\027 \001(\0132\024.flyteidl.admin.Envs\022\014\n\004tags\030" - "\030 \003(\tB\030\n\026notification_overridesJ\004\010\004\020\005\"b\n" - "\031ExecutionTerminateRequest\0226\n\002id\030\001 \001(\0132*" - ".flyteidl.core.WorkflowExecutionIdentifi" - "er\022\r\n\005cause\030\002 \001(\t\"\034\n\032ExecutionTerminateR" - "esponse\"Y\n\037WorkflowExecutionGetDataReque" + 
"utionIdentifier\022+\n\004spec\030\002 \001(\0132\035.flyteidl" + ".admin.ExecutionSpec\0221\n\007closure\030\003 \001(\0132 ." + "flyteidl.admin.ExecutionClosure\"M\n\rExecu" + "tionList\022-\n\nexecutions\030\001 \003(\0132\031.flyteidl." + "admin.Execution\022\r\n\005token\030\002 \001(\t\"X\n\016Litera" + "lMapBlob\022/\n\006values\030\001 \001(\0132\031.flyteidl.core" + ".LiteralMapB\002\030\001H\000\022\r\n\003uri\030\002 \001(\tH\000B\006\n\004data" + "\"1\n\rAbortMetadata\022\r\n\005cause\030\001 \001(\t\022\021\n\tprin" + "cipal\030\002 \001(\t\"\360\005\n\020ExecutionClosure\0225\n\007outp" + "uts\030\001 \001(\0132\036.flyteidl.admin.LiteralMapBlo" + "bB\002\030\001H\000\022.\n\005error\030\002 \001(\0132\035.flyteidl.core.E" + "xecutionErrorH\000\022\031\n\013abort_cause\030\n \001(\tB\002\030\001" + "H\000\0227\n\016abort_metadata\030\014 \001(\0132\035.flyteidl.ad" + "min.AbortMetadataH\000\0224\n\013output_data\030\r \001(\013" + "2\031.flyteidl.core.LiteralMapB\002\030\001H\000\0226\n\017com" + "puted_inputs\030\003 \001(\0132\031.flyteidl.core.Liter" + "alMapB\002\030\001\0225\n\005phase\030\004 \001(\0162&.flyteidl.core" + ".WorkflowExecution.Phase\022.\n\nstarted_at\030\005" + " \001(\0132\032.google.protobuf.Timestamp\022+\n\010dura" + "tion\030\006 \001(\0132\031.google.protobuf.Duration\022.\n" + "\ncreated_at\030\007 \001(\0132\032.google.protobuf.Time" + "stamp\022.\n\nupdated_at\030\010 \001(\0132\032.google.proto" + "buf.Timestamp\0223\n\rnotifications\030\t \003(\0132\034.f" + "lyteidl.admin.Notification\022.\n\013workflow_i" + "d\030\013 \001(\0132\031.flyteidl.core.Identifier\022I\n\024st" + "ate_change_details\030\016 \001(\0132+.flyteidl.admi" + "n.ExecutionStateChangeDetailsB\017\n\routput_" + "result\">\n\016SystemMetadata\022\031\n\021execution_cl" + "uster\030\001 \001(\t\022\021\n\tnamespace\030\002 \001(\t\"\213\004\n\021Execu" + "tionMetadata\022=\n\004mode\030\001 \001(\0162/.flyteidl.ad" + 
"min.ExecutionMetadata.ExecutionMode\022\021\n\tp" + "rincipal\030\002 \001(\t\022\017\n\007nesting\030\003 \001(\r\0220\n\014sched" + "uled_at\030\004 \001(\0132\032.google.protobuf.Timestam" + "p\022E\n\025parent_node_execution\030\005 \001(\0132&.flyte" + "idl.core.NodeExecutionIdentifier\022G\n\023refe" + "rence_execution\030\020 \001(\0132*.flyteidl.core.Wo" + "rkflowExecutionIdentifier\0227\n\017system_meta" + "data\030\021 \001(\0132\036.flyteidl.admin.SystemMetada" + "ta\022/\n\014artifact_ids\030\022 \003(\0132\031.flyteidl.core" + ".ArtifactID\"g\n\rExecutionMode\022\n\n\006MANUAL\020\000" + "\022\r\n\tSCHEDULED\020\001\022\n\n\006SYSTEM\020\002\022\014\n\010RELAUNCH\020" + "\003\022\022\n\016CHILD_WORKFLOW\020\004\022\r\n\tRECOVERED\020\005\"G\n\020" + "NotificationList\0223\n\rnotifications\030\001 \003(\0132" + "\034.flyteidl.admin.Notification\"\262\006\n\rExecut" + "ionSpec\022.\n\013launch_plan\030\001 \001(\0132\031.flyteidl." + "core.Identifier\022-\n\006inputs\030\002 \001(\0132\031.flytei" + "dl.core.LiteralMapB\002\030\001\0223\n\010metadata\030\003 \001(\013" + "2!.flyteidl.admin.ExecutionMetadata\0229\n\rn" + "otifications\030\005 \001(\0132 .flyteidl.admin.Noti" + "ficationListH\000\022\025\n\013disable_all\030\006 \001(\010H\000\022&\n" + "\006labels\030\007 \001(\0132\026.flyteidl.admin.Labels\0220\n" + "\013annotations\030\010 \001(\0132\033.flyteidl.admin.Anno" + "tations\0228\n\020security_context\030\n \001(\0132\036.flyt" + "eidl.core.SecurityContext\022/\n\tauth_role\030\020" + " \001(\0132\030.flyteidl.admin.AuthRoleB\002\030\001\022;\n\022qu" + "ality_of_service\030\021 \001(\0132\037.flyteidl.core.Q" + "ualityOfService\022\027\n\017max_parallelism\030\022 \001(\005" + "\022C\n\026raw_output_data_config\030\023 \001(\0132#.flyte" + "idl.admin.RawOutputDataConfig\022=\n\022cluster" + "_assignment\030\024 \001(\0132!.flyteidl.admin.Clust" + "erAssignment\0221\n\rinterruptible\030\025 \001(\0132\032.go" + 
"ogle.protobuf.BoolValue\022\027\n\017overwrite_cac" + "he\030\026 \001(\010\022\"\n\004envs\030\027 \001(\0132\024.flyteidl.admin." + "Envs\022\014\n\004tags\030\030 \003(\tB\030\n\026notification_overr" + "idesJ\004\010\004\020\005\"b\n\031ExecutionTerminateRequest\022" + "6\n\002id\030\001 \001(\0132*.flyteidl.core.WorkflowExec" + "utionIdentifier\022\r\n\005cause\030\002 \001(\t\"\034\n\032Execut" + "ionTerminateResponse\"Y\n\037WorkflowExecutio" + "nGetDataRequest\0226\n\002id\030\001 \001(\0132*.flyteidl.c" + "ore.WorkflowExecutionIdentifier\"\336\001\n Work" + "flowExecutionGetDataResponse\022,\n\007outputs\030" + "\001 \001(\0132\027.flyteidl.admin.UrlBlobB\002\030\001\022+\n\006in" + "puts\030\002 \001(\0132\027.flyteidl.admin.UrlBlobB\002\030\001\022" + ".\n\013full_inputs\030\003 \001(\0132\031.flyteidl.core.Lit" + "eralMap\022/\n\014full_outputs\030\004 \001(\0132\031.flyteidl" + ".core.LiteralMap\"\177\n\026ExecutionUpdateReque" "st\0226\n\002id\030\001 \001(\0132*.flyteidl.core.WorkflowE" - "xecutionIdentifier\"\336\001\n WorkflowExecution" - "GetDataResponse\022,\n\007outputs\030\001 \001(\0132\027.flyte" - "idl.admin.UrlBlobB\002\030\001\022+\n\006inputs\030\002 \001(\0132\027." 
- "flyteidl.admin.UrlBlobB\002\030\001\022.\n\013full_input" - "s\030\003 \001(\0132\031.flyteidl.core.LiteralMap\022/\n\014fu" - "ll_outputs\030\004 \001(\0132\031.flyteidl.core.Literal" - "Map\"\177\n\026ExecutionUpdateRequest\0226\n\002id\030\001 \001(" - "\0132*.flyteidl.core.WorkflowExecutionIdent" - "ifier\022-\n\005state\030\002 \001(\0162\036.flyteidl.admin.Ex" - "ecutionState\"\220\001\n\033ExecutionStateChangeDet" - "ails\022-\n\005state\030\001 \001(\0162\036.flyteidl.admin.Exe" - "cutionState\022/\n\013occurred_at\030\002 \001(\0132\032.googl" - "e.protobuf.Timestamp\022\021\n\tprincipal\030\003 \001(\t\"" - "\031\n\027ExecutionUpdateResponse\"k\n\"WorkflowEx" - "ecutionGetMetricsRequest\0226\n\002id\030\001 \001(\0132*.f" - "lyteidl.core.WorkflowExecutionIdentifier" - "\022\r\n\005depth\030\002 \001(\005\"H\n#WorkflowExecutionGetM" - "etricsResponse\022!\n\004span\030\001 \001(\0132\023.flyteidl." - "core.Span*>\n\016ExecutionState\022\024\n\020EXECUTION" - "_ACTIVE\020\000\022\026\n\022EXECUTION_ARCHIVED\020\001B=Z;git" - "hub.com/flyteorg/flyte/flyteidl/gen/pb-g" - "o/flyteidl/adminb\006proto3" + "xecutionIdentifier\022-\n\005state\030\002 \001(\0162\036.flyt" + "eidl.admin.ExecutionState\"\220\001\n\033ExecutionS" + "tateChangeDetails\022-\n\005state\030\001 \001(\0162\036.flyte" + "idl.admin.ExecutionState\022/\n\013occurred_at\030" + "\002 \001(\0132\032.google.protobuf.Timestamp\022\021\n\tpri" + "ncipal\030\003 \001(\t\"\031\n\027ExecutionUpdateResponse\"" + "k\n\"WorkflowExecutionGetMetricsRequest\0226\n" + "\002id\030\001 \001(\0132*.flyteidl.core.WorkflowExecut" + "ionIdentifier\022\r\n\005depth\030\002 \001(\005\"H\n#Workflow" + "ExecutionGetMetricsResponse\022!\n\004span\030\001 \001(" + "\0132\023.flyteidl.core.Span*>\n\016ExecutionState" + "\022\024\n\020EXECUTION_ACTIVE\020\000\022\026\n\022EXECUTION_ARCH" + "IVED\020\001B=Z;github.com/flyteorg/flyte/flyt" + "eidl/gen/pb-go/flyteidl/adminb\006proto3" ; 
::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fadmin_2fexecution_2eproto = { false, InitDefaults_flyteidl_2fadmin_2fexecution_2eproto, descriptor_table_protodef_flyteidl_2fadmin_2fexecution_2eproto, - "flyteidl/admin/execution.proto", &assign_descriptors_table_flyteidl_2fadmin_2fexecution_2eproto, 4704, + "flyteidl/admin/execution.proto", &assign_descriptors_table_flyteidl_2fadmin_2fexecution_2eproto, 4717, }; void AddDescriptors_flyteidl_2fadmin_2fexecution_2eproto() { @@ -1033,6 +1034,7 @@ const int ExecutionCreateRequest::kDomainFieldNumber; const int ExecutionCreateRequest::kNameFieldNumber; const int ExecutionCreateRequest::kSpecFieldNumber; const int ExecutionCreateRequest::kInputsFieldNumber; +const int ExecutionCreateRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 ExecutionCreateRequest::ExecutionCreateRequest() @@ -1056,6 +1058,10 @@ ExecutionCreateRequest::ExecutionCreateRequest(const ExecutionCreateRequest& fro if (from.name().size() > 0) { name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_spec()) { spec_ = new ::flyteidl::admin::ExecutionSpec(*from.spec_); } else { @@ -1075,6 +1081,7 @@ void ExecutionCreateRequest::SharedCtor() { project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); ::memset(&spec_, 0, static_cast( reinterpret_cast(&inputs_) - reinterpret_cast(&spec_)) + sizeof(inputs_)); @@ -1089,6 +1096,7 @@ void 
ExecutionCreateRequest::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete spec_; if (this != internal_default_instance()) delete inputs_; } @@ -1111,6 +1119,7 @@ void ExecutionCreateRequest::Clear() { project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && spec_ != nullptr) { delete spec_; } @@ -1209,6 +1218,22 @@ const char* ExecutionCreateRequest::_InternalParse(const char* begin, const char {parser_till_end, object}, ptr - size, ptr)); break; } + // string org = 6; + case 6: { + if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.ExecutionCreateRequest.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -1310,6 +1335,21 @@ bool 
ExecutionCreateRequest::MergePartialFromCodedStream( break; } + // string org = 6; + case 6: { + if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.ExecutionCreateRequest.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -1379,6 +1419,16 @@ void ExecutionCreateRequest::SerializeWithCachedSizes( 5, HasBitSetters::inputs(this), output); } + // string org = 6; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ExecutionCreateRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 6, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -1439,6 +1489,17 @@ ::google::protobuf::uint8* ExecutionCreateRequest::InternalSerializeWithCachedSi 5, HasBitSetters::inputs(this), target); } + // string org = 6; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ExecutionCreateRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 6, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -1481,6 +1542,13 
@@ size_t ExecutionCreateRequest::ByteSizeLong() const { this->name()); } + // string org = 6; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.ExecutionSpec spec = 4; if (this->has_spec()) { total_size += 1 + @@ -1534,6 +1602,10 @@ void ExecutionCreateRequest::MergeFrom(const ExecutionCreateRequest& from) { name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_spec()) { mutable_spec()->::flyteidl::admin::ExecutionSpec::MergeFrom(from.spec()); } @@ -1573,6 +1645,8 @@ void ExecutionCreateRequest::InternalSwap(ExecutionCreateRequest* other) { GetArenaNoVirtual()); name_.Swap(&other->name_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(spec_, other->spec_); swap(inputs_, other->inputs_); } diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/execution.pb.h b/flyteidl/gen/pb-cpp/flyteidl/admin/execution.pb.h index fbe4b1f4f4..e98933aa24 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/execution.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/execution.pb.h @@ -348,6 +348,20 @@ class ExecutionCreateRequest final : ::std::string* release_name(); void set_allocated_name(::std::string* name); + // string org = 6; + void clear_org(); + static const int kOrgFieldNumber = 6; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // 
.flyteidl.admin.ExecutionSpec spec = 4; bool has_spec() const; void clear_spec(); @@ -374,6 +388,7 @@ class ExecutionCreateRequest final : ::google::protobuf::internal::ArenaStringPtr project_; ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr name_; + ::google::protobuf::internal::ArenaStringPtr org_; ::flyteidl::admin::ExecutionSpec* spec_; ::flyteidl::core::LiteralMap* inputs_; mutable ::google::protobuf::internal::CachedSize _cached_size_; @@ -3853,6 +3868,59 @@ inline void ExecutionCreateRequest::set_allocated_inputs(::flyteidl::core::Liter // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ExecutionCreateRequest.inputs) } +// string org = 6; +inline void ExecutionCreateRequest::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& ExecutionCreateRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.ExecutionCreateRequest.org) + return org_.GetNoArena(); +} +inline void ExecutionCreateRequest::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.ExecutionCreateRequest.org) +} +#if LANG_CXX11 +inline void ExecutionCreateRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.ExecutionCreateRequest.org) +} +#endif +inline void ExecutionCreateRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.ExecutionCreateRequest.org) +} +inline void ExecutionCreateRequest::set_org(const char* value, size_t size) { + + 
org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.ExecutionCreateRequest.org) +} +inline ::std::string* ExecutionCreateRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.ExecutionCreateRequest.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* ExecutionCreateRequest::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.ExecutionCreateRequest.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void ExecutionCreateRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ExecutionCreateRequest.org) +} + // ------------------------------------------------------------------- // ExecutionRelaunchRequest diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/launch_plan.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/admin/launch_plan.pb.cc index a69ec02fdb..d02760dfd5 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/launch_plan.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/launch_plan.pb.cc @@ -408,6 +408,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2flaunch_5fplan_2e PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ActiveLaunchPlanListRequest, limit_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ActiveLaunchPlanListRequest, token_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ActiveLaunchPlanListRequest, sort_by_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ActiveLaunchPlanListRequest, org_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, 
sizeof(::flyteidl::admin::LaunchPlanCreateRequest)}, @@ -501,18 +502,19 @@ const char descriptor_table_protodef_flyteidl_2fadmin_2flaunch_5fplan_2eproto[] "\001(\0162\037.flyteidl.admin.LaunchPlanState\"\032\n\030" "LaunchPlanUpdateResponse\"L\n\027ActiveLaunch" "PlanRequest\0221\n\002id\030\001 \001(\0132%.flyteidl.admin" - ".NamedEntityIdentifier\"\203\001\n\033ActiveLaunchP" + ".NamedEntityIdentifier\"\220\001\n\033ActiveLaunchP" "lanListRequest\022\017\n\007project\030\001 \001(\t\022\016\n\006domai" "n\030\002 \001(\t\022\r\n\005limit\030\003 \001(\r\022\r\n\005token\030\004 \001(\t\022%\n" - "\007sort_by\030\005 \001(\0132\024.flyteidl.admin.Sort*+\n\017" - "LaunchPlanState\022\014\n\010INACTIVE\020\000\022\n\n\006ACTIVE\020" - "\001B=Z;github.com/flyteorg/flyte/flyteidl/" - "gen/pb-go/flyteidl/adminb\006proto3" + "\007sort_by\030\005 \001(\0132\024.flyteidl.admin.Sort\022\013\n\003" + "org\030\006 \001(\t*+\n\017LaunchPlanState\022\014\n\010INACTIVE" + "\020\000\022\n\n\006ACTIVE\020\001B=Z;github.com/flyteorg/fl" + "yte/flyteidl/gen/pb-go/flyteidl/adminb\006p" + "roto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fadmin_2flaunch_5fplan_2eproto = { false, InitDefaults_flyteidl_2fadmin_2flaunch_5fplan_2eproto, descriptor_table_protodef_flyteidl_2fadmin_2flaunch_5fplan_2eproto, - "flyteidl/admin/launch_plan.proto", &assign_descriptors_table_flyteidl_2fadmin_2flaunch_5fplan_2eproto, 2472, + "flyteidl/admin/launch_plan.proto", &assign_descriptors_table_flyteidl_2fadmin_2flaunch_5fplan_2eproto, 2485, }; void AddDescriptors_flyteidl_2fadmin_2flaunch_5fplan_2eproto() { @@ -5446,6 +5448,7 @@ const int ActiveLaunchPlanListRequest::kDomainFieldNumber; const int ActiveLaunchPlanListRequest::kLimitFieldNumber; const int ActiveLaunchPlanListRequest::kTokenFieldNumber; const int ActiveLaunchPlanListRequest::kSortByFieldNumber; +const int ActiveLaunchPlanListRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 
ActiveLaunchPlanListRequest::ActiveLaunchPlanListRequest() @@ -5469,6 +5472,10 @@ ActiveLaunchPlanListRequest::ActiveLaunchPlanListRequest(const ActiveLaunchPlanL if (from.token().size() > 0) { token_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.token_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_sort_by()) { sort_by_ = new ::flyteidl::admin::Sort(*from.sort_by_); } else { @@ -5484,6 +5491,7 @@ void ActiveLaunchPlanListRequest::SharedCtor() { project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); ::memset(&sort_by_, 0, static_cast( reinterpret_cast(&limit_) - reinterpret_cast(&sort_by_)) + sizeof(limit_)); @@ -5498,6 +5506,7 @@ void ActiveLaunchPlanListRequest::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); token_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete sort_by_; } @@ -5519,6 +5528,7 @@ void ActiveLaunchPlanListRequest::Clear() { project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + 
org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && sort_by_ != nullptr) { delete sort_by_; } @@ -5608,6 +5618,22 @@ const char* ActiveLaunchPlanListRequest::_InternalParse(const char* begin, const {parser_till_end, object}, ptr - size, ptr)); break; } + // string org = 6; + case 6: { + if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.ActiveLaunchPlanListRequest.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -5711,6 +5737,21 @@ bool ActiveLaunchPlanListRequest::MergePartialFromCodedStream( break; } + // string org = 6; + case 6: { + if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.ActiveLaunchPlanListRequest.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -5779,6 +5820,16 @@ void ActiveLaunchPlanListRequest::SerializeWithCachedSizes( 5, HasBitSetters::sort_by(this), output); } + // string org = 6; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), 
static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ActiveLaunchPlanListRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 6, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -5837,6 +5888,17 @@ ::google::protobuf::uint8* ActiveLaunchPlanListRequest::InternalSerializeWithCac 5, HasBitSetters::sort_by(this), target); } + // string org = 6; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ActiveLaunchPlanListRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 6, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -5879,6 +5941,13 @@ size_t ActiveLaunchPlanListRequest::ByteSizeLong() const { this->token()); } + // string org = 6; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.Sort sort_by = 5; if (this->has_sort_by()) { total_size += 1 + @@ -5932,6 +6001,10 @@ void ActiveLaunchPlanListRequest::MergeFrom(const ActiveLaunchPlanListRequest& f token_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.token_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_sort_by()) { mutable_sort_by()->::flyteidl::admin::Sort::MergeFrom(from.sort_by()); } @@ -5971,6 +6044,8 @@ void 
ActiveLaunchPlanListRequest::InternalSwap(ActiveLaunchPlanListRequest* othe GetArenaNoVirtual()); token_.Swap(&other->token_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(sort_by_, other->sort_by_); swap(limit_, other->limit_); } diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/launch_plan.pb.h b/flyteidl/gen/pb-cpp/flyteidl/admin/launch_plan.pb.h index 27e0deca98..8334ed7e4a 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/launch_plan.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/launch_plan.pb.h @@ -1807,6 +1807,20 @@ class ActiveLaunchPlanListRequest final : ::std::string* release_token(); void set_allocated_token(::std::string* token); + // string org = 6; + void clear_org(); + static const int kOrgFieldNumber = 6; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.Sort sort_by = 5; bool has_sort_by() const; void clear_sort_by(); @@ -1830,6 +1844,7 @@ class ActiveLaunchPlanListRequest final : ::google::protobuf::internal::ArenaStringPtr project_; ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr token_; + ::google::protobuf::internal::ArenaStringPtr org_; ::flyteidl::admin::Sort* sort_by_; ::google::protobuf::uint32 limit_; mutable ::google::protobuf::internal::CachedSize _cached_size_; @@ -3636,6 +3651,59 @@ inline void ActiveLaunchPlanListRequest::set_allocated_sort_by(::flyteidl::admin // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ActiveLaunchPlanListRequest.sort_by) } +// string org = 6; +inline void 
ActiveLaunchPlanListRequest::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& ActiveLaunchPlanListRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.ActiveLaunchPlanListRequest.org) + return org_.GetNoArena(); +} +inline void ActiveLaunchPlanListRequest::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.ActiveLaunchPlanListRequest.org) +} +#if LANG_CXX11 +inline void ActiveLaunchPlanListRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.ActiveLaunchPlanListRequest.org) +} +#endif +inline void ActiveLaunchPlanListRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.ActiveLaunchPlanListRequest.org) +} +inline void ActiveLaunchPlanListRequest::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.ActiveLaunchPlanListRequest.org) +} +inline ::std::string* ActiveLaunchPlanListRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.ActiveLaunchPlanListRequest.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* ActiveLaunchPlanListRequest::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.ActiveLaunchPlanListRequest.org) + + return 
org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void ActiveLaunchPlanListRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ActiveLaunchPlanListRequest.org) +} + #ifdef __GNUC__ #pragma GCC diagnostic pop #endif // __GNUC__ diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/matchable_resource.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/admin/matchable_resource.pb.cc index b38613ca8a..8bd580b5d2 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/matchable_resource.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/matchable_resource.pb.cc @@ -415,12 +415,14 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fmatchable_5freso PROTOBUF_FIELD_OFFSET(::flyteidl::admin::MatchableAttributesConfiguration, project_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::MatchableAttributesConfiguration, workflow_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::MatchableAttributesConfiguration, launch_plan_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::MatchableAttributesConfiguration, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ListMatchableAttributesRequest, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ListMatchableAttributesRequest, resource_type_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ListMatchableAttributesRequest, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ListMatchableAttributesResponse, _internal_metadata_), ~0u, // no _extensions_ @@ -440,8 +442,8 @@ static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SE { 58, -1, sizeof(::flyteidl::admin::WorkflowExecutionConfig)}, { 71, -1, sizeof(::flyteidl::admin::MatchingAttributes)}, { 85, -1, 
sizeof(::flyteidl::admin::MatchableAttributesConfiguration)}, - { 95, -1, sizeof(::flyteidl::admin::ListMatchableAttributesRequest)}, - { 101, -1, sizeof(::flyteidl::admin::ListMatchableAttributesResponse)}, + { 96, -1, sizeof(::flyteidl::admin::ListMatchableAttributesRequest)}, + { 103, -1, sizeof(::flyteidl::admin::ListMatchableAttributesResponse)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -515,29 +517,30 @@ const char descriptor_table_protodef_flyteidl_2fadmin_2fmatchable_5fresource_2ep "sH\000\022L\n\031workflow_execution_config\030\007 \001(\0132\'" ".flyteidl.admin.WorkflowExecutionConfigH" "\000\022\?\n\022cluster_assignment\030\010 \001(\0132!.flyteidl" - ".admin.ClusterAssignmentH\000B\010\n\006target\"\242\001\n" + ".admin.ClusterAssignmentH\000B\010\n\006target\"\257\001\n" " MatchableAttributesConfiguration\0226\n\natt" "ributes\030\001 \001(\0132\".flyteidl.admin.MatchingA" "ttributes\022\016\n\006domain\030\002 \001(\t\022\017\n\007project\030\003 \001" "(\t\022\020\n\010workflow\030\004 \001(\t\022\023\n\013launch_plan\030\005 \001(" - "\t\"Z\n\036ListMatchableAttributesRequest\0228\n\rr" - "esource_type\030\001 \001(\0162!.flyteidl.admin.Matc" - "hableResource\"k\n\037ListMatchableAttributes" - "Response\022H\n\016configurations\030\001 \003(\01320.flyte" - "idl.admin.MatchableAttributesConfigurati" - "on*\340\001\n\021MatchableResource\022\021\n\rTASK_RESOURC" - "E\020\000\022\024\n\020CLUSTER_RESOURCE\020\001\022\023\n\017EXECUTION_Q" - "UEUE\020\002\022\033\n\027EXECUTION_CLUSTER_LABEL\020\003\022$\n Q" - "UALITY_OF_SERVICE_SPECIFICATION\020\004\022\023\n\017PLU" - "GIN_OVERRIDE\020\005\022\035\n\031WORKFLOW_EXECUTION_CON" - "FIG\020\006\022\026\n\022CLUSTER_ASSIGNMENT\020\007B=Z;github." 
- "com/flyteorg/flyte/flyteidl/gen/pb-go/fl" - "yteidl/adminb\006proto3" + "\t\022\013\n\003org\030\006 \001(\t\"g\n\036ListMatchableAttribute" + "sRequest\0228\n\rresource_type\030\001 \001(\0162!.flytei" + "dl.admin.MatchableResource\022\013\n\003org\030\002 \001(\t\"" + "k\n\037ListMatchableAttributesResponse\022H\n\016co" + "nfigurations\030\001 \003(\01320.flyteidl.admin.Matc" + "hableAttributesConfiguration*\340\001\n\021Matchab" + "leResource\022\021\n\rTASK_RESOURCE\020\000\022\024\n\020CLUSTER" + "_RESOURCE\020\001\022\023\n\017EXECUTION_QUEUE\020\002\022\033\n\027EXEC" + "UTION_CLUSTER_LABEL\020\003\022$\n QUALITY_OF_SERV" + "ICE_SPECIFICATION\020\004\022\023\n\017PLUGIN_OVERRIDE\020\005" + "\022\035\n\031WORKFLOW_EXECUTION_CONFIG\020\006\022\026\n\022CLUST" + "ER_ASSIGNMENT\020\007B=Z;github.com/flyteorg/f" + "lyte/flyteidl/gen/pb-go/flyteidl/adminb\006" + "proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fadmin_2fmatchable_5fresource_2eproto = { false, InitDefaults_flyteidl_2fadmin_2fmatchable_5fresource_2eproto, descriptor_table_protodef_flyteidl_2fadmin_2fmatchable_5fresource_2eproto, - "flyteidl/admin/matchable_resource.proto", &assign_descriptors_table_flyteidl_2fadmin_2fmatchable_5fresource_2eproto, 2620, + "flyteidl/admin/matchable_resource.proto", &assign_descriptors_table_flyteidl_2fadmin_2fmatchable_5fresource_2eproto, 2646, }; void AddDescriptors_flyteidl_2fadmin_2fmatchable_5fresource_2eproto() { @@ -4901,6 +4904,7 @@ const int MatchableAttributesConfiguration::kDomainFieldNumber; const int MatchableAttributesConfiguration::kProjectFieldNumber; const int MatchableAttributesConfiguration::kWorkflowFieldNumber; const int MatchableAttributesConfiguration::kLaunchPlanFieldNumber; +const int MatchableAttributesConfiguration::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 MatchableAttributesConfiguration::MatchableAttributesConfiguration() @@ -4928,6 +4932,10 @@ 
MatchableAttributesConfiguration::MatchableAttributesConfiguration(const Matchab if (from.launch_plan().size() > 0) { launch_plan_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.launch_plan_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_attributes()) { attributes_ = new ::flyteidl::admin::MatchingAttributes(*from.attributes_); } else { @@ -4943,6 +4951,7 @@ void MatchableAttributesConfiguration::SharedCtor() { project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); launch_plan_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); attributes_ = nullptr; } @@ -4956,6 +4965,7 @@ void MatchableAttributesConfiguration::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); launch_plan_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete attributes_; } @@ -4978,6 +4988,7 @@ void MatchableAttributesConfiguration::Clear() { project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); launch_plan_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && 
attributes_ != nullptr) { delete attributes_; } @@ -5075,6 +5086,22 @@ const char* MatchableAttributesConfiguration::_InternalParse(const char* begin, ptr += size; break; } + // string org = 6; + case 6: { + if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.MatchableAttributesConfiguration.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -5180,6 +5207,21 @@ bool MatchableAttributesConfiguration::MergePartialFromCodedStream( break; } + // string org = 6; + case 6: { + if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.MatchableAttributesConfiguration.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -5253,6 +5295,16 @@ void MatchableAttributesConfiguration::SerializeWithCachedSizes( 5, this->launch_plan(), output); } + // string org = 6; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + 
"flyteidl.admin.MatchableAttributesConfiguration.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 6, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -5317,6 +5369,17 @@ ::google::protobuf::uint8* MatchableAttributesConfiguration::InternalSerializeWi 5, this->launch_plan(), target); } + // string org = 6; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.MatchableAttributesConfiguration.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 6, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -5366,6 +5429,13 @@ size_t MatchableAttributesConfiguration::ByteSizeLong() const { this->launch_plan()); } + // string org = 6; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.MatchingAttributes attributes = 1; if (this->has_attributes()) { total_size += 1 + @@ -5416,6 +5486,10 @@ void MatchableAttributesConfiguration::MergeFrom(const MatchableAttributesConfig launch_plan_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.launch_plan_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_attributes()) { mutable_attributes()->::flyteidl::admin::MatchingAttributes::MergeFrom(from.attributes()); } @@ -5454,6 +5528,8 @@ void MatchableAttributesConfiguration::InternalSwap(MatchableAttributesConfigura 
GetArenaNoVirtual()); launch_plan_.Swap(&other->launch_plan_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(attributes_, other->attributes_); } @@ -5473,6 +5549,7 @@ class ListMatchableAttributesRequest::HasBitSetters { #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int ListMatchableAttributesRequest::kResourceTypeFieldNumber; +const int ListMatchableAttributesRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 ListMatchableAttributesRequest::ListMatchableAttributesRequest() @@ -5484,11 +5561,18 @@ ListMatchableAttributesRequest::ListMatchableAttributesRequest(const ListMatchab : ::google::protobuf::Message(), _internal_metadata_(nullptr) { _internal_metadata_.MergeFrom(from._internal_metadata_); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } resource_type_ = from.resource_type_; // @@protoc_insertion_point(copy_constructor:flyteidl.admin.ListMatchableAttributesRequest) } void ListMatchableAttributesRequest::SharedCtor() { + ::google::protobuf::internal::InitSCC( + &scc_info_ListMatchableAttributesRequest_flyteidl_2fadmin_2fmatchable_5fresource_2eproto.base); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; } @@ -5498,6 +5582,7 @@ ListMatchableAttributesRequest::~ListMatchableAttributesRequest() { } void ListMatchableAttributesRequest::SharedDtor() { + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } void ListMatchableAttributesRequest::SetCachedSize(int size) const { @@ -5515,6 +5600,7 @@ void ListMatchableAttributesRequest::Clear() { // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; + 
org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; _internal_metadata_.Clear(); } @@ -5540,6 +5626,22 @@ const char* ListMatchableAttributesRequest::_InternalParse(const char* begin, co GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); break; } + // string org = 2; + case 2: { + if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.ListMatchableAttributesRequest.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -5555,6 +5657,13 @@ const char* ListMatchableAttributesRequest::_InternalParse(const char* begin, co } // switch } // while return ptr; +string_till_end: + static_cast<::std::string*>(object)->clear(); + static_cast<::std::string*>(object)->reserve(size); + goto len_delim_till_end; +len_delim_till_end: + return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, + {parser_till_end, object}, size); } #else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER bool ListMatchableAttributesRequest::MergePartialFromCodedStream( @@ -5581,6 +5690,21 @@ bool ListMatchableAttributesRequest::MergePartialFromCodedStream( break; } + // string org = 2; + case 2: { + if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), 
static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.ListMatchableAttributesRequest.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -5614,6 +5738,16 @@ void ListMatchableAttributesRequest::SerializeWithCachedSizes( 1, this->resource_type(), output); } + // string org = 2; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ListMatchableAttributesRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 2, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -5633,6 +5767,17 @@ ::google::protobuf::uint8* ListMatchableAttributesRequest::InternalSerializeWith 1, this->resource_type(), target); } + // string org = 2; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ListMatchableAttributesRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 2, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -5654,6 +5799,13 @@ size_t ListMatchableAttributesRequest::ByteSizeLong() const { // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; + // string org = 2; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // 
.flyteidl.admin.MatchableResource resource_type = 1; if (this->resource_type() != 0) { total_size += 1 + @@ -5687,6 +5839,10 @@ void ListMatchableAttributesRequest::MergeFrom(const ListMatchableAttributesRequ ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.resource_type() != 0) { set_resource_type(from.resource_type()); } @@ -5717,6 +5873,8 @@ void ListMatchableAttributesRequest::Swap(ListMatchableAttributesRequest* other) void ListMatchableAttributesRequest::InternalSwap(ListMatchableAttributesRequest* other) { using std::swap; _internal_metadata_.Swap(&other->_internal_metadata_); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(resource_type_, other->resource_type_); } diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/matchable_resource.pb.h b/flyteidl/gen/pb-cpp/flyteidl/admin/matchable_resource.pb.h index 12ea29fdf1..7fb9357061 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/matchable_resource.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/matchable_resource.pb.h @@ -1707,6 +1707,20 @@ class MatchableAttributesConfiguration final : ::std::string* release_launch_plan(); void set_allocated_launch_plan(::std::string* launch_plan); + // string org = 6; + void clear_org(); + static const int kOrgFieldNumber = 6; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.MatchingAttributes attributes = 1; bool has_attributes() const; void clear_attributes(); @@ -1725,6 +1739,7 @@ class MatchableAttributesConfiguration final : 
::google::protobuf::internal::ArenaStringPtr project_; ::google::protobuf::internal::ArenaStringPtr workflow_; ::google::protobuf::internal::ArenaStringPtr launch_plan_; + ::google::protobuf::internal::ArenaStringPtr org_; ::flyteidl::admin::MatchingAttributes* attributes_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fmatchable_5fresource_2eproto; @@ -1826,6 +1841,20 @@ class ListMatchableAttributesRequest final : // accessors ------------------------------------------------------- + // string org = 2; + void clear_org(); + static const int kOrgFieldNumber = 2; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.MatchableResource resource_type = 1; void clear_resource_type(); static const int kResourceTypeFieldNumber = 1; @@ -1837,6 +1866,7 @@ class ListMatchableAttributesRequest final : class HasBitSetters; ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::internal::ArenaStringPtr org_; int resource_type_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fmatchable_5fresource_2eproto; @@ -3568,6 +3598,59 @@ inline void MatchableAttributesConfiguration::set_allocated_launch_plan(::std::s // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.MatchableAttributesConfiguration.launch_plan) } +// string org = 6; +inline void MatchableAttributesConfiguration::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& MatchableAttributesConfiguration::org() const { + // 
@@protoc_insertion_point(field_get:flyteidl.admin.MatchableAttributesConfiguration.org) + return org_.GetNoArena(); +} +inline void MatchableAttributesConfiguration::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.MatchableAttributesConfiguration.org) +} +#if LANG_CXX11 +inline void MatchableAttributesConfiguration::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.MatchableAttributesConfiguration.org) +} +#endif +inline void MatchableAttributesConfiguration::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.MatchableAttributesConfiguration.org) +} +inline void MatchableAttributesConfiguration::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.MatchableAttributesConfiguration.org) +} +inline ::std::string* MatchableAttributesConfiguration::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.MatchableAttributesConfiguration.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* MatchableAttributesConfiguration::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.MatchableAttributesConfiguration.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void MatchableAttributesConfiguration::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else 
{ + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.MatchableAttributesConfiguration.org) +} + // ------------------------------------------------------------------- // ListMatchableAttributesRequest @@ -3586,6 +3669,59 @@ inline void ListMatchableAttributesRequest::set_resource_type(::flyteidl::admin: // @@protoc_insertion_point(field_set:flyteidl.admin.ListMatchableAttributesRequest.resource_type) } +// string org = 2; +inline void ListMatchableAttributesRequest::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& ListMatchableAttributesRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.ListMatchableAttributesRequest.org) + return org_.GetNoArena(); +} +inline void ListMatchableAttributesRequest::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.ListMatchableAttributesRequest.org) +} +#if LANG_CXX11 +inline void ListMatchableAttributesRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.ListMatchableAttributesRequest.org) +} +#endif +inline void ListMatchableAttributesRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.ListMatchableAttributesRequest.org) +} +inline void ListMatchableAttributesRequest::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + 
// @@protoc_insertion_point(field_set_pointer:flyteidl.admin.ListMatchableAttributesRequest.org) +} +inline ::std::string* ListMatchableAttributesRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.ListMatchableAttributesRequest.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* ListMatchableAttributesRequest::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.ListMatchableAttributesRequest.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void ListMatchableAttributesRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ListMatchableAttributesRequest.org) +} + // ------------------------------------------------------------------- // ListMatchableAttributesResponse diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/node_execution.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/admin/node_execution.pb.cc index 09fc0a5e20..94d514ae08 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/node_execution.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/node_execution.pb.cc @@ -90,6 +90,14 @@ class NodeExecutionGetDataResponseDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed _instance; } _NodeExecutionGetDataResponse_default_instance_; +class GetDynamicNodeWorkflowRequestDefaultTypeInternal { + public: + ::google::protobuf::internal::ExplicitlyConstructed _instance; +} _GetDynamicNodeWorkflowRequest_default_instance_; +class DynamicNodeWorkflowResponseDefaultTypeInternal { + public: + ::google::protobuf::internal::ExplicitlyConstructed _instance; +} _DynamicNodeWorkflowResponse_default_instance_; } // namespace admin } // namespace flyteidl static void 
InitDefaultsNodeExecutionGetRequest_flyteidl_2fadmin_2fnode_5fexecution_2eproto() { @@ -284,6 +292,36 @@ ::google::protobuf::internal::SCCInfo<4> scc_info_NodeExecutionGetDataResponse_f &scc_info_DynamicWorkflowNodeMetadata_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base, &scc_info_FlyteURLs_flyteidl_2fadmin_2fcommon_2eproto.base,}}; +static void InitDefaultsGetDynamicNodeWorkflowRequest_flyteidl_2fadmin_2fnode_5fexecution_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::flyteidl::admin::_GetDynamicNodeWorkflowRequest_default_instance_; + new (ptr) ::flyteidl::admin::GetDynamicNodeWorkflowRequest(); + ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); + } + ::flyteidl::admin::GetDynamicNodeWorkflowRequest::InitAsDefaultInstance(); +} + +::google::protobuf::internal::SCCInfo<1> scc_info_GetDynamicNodeWorkflowRequest_flyteidl_2fadmin_2fnode_5fexecution_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsGetDynamicNodeWorkflowRequest_flyteidl_2fadmin_2fnode_5fexecution_2eproto}, { + &scc_info_NodeExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto.base,}}; + +static void InitDefaultsDynamicNodeWorkflowResponse_flyteidl_2fadmin_2fnode_5fexecution_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::flyteidl::admin::_DynamicNodeWorkflowResponse_default_instance_; + new (ptr) ::flyteidl::admin::DynamicNodeWorkflowResponse(); + ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); + } + ::flyteidl::admin::DynamicNodeWorkflowResponse::InitAsDefaultInstance(); +} + +::google::protobuf::internal::SCCInfo<1> scc_info_DynamicNodeWorkflowResponse_flyteidl_2fadmin_2fnode_5fexecution_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsDynamicNodeWorkflowResponse_flyteidl_2fadmin_2fnode_5fexecution_2eproto}, { + &scc_info_CompiledWorkflowClosure_flyteidl_2fcore_2fcompiler_2eproto.base,}}; + void 
InitDefaults_flyteidl_2fadmin_2fnode_5fexecution_2eproto() { ::google::protobuf::internal::InitSCC(&scc_info_NodeExecutionGetRequest_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base); ::google::protobuf::internal::InitSCC(&scc_info_NodeExecutionListRequest_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base); @@ -297,9 +335,11 @@ void InitDefaults_flyteidl_2fadmin_2fnode_5fexecution_2eproto() { ::google::protobuf::internal::InitSCC(&scc_info_DynamicWorkflowNodeMetadata_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base); ::google::protobuf::internal::InitSCC(&scc_info_NodeExecutionGetDataRequest_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base); ::google::protobuf::internal::InitSCC(&scc_info_NodeExecutionGetDataResponse_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base); + ::google::protobuf::internal::InitSCC(&scc_info_GetDynamicNodeWorkflowRequest_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base); + ::google::protobuf::internal::InitSCC(&scc_info_DynamicNodeWorkflowResponse_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base); } -::google::protobuf::Metadata file_level_metadata_flyteidl_2fadmin_2fnode_5fexecution_2eproto[12]; +::google::protobuf::Metadata file_level_metadata_flyteidl_2fadmin_2fnode_5fexecution_2eproto[14]; constexpr ::google::protobuf::EnumDescriptor const** file_level_enum_descriptors_flyteidl_2fadmin_2fnode_5fexecution_2eproto = nullptr; constexpr ::google::protobuf::ServiceDescriptor const** file_level_service_descriptors_flyteidl_2fadmin_2fnode_5fexecution_2eproto = nullptr; @@ -415,6 +455,18 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fnode_5fexecution PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NodeExecutionGetDataResponse, full_outputs_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NodeExecutionGetDataResponse, dynamic_workflow_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::NodeExecutionGetDataResponse, flyte_urls_), + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetDynamicNodeWorkflowRequest, _internal_metadata_), 
+ ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::GetDynamicNodeWorkflowRequest, id_), + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::DynamicNodeWorkflowResponse, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::DynamicNodeWorkflowResponse, compiled_workflow_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::admin::NodeExecutionGetRequest)}, @@ -429,6 +481,8 @@ static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SE { 86, -1, sizeof(::flyteidl::admin::DynamicWorkflowNodeMetadata)}, { 94, -1, sizeof(::flyteidl::admin::NodeExecutionGetDataRequest)}, { 100, -1, sizeof(::flyteidl::admin::NodeExecutionGetDataResponse)}, + { 111, -1, sizeof(::flyteidl::admin::GetDynamicNodeWorkflowRequest)}, + { 117, -1, sizeof(::flyteidl::admin::DynamicNodeWorkflowResponse)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -444,12 +498,14 @@ static ::google::protobuf::Message const * const file_default_instances[] = { reinterpret_cast(&::flyteidl::admin::_DynamicWorkflowNodeMetadata_default_instance_), reinterpret_cast(&::flyteidl::admin::_NodeExecutionGetDataRequest_default_instance_), reinterpret_cast(&::flyteidl::admin::_NodeExecutionGetDataResponse_default_instance_), + reinterpret_cast(&::flyteidl::admin::_GetDynamicNodeWorkflowRequest_default_instance_), + reinterpret_cast(&::flyteidl::admin::_DynamicNodeWorkflowResponse_default_instance_), }; ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_flyteidl_2fadmin_2fnode_5fexecution_2eproto = { {}, AddDescriptors_flyteidl_2fadmin_2fnode_5fexecution_2eproto, "flyteidl/admin/node_execution.proto", schemas, file_default_instances, 
TableStruct_flyteidl_2fadmin_2fnode_5fexecution_2eproto::offsets, - file_level_metadata_flyteidl_2fadmin_2fnode_5fexecution_2eproto, 12, file_level_enum_descriptors_flyteidl_2fadmin_2fnode_5fexecution_2eproto, file_level_service_descriptors_flyteidl_2fadmin_2fnode_5fexecution_2eproto, + file_level_metadata_flyteidl_2fadmin_2fnode_5fexecution_2eproto, 14, file_level_enum_descriptors_flyteidl_2fadmin_2fnode_5fexecution_2eproto, file_level_service_descriptors_flyteidl_2fadmin_2fnode_5fexecution_2eproto, }; const char descriptor_table_protodef_flyteidl_2fadmin_2fnode_5fexecution_2eproto[] = @@ -519,14 +575,18 @@ const char descriptor_table_protodef_flyteidl_2fadmin_2fnode_5fexecution_2eproto ".core.LiteralMap\022E\n\020dynamic_workflow\030\020 \001" "(\0132+.flyteidl.admin.DynamicWorkflowNodeM" "etadata\022-\n\nflyte_urls\030\021 \001(\0132\031.flyteidl.a" - "dmin.FlyteURLsB=Z;github.com/flyteorg/fl" - "yte/flyteidl/gen/pb-go/flyteidl/adminb\006p" - "roto3" + "dmin.FlyteURLs\"S\n\035GetDynamicNodeWorkflow" + "Request\0222\n\002id\030\001 \001(\0132&.flyteidl.core.Node" + "ExecutionIdentifier\"`\n\033DynamicNodeWorkfl" + "owResponse\022A\n\021compiled_workflow\030\001 \001(\0132&." 
+ "flyteidl.core.CompiledWorkflowClosureB=Z" + ";github.com/flyteorg/flyte/flyteidl/gen/" + "pb-go/flyteidl/adminb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fadmin_2fnode_5fexecution_2eproto = { false, InitDefaults_flyteidl_2fadmin_2fnode_5fexecution_2eproto, descriptor_table_protodef_flyteidl_2fadmin_2fnode_5fexecution_2eproto, - "flyteidl/admin/node_execution.proto", &assign_descriptors_table_flyteidl_2fadmin_2fnode_5fexecution_2eproto, 2725, + "flyteidl/admin/node_execution.proto", &assign_descriptors_table_flyteidl_2fadmin_2fnode_5fexecution_2eproto, 2908, }; void AddDescriptors_flyteidl_2fadmin_2fnode_5fexecution_2eproto() { @@ -6668,6 +6728,592 @@ ::google::protobuf::Metadata NodeExecutionGetDataResponse::GetMetadata() const { } +// =================================================================== + +void GetDynamicNodeWorkflowRequest::InitAsDefaultInstance() { + ::flyteidl::admin::_GetDynamicNodeWorkflowRequest_default_instance_._instance.get_mutable()->id_ = const_cast< ::flyteidl::core::NodeExecutionIdentifier*>( + ::flyteidl::core::NodeExecutionIdentifier::internal_default_instance()); +} +class GetDynamicNodeWorkflowRequest::HasBitSetters { + public: + static const ::flyteidl::core::NodeExecutionIdentifier& id(const GetDynamicNodeWorkflowRequest* msg); +}; + +const ::flyteidl::core::NodeExecutionIdentifier& +GetDynamicNodeWorkflowRequest::HasBitSetters::id(const GetDynamicNodeWorkflowRequest* msg) { + return *msg->id_; +} +void GetDynamicNodeWorkflowRequest::clear_id() { + if (GetArenaNoVirtual() == nullptr && id_ != nullptr) { + delete id_; + } + id_ = nullptr; +} +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int GetDynamicNodeWorkflowRequest::kIdFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +GetDynamicNodeWorkflowRequest::GetDynamicNodeWorkflowRequest() + : ::google::protobuf::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // 
@@protoc_insertion_point(constructor:flyteidl.admin.GetDynamicNodeWorkflowRequest) +} +GetDynamicNodeWorkflowRequest::GetDynamicNodeWorkflowRequest(const GetDynamicNodeWorkflowRequest& from) + : ::google::protobuf::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + if (from.has_id()) { + id_ = new ::flyteidl::core::NodeExecutionIdentifier(*from.id_); + } else { + id_ = nullptr; + } + // @@protoc_insertion_point(copy_constructor:flyteidl.admin.GetDynamicNodeWorkflowRequest) +} + +void GetDynamicNodeWorkflowRequest::SharedCtor() { + ::google::protobuf::internal::InitSCC( + &scc_info_GetDynamicNodeWorkflowRequest_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base); + id_ = nullptr; +} + +GetDynamicNodeWorkflowRequest::~GetDynamicNodeWorkflowRequest() { + // @@protoc_insertion_point(destructor:flyteidl.admin.GetDynamicNodeWorkflowRequest) + SharedDtor(); +} + +void GetDynamicNodeWorkflowRequest::SharedDtor() { + if (this != internal_default_instance()) delete id_; +} + +void GetDynamicNodeWorkflowRequest::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const GetDynamicNodeWorkflowRequest& GetDynamicNodeWorkflowRequest::default_instance() { + ::google::protobuf::internal::InitSCC(&::scc_info_GetDynamicNodeWorkflowRequest_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base); + return *internal_default_instance(); +} + + +void GetDynamicNodeWorkflowRequest::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.admin.GetDynamicNodeWorkflowRequest) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + if (GetArenaNoVirtual() == nullptr && id_ != nullptr) { + delete id_; + } + id_ = nullptr; + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* GetDynamicNodeWorkflowRequest::_InternalParse(const char* begin, const char* end, void* object, + 
::google::protobuf::internal::ParseContext* ctx) { + auto msg = static_cast(object); + ::google::protobuf::int32 size; (void)size; + int depth; (void)depth; + ::google::protobuf::uint32 tag; + ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; + auto ptr = begin; + while (ptr < end) { + ptr = ::google::protobuf::io::Parse32(ptr, &tag); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + switch (tag >> 3) { + // .flyteidl.core.NodeExecutionIdentifier id = 1; + case 1: { + if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::flyteidl::core::NodeExecutionIdentifier::_InternalParse; + object = msg->mutable_id(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + break; + } + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->EndGroup(tag); + return ptr; + } + auto res = UnknownFieldParse(tag, {_InternalParse, msg}, + ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); + ptr = res.first; + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); + if (res.second) return ptr; + } + } // switch + } // while + return ptr; +len_delim_till_end: + return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, + {parser_till_end, object}, size); +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool GetDynamicNodeWorkflowRequest::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:flyteidl.admin.GetDynamicNodeWorkflowRequest) + for (;;) { + ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch 
(::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // .flyteidl.core.NodeExecutionIdentifier id = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_id())); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:flyteidl.admin.GetDynamicNodeWorkflowRequest) + return true; +failure: + // @@protoc_insertion_point(parse_failure:flyteidl.admin.GetDynamicNodeWorkflowRequest) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void GetDynamicNodeWorkflowRequest::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:flyteidl.admin.GetDynamicNodeWorkflowRequest) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .flyteidl.core.NodeExecutionIdentifier id = 1; + if (this->has_id()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 1, HasBitSetters::id(this), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:flyteidl.admin.GetDynamicNodeWorkflowRequest) +} + +::google::protobuf::uint8* GetDynamicNodeWorkflowRequest::InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:flyteidl.admin.GetDynamicNodeWorkflowRequest) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .flyteidl.core.NodeExecutionIdentifier id = 1; + if 
(this->has_id()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 1, HasBitSetters::id(this), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:flyteidl.admin.GetDynamicNodeWorkflowRequest) + return target; +} + +size_t GetDynamicNodeWorkflowRequest::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:flyteidl.admin.GetDynamicNodeWorkflowRequest) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // .flyteidl.core.NodeExecutionIdentifier id = 1; + if (this->has_id()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *id_); + } + + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void GetDynamicNodeWorkflowRequest::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.admin.GetDynamicNodeWorkflowRequest) + GOOGLE_DCHECK_NE(&from, this); + const GetDynamicNodeWorkflowRequest* source = + ::google::protobuf::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.admin.GetDynamicNodeWorkflowRequest) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.admin.GetDynamicNodeWorkflowRequest) + MergeFrom(*source); + } +} + +void 
GetDynamicNodeWorkflowRequest::MergeFrom(const GetDynamicNodeWorkflowRequest& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.admin.GetDynamicNodeWorkflowRequest) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if (from.has_id()) { + mutable_id()->::flyteidl::core::NodeExecutionIdentifier::MergeFrom(from.id()); + } +} + +void GetDynamicNodeWorkflowRequest::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.admin.GetDynamicNodeWorkflowRequest) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void GetDynamicNodeWorkflowRequest::CopyFrom(const GetDynamicNodeWorkflowRequest& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.admin.GetDynamicNodeWorkflowRequest) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool GetDynamicNodeWorkflowRequest::IsInitialized() const { + return true; +} + +void GetDynamicNodeWorkflowRequest::Swap(GetDynamicNodeWorkflowRequest* other) { + if (other == this) return; + InternalSwap(other); +} +void GetDynamicNodeWorkflowRequest::InternalSwap(GetDynamicNodeWorkflowRequest* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + swap(id_, other->id_); +} + +::google::protobuf::Metadata GetDynamicNodeWorkflowRequest::GetMetadata() const { + ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fadmin_2fnode_5fexecution_2eproto); + return ::file_level_metadata_flyteidl_2fadmin_2fnode_5fexecution_2eproto[kIndexInFileMessages]; +} + + +// =================================================================== + +void DynamicNodeWorkflowResponse::InitAsDefaultInstance() { + ::flyteidl::admin::_DynamicNodeWorkflowResponse_default_instance_._instance.get_mutable()->compiled_workflow_ = const_cast< 
::flyteidl::core::CompiledWorkflowClosure*>( + ::flyteidl::core::CompiledWorkflowClosure::internal_default_instance()); +} +class DynamicNodeWorkflowResponse::HasBitSetters { + public: + static const ::flyteidl::core::CompiledWorkflowClosure& compiled_workflow(const DynamicNodeWorkflowResponse* msg); +}; + +const ::flyteidl::core::CompiledWorkflowClosure& +DynamicNodeWorkflowResponse::HasBitSetters::compiled_workflow(const DynamicNodeWorkflowResponse* msg) { + return *msg->compiled_workflow_; +} +void DynamicNodeWorkflowResponse::clear_compiled_workflow() { + if (GetArenaNoVirtual() == nullptr && compiled_workflow_ != nullptr) { + delete compiled_workflow_; + } + compiled_workflow_ = nullptr; +} +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int DynamicNodeWorkflowResponse::kCompiledWorkflowFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +DynamicNodeWorkflowResponse::DynamicNodeWorkflowResponse() + : ::google::protobuf::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:flyteidl.admin.DynamicNodeWorkflowResponse) +} +DynamicNodeWorkflowResponse::DynamicNodeWorkflowResponse(const DynamicNodeWorkflowResponse& from) + : ::google::protobuf::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + if (from.has_compiled_workflow()) { + compiled_workflow_ = new ::flyteidl::core::CompiledWorkflowClosure(*from.compiled_workflow_); + } else { + compiled_workflow_ = nullptr; + } + // @@protoc_insertion_point(copy_constructor:flyteidl.admin.DynamicNodeWorkflowResponse) +} + +void DynamicNodeWorkflowResponse::SharedCtor() { + ::google::protobuf::internal::InitSCC( + &scc_info_DynamicNodeWorkflowResponse_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base); + compiled_workflow_ = nullptr; +} + +DynamicNodeWorkflowResponse::~DynamicNodeWorkflowResponse() { + // @@protoc_insertion_point(destructor:flyteidl.admin.DynamicNodeWorkflowResponse) + SharedDtor(); +} + +void 
DynamicNodeWorkflowResponse::SharedDtor() { + if (this != internal_default_instance()) delete compiled_workflow_; +} + +void DynamicNodeWorkflowResponse::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const DynamicNodeWorkflowResponse& DynamicNodeWorkflowResponse::default_instance() { + ::google::protobuf::internal::InitSCC(&::scc_info_DynamicNodeWorkflowResponse_flyteidl_2fadmin_2fnode_5fexecution_2eproto.base); + return *internal_default_instance(); +} + + +void DynamicNodeWorkflowResponse::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.admin.DynamicNodeWorkflowResponse) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + if (GetArenaNoVirtual() == nullptr && compiled_workflow_ != nullptr) { + delete compiled_workflow_; + } + compiled_workflow_ = nullptr; + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* DynamicNodeWorkflowResponse::_InternalParse(const char* begin, const char* end, void* object, + ::google::protobuf::internal::ParseContext* ctx) { + auto msg = static_cast(object); + ::google::protobuf::int32 size; (void)size; + int depth; (void)depth; + ::google::protobuf::uint32 tag; + ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; + auto ptr = begin; + while (ptr < end) { + ptr = ::google::protobuf::io::Parse32(ptr, &tag); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + switch (tag >> 3) { + // .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + case 1: { + if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::flyteidl::core::CompiledWorkflowClosure::_InternalParse; + object = msg->mutable_compiled_workflow(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + 
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + break; + } + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->EndGroup(tag); + return ptr; + } + auto res = UnknownFieldParse(tag, {_InternalParse, msg}, + ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); + ptr = res.first; + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); + if (res.second) return ptr; + } + } // switch + } // while + return ptr; +len_delim_till_end: + return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, + {parser_till_end, object}, size); +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool DynamicNodeWorkflowResponse::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:flyteidl.admin.DynamicNodeWorkflowResponse) + for (;;) { + ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_compiled_workflow())); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:flyteidl.admin.DynamicNodeWorkflowResponse) + return true; +failure: + // @@protoc_insertion_point(parse_failure:flyteidl.admin.DynamicNodeWorkflowResponse) + return false; +#undef DO_ +} +#endif // 
GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void DynamicNodeWorkflowResponse::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:flyteidl.admin.DynamicNodeWorkflowResponse) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + if (this->has_compiled_workflow()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 1, HasBitSetters::compiled_workflow(this), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:flyteidl.admin.DynamicNodeWorkflowResponse) +} + +::google::protobuf::uint8* DynamicNodeWorkflowResponse::InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:flyteidl.admin.DynamicNodeWorkflowResponse) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + if (this->has_compiled_workflow()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 1, HasBitSetters::compiled_workflow(this), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:flyteidl.admin.DynamicNodeWorkflowResponse) + return target; +} + +size_t DynamicNodeWorkflowResponse::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:flyteidl.admin.DynamicNodeWorkflowResponse) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + 
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + if (this->has_compiled_workflow()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *compiled_workflow_); + } + + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void DynamicNodeWorkflowResponse::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.admin.DynamicNodeWorkflowResponse) + GOOGLE_DCHECK_NE(&from, this); + const DynamicNodeWorkflowResponse* source = + ::google::protobuf::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.admin.DynamicNodeWorkflowResponse) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.admin.DynamicNodeWorkflowResponse) + MergeFrom(*source); + } +} + +void DynamicNodeWorkflowResponse::MergeFrom(const DynamicNodeWorkflowResponse& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.admin.DynamicNodeWorkflowResponse) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if (from.has_compiled_workflow()) { + mutable_compiled_workflow()->::flyteidl::core::CompiledWorkflowClosure::MergeFrom(from.compiled_workflow()); + } +} + +void DynamicNodeWorkflowResponse::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.admin.DynamicNodeWorkflowResponse) + if 
(&from == this) return; + Clear(); + MergeFrom(from); +} + +void DynamicNodeWorkflowResponse::CopyFrom(const DynamicNodeWorkflowResponse& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.admin.DynamicNodeWorkflowResponse) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool DynamicNodeWorkflowResponse::IsInitialized() const { + return true; +} + +void DynamicNodeWorkflowResponse::Swap(DynamicNodeWorkflowResponse* other) { + if (other == this) return; + InternalSwap(other); +} +void DynamicNodeWorkflowResponse::InternalSwap(DynamicNodeWorkflowResponse* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + swap(compiled_workflow_, other->compiled_workflow_); +} + +::google::protobuf::Metadata DynamicNodeWorkflowResponse::GetMetadata() const { + ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fadmin_2fnode_5fexecution_2eproto); + return ::file_level_metadata_flyteidl_2fadmin_2fnode_5fexecution_2eproto[kIndexInFileMessages]; +} + + // @@protoc_insertion_point(namespace_scope) } // namespace admin } // namespace flyteidl @@ -6709,6 +7355,12 @@ template<> PROTOBUF_NOINLINE ::flyteidl::admin::NodeExecutionGetDataRequest* Are template<> PROTOBUF_NOINLINE ::flyteidl::admin::NodeExecutionGetDataResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::NodeExecutionGetDataResponse >(Arena* arena) { return Arena::CreateInternal< ::flyteidl::admin::NodeExecutionGetDataResponse >(arena); } +template<> PROTOBUF_NOINLINE ::flyteidl::admin::GetDynamicNodeWorkflowRequest* Arena::CreateMaybeMessage< ::flyteidl::admin::GetDynamicNodeWorkflowRequest >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::admin::GetDynamicNodeWorkflowRequest >(arena); +} +template<> PROTOBUF_NOINLINE ::flyteidl::admin::DynamicNodeWorkflowResponse* Arena::CreateMaybeMessage< ::flyteidl::admin::DynamicNodeWorkflowResponse >(Arena* arena) { + return Arena::CreateInternal< 
::flyteidl::admin::DynamicNodeWorkflowResponse >(arena); +} } // namespace protobuf } // namespace google diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/node_execution.pb.h b/flyteidl/gen/pb-cpp/flyteidl/admin/node_execution.pb.h index 88da7e3612..15e691830c 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/node_execution.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/node_execution.pb.h @@ -49,7 +49,7 @@ struct TableStruct_flyteidl_2fadmin_2fnode_5fexecution_2eproto { PROTOBUF_SECTION_VARIABLE(protodesc_cold); static const ::google::protobuf::internal::AuxillaryParseTableField aux[] PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::google::protobuf::internal::ParseTable schema[12] + static const ::google::protobuf::internal::ParseTable schema[14] PROTOBUF_SECTION_VARIABLE(protodesc_cold); static const ::google::protobuf::internal::FieldMetadata field_metadata[]; static const ::google::protobuf::internal::SerializationTable serialization_table[]; @@ -58,9 +58,15 @@ struct TableStruct_flyteidl_2fadmin_2fnode_5fexecution_2eproto { void AddDescriptors_flyteidl_2fadmin_2fnode_5fexecution_2eproto(); namespace flyteidl { namespace admin { +class DynamicNodeWorkflowResponse; +class DynamicNodeWorkflowResponseDefaultTypeInternal; +extern DynamicNodeWorkflowResponseDefaultTypeInternal _DynamicNodeWorkflowResponse_default_instance_; class DynamicWorkflowNodeMetadata; class DynamicWorkflowNodeMetadataDefaultTypeInternal; extern DynamicWorkflowNodeMetadataDefaultTypeInternal _DynamicWorkflowNodeMetadata_default_instance_; +class GetDynamicNodeWorkflowRequest; +class GetDynamicNodeWorkflowRequestDefaultTypeInternal; +extern GetDynamicNodeWorkflowRequestDefaultTypeInternal _GetDynamicNodeWorkflowRequest_default_instance_; class NodeExecution; class NodeExecutionDefaultTypeInternal; extern NodeExecutionDefaultTypeInternal _NodeExecution_default_instance_; @@ -98,7 +104,9 @@ extern WorkflowNodeMetadataDefaultTypeInternal _WorkflowNodeMetadata_default_ins } // namespace 
flyteidl namespace google { namespace protobuf { +template<> ::flyteidl::admin::DynamicNodeWorkflowResponse* Arena::CreateMaybeMessage<::flyteidl::admin::DynamicNodeWorkflowResponse>(Arena*); template<> ::flyteidl::admin::DynamicWorkflowNodeMetadata* Arena::CreateMaybeMessage<::flyteidl::admin::DynamicWorkflowNodeMetadata>(Arena*); +template<> ::flyteidl::admin::GetDynamicNodeWorkflowRequest* Arena::CreateMaybeMessage<::flyteidl::admin::GetDynamicNodeWorkflowRequest>(Arena*); template<> ::flyteidl::admin::NodeExecution* Arena::CreateMaybeMessage<::flyteidl::admin::NodeExecution>(Arena*); template<> ::flyteidl::admin::NodeExecutionClosure* Arena::CreateMaybeMessage<::flyteidl::admin::NodeExecutionClosure>(Arena*); template<> ::flyteidl::admin::NodeExecutionForTaskListRequest* Arena::CreateMaybeMessage<::flyteidl::admin::NodeExecutionForTaskListRequest>(Arena*); @@ -1956,6 +1964,236 @@ class NodeExecutionGetDataResponse final : mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fnode_5fexecution_2eproto; }; +// ------------------------------------------------------------------- + +class GetDynamicNodeWorkflowRequest final : + public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.admin.GetDynamicNodeWorkflowRequest) */ { + public: + GetDynamicNodeWorkflowRequest(); + virtual ~GetDynamicNodeWorkflowRequest(); + + GetDynamicNodeWorkflowRequest(const GetDynamicNodeWorkflowRequest& from); + + inline GetDynamicNodeWorkflowRequest& operator=(const GetDynamicNodeWorkflowRequest& from) { + CopyFrom(from); + return *this; + } + #if LANG_CXX11 + GetDynamicNodeWorkflowRequest(GetDynamicNodeWorkflowRequest&& from) noexcept + : GetDynamicNodeWorkflowRequest() { + *this = ::std::move(from); + } + + inline GetDynamicNodeWorkflowRequest& operator=(GetDynamicNodeWorkflowRequest&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) 
InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + #endif + static const ::google::protobuf::Descriptor* descriptor() { + return default_instance().GetDescriptor(); + } + static const GetDynamicNodeWorkflowRequest& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const GetDynamicNodeWorkflowRequest* internal_default_instance() { + return reinterpret_cast( + &_GetDynamicNodeWorkflowRequest_default_instance_); + } + static constexpr int kIndexInFileMessages = + 12; + + void Swap(GetDynamicNodeWorkflowRequest* other); + friend void swap(GetDynamicNodeWorkflowRequest& a, GetDynamicNodeWorkflowRequest& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline GetDynamicNodeWorkflowRequest* New() const final { + return CreateMaybeMessage(nullptr); + } + + GetDynamicNodeWorkflowRequest* New(::google::protobuf::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::google::protobuf::Message& from) final; + void MergeFrom(const ::google::protobuf::Message& from) final; + void CopyFrom(const GetDynamicNodeWorkflowRequest& from); + void MergeFrom(const GetDynamicNodeWorkflowRequest& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); + ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } + #else + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const final; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + 
::google::protobuf::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(GetDynamicNodeWorkflowRequest* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::google::protobuf::Metadata GetMetadata() const final; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // .flyteidl.core.NodeExecutionIdentifier id = 1; + bool has_id() const; + void clear_id(); + static const int kIdFieldNumber = 1; + const ::flyteidl::core::NodeExecutionIdentifier& id() const; + ::flyteidl::core::NodeExecutionIdentifier* release_id(); + ::flyteidl::core::NodeExecutionIdentifier* mutable_id(); + void set_allocated_id(::flyteidl::core::NodeExecutionIdentifier* id); + + // @@protoc_insertion_point(class_scope:flyteidl.admin.GetDynamicNodeWorkflowRequest) + private: + class HasBitSetters; + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::flyteidl::core::NodeExecutionIdentifier* id_; + mutable ::google::protobuf::internal::CachedSize _cached_size_; + friend struct ::TableStruct_flyteidl_2fadmin_2fnode_5fexecution_2eproto; +}; +// ------------------------------------------------------------------- + +class DynamicNodeWorkflowResponse final : + public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.admin.DynamicNodeWorkflowResponse) */ { + public: + DynamicNodeWorkflowResponse(); + virtual ~DynamicNodeWorkflowResponse(); + + DynamicNodeWorkflowResponse(const DynamicNodeWorkflowResponse& from); + + inline DynamicNodeWorkflowResponse& operator=(const DynamicNodeWorkflowResponse& from) { + CopyFrom(from); + return *this; + } + #if LANG_CXX11 + 
DynamicNodeWorkflowResponse(DynamicNodeWorkflowResponse&& from) noexcept + : DynamicNodeWorkflowResponse() { + *this = ::std::move(from); + } + + inline DynamicNodeWorkflowResponse& operator=(DynamicNodeWorkflowResponse&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + #endif + static const ::google::protobuf::Descriptor* descriptor() { + return default_instance().GetDescriptor(); + } + static const DynamicNodeWorkflowResponse& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const DynamicNodeWorkflowResponse* internal_default_instance() { + return reinterpret_cast( + &_DynamicNodeWorkflowResponse_default_instance_); + } + static constexpr int kIndexInFileMessages = + 13; + + void Swap(DynamicNodeWorkflowResponse* other); + friend void swap(DynamicNodeWorkflowResponse& a, DynamicNodeWorkflowResponse& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline DynamicNodeWorkflowResponse* New() const final { + return CreateMaybeMessage(nullptr); + } + + DynamicNodeWorkflowResponse* New(::google::protobuf::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::google::protobuf::Message& from) final; + void MergeFrom(const ::google::protobuf::Message& from) final; + void CopyFrom(const DynamicNodeWorkflowResponse& from); + void MergeFrom(const DynamicNodeWorkflowResponse& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); + ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } + #else + bool 
MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const final; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(DynamicNodeWorkflowResponse* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::google::protobuf::Metadata GetMetadata() const final; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + bool has_compiled_workflow() const; + void clear_compiled_workflow(); + static const int kCompiledWorkflowFieldNumber = 1; + const ::flyteidl::core::CompiledWorkflowClosure& compiled_workflow() const; + ::flyteidl::core::CompiledWorkflowClosure* release_compiled_workflow(); + ::flyteidl::core::CompiledWorkflowClosure* mutable_compiled_workflow(); + void set_allocated_compiled_workflow(::flyteidl::core::CompiledWorkflowClosure* compiled_workflow); + + // @@protoc_insertion_point(class_scope:flyteidl.admin.DynamicNodeWorkflowResponse) + private: + class HasBitSetters; + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::flyteidl::core::CompiledWorkflowClosure* compiled_workflow_; + mutable ::google::protobuf::internal::CachedSize _cached_size_; + friend struct ::TableStruct_flyteidl_2fadmin_2fnode_5fexecution_2eproto; +}; // =================================================================== @@ -4147,6 +4385,104 @@ inline void 
NodeExecutionGetDataResponse::set_allocated_flyte_urls(::flyteidl::a // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.NodeExecutionGetDataResponse.flyte_urls) } +// ------------------------------------------------------------------- + +// GetDynamicNodeWorkflowRequest + +// .flyteidl.core.NodeExecutionIdentifier id = 1; +inline bool GetDynamicNodeWorkflowRequest::has_id() const { + return this != internal_default_instance() && id_ != nullptr; +} +inline const ::flyteidl::core::NodeExecutionIdentifier& GetDynamicNodeWorkflowRequest::id() const { + const ::flyteidl::core::NodeExecutionIdentifier* p = id_; + // @@protoc_insertion_point(field_get:flyteidl.admin.GetDynamicNodeWorkflowRequest.id) + return p != nullptr ? *p : *reinterpret_cast( + &::flyteidl::core::_NodeExecutionIdentifier_default_instance_); +} +inline ::flyteidl::core::NodeExecutionIdentifier* GetDynamicNodeWorkflowRequest::release_id() { + // @@protoc_insertion_point(field_release:flyteidl.admin.GetDynamicNodeWorkflowRequest.id) + + ::flyteidl::core::NodeExecutionIdentifier* temp = id_; + id_ = nullptr; + return temp; +} +inline ::flyteidl::core::NodeExecutionIdentifier* GetDynamicNodeWorkflowRequest::mutable_id() { + + if (id_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::core::NodeExecutionIdentifier>(GetArenaNoVirtual()); + id_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.GetDynamicNodeWorkflowRequest.id) + return id_; +} +inline void GetDynamicNodeWorkflowRequest::set_allocated_id(::flyteidl::core::NodeExecutionIdentifier* id) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(id_); + } + if (id) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + id = ::google::protobuf::internal::GetOwnedMessage( + message_arena, id, submessage_arena); + } + + } else { + + } + id_ = id; + // 
@@protoc_insertion_point(field_set_allocated:flyteidl.admin.GetDynamicNodeWorkflowRequest.id) +} + +// ------------------------------------------------------------------- + +// DynamicNodeWorkflowResponse + +// .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; +inline bool DynamicNodeWorkflowResponse::has_compiled_workflow() const { + return this != internal_default_instance() && compiled_workflow_ != nullptr; +} +inline const ::flyteidl::core::CompiledWorkflowClosure& DynamicNodeWorkflowResponse::compiled_workflow() const { + const ::flyteidl::core::CompiledWorkflowClosure* p = compiled_workflow_; + // @@protoc_insertion_point(field_get:flyteidl.admin.DynamicNodeWorkflowResponse.compiled_workflow) + return p != nullptr ? *p : *reinterpret_cast( + &::flyteidl::core::_CompiledWorkflowClosure_default_instance_); +} +inline ::flyteidl::core::CompiledWorkflowClosure* DynamicNodeWorkflowResponse::release_compiled_workflow() { + // @@protoc_insertion_point(field_release:flyteidl.admin.DynamicNodeWorkflowResponse.compiled_workflow) + + ::flyteidl::core::CompiledWorkflowClosure* temp = compiled_workflow_; + compiled_workflow_ = nullptr; + return temp; +} +inline ::flyteidl::core::CompiledWorkflowClosure* DynamicNodeWorkflowResponse::mutable_compiled_workflow() { + + if (compiled_workflow_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::core::CompiledWorkflowClosure>(GetArenaNoVirtual()); + compiled_workflow_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.admin.DynamicNodeWorkflowResponse.compiled_workflow) + return compiled_workflow_; +} +inline void DynamicNodeWorkflowResponse::set_allocated_compiled_workflow(::flyteidl::core::CompiledWorkflowClosure* compiled_workflow) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(compiled_workflow_); + } + if (compiled_workflow) { + ::google::protobuf::Arena* submessage_arena = nullptr; 
+ if (message_arena != submessage_arena) { + compiled_workflow = ::google::protobuf::internal::GetOwnedMessage( + message_arena, compiled_workflow, submessage_arena); + } + + } else { + + } + compiled_workflow_ = compiled_workflow; + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.DynamicNodeWorkflowResponse.compiled_workflow) +} + #ifdef __GNUC__ #pragma GCC diagnostic pop #endif // __GNUC__ @@ -4172,6 +4508,10 @@ inline void NodeExecutionGetDataResponse::set_allocated_flyte_urls(::flyteidl::a // ------------------------------------------------------------------- +// ------------------------------------------------------------------- + +// ------------------------------------------------------------------- + // @@protoc_insertion_point(namespace_scope) diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/project.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/admin/project.pb.cc index 979041c005..03f21267d0 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/project.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/project.pb.cc @@ -188,6 +188,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fproject_2eproto: PROTOBUF_FIELD_OFFSET(::flyteidl::admin::Project, description_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::Project, labels_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::Project, state_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::Project, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::Projects, _internal_metadata_), ~0u, // no _extensions_ @@ -204,6 +205,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fproject_2eproto: PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectListRequest, token_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectListRequest, filters_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectListRequest, sort_by_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectListRequest, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectRegisterRequest, _internal_metadata_), ~0u, 
// no _extensions_ @@ -224,11 +226,11 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fproject_2eproto: static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::admin::Domain)}, { 7, -1, sizeof(::flyteidl::admin::Project)}, - { 18, -1, sizeof(::flyteidl::admin::Projects)}, - { 25, -1, sizeof(::flyteidl::admin::ProjectListRequest)}, - { 34, -1, sizeof(::flyteidl::admin::ProjectRegisterRequest)}, - { 40, -1, sizeof(::flyteidl::admin::ProjectRegisterResponse)}, - { 45, -1, sizeof(::flyteidl::admin::ProjectUpdateResponse)}, + { 19, -1, sizeof(::flyteidl::admin::Projects)}, + { 26, -1, sizeof(::flyteidl::admin::ProjectListRequest)}, + { 36, -1, sizeof(::flyteidl::admin::ProjectRegisterRequest)}, + { 42, -1, sizeof(::flyteidl::admin::ProjectRegisterResponse)}, + { 47, -1, sizeof(::flyteidl::admin::ProjectUpdateResponse)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -250,28 +252,29 @@ ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fadmin_2fproject_2eproto[] = "\n\034flyteidl/admin/project.proto\022\016flyteidl" ".admin\032\033flyteidl/admin/common.proto\"\"\n\006D" - "omain\022\n\n\002id\030\001 \001(\t\022\014\n\004name\030\002 \001(\t\"\376\001\n\007Proj" + "omain\022\n\n\002id\030\001 \001(\t\022\014\n\004name\030\002 \001(\t\"\213\002\n\007Proj" "ect\022\n\n\002id\030\001 \001(\t\022\014\n\004name\030\002 \001(\t\022\'\n\007domains" "\030\003 \003(\0132\026.flyteidl.admin.Domain\022\023\n\013descri" "ption\030\004 \001(\t\022&\n\006labels\030\005 \001(\0132\026.flyteidl.a" "dmin.Labels\0223\n\005state\030\006 \001(\0162$.flyteidl.ad" - "min.Project.ProjectState\">\n\014ProjectState" - "\022\n\n\006ACTIVE\020\000\022\014\n\010ARCHIVED\020\001\022\024\n\020SYSTEM_GEN" - "ERATED\020\002\"D\n\010Projects\022)\n\010projects\030\001 \003(\0132\027" - 
".flyteidl.admin.Project\022\r\n\005token\030\002 \001(\t\"j" - "\n\022ProjectListRequest\022\r\n\005limit\030\001 \001(\r\022\r\n\005t" - "oken\030\002 \001(\t\022\017\n\007filters\030\003 \001(\t\022%\n\007sort_by\030\004" - " \001(\0132\024.flyteidl.admin.Sort\"B\n\026ProjectReg" - "isterRequest\022(\n\007project\030\001 \001(\0132\027.flyteidl" - ".admin.Project\"\031\n\027ProjectRegisterRespons" - "e\"\027\n\025ProjectUpdateResponseB=Z;github.com" - "/flyteorg/flyte/flyteidl/gen/pb-go/flyte" - "idl/adminb\006proto3" + "min.Project.ProjectState\022\013\n\003org\030\007 \001(\t\">\n" + "\014ProjectState\022\n\n\006ACTIVE\020\000\022\014\n\010ARCHIVED\020\001\022" + "\024\n\020SYSTEM_GENERATED\020\002\"D\n\010Projects\022)\n\010pro" + "jects\030\001 \003(\0132\027.flyteidl.admin.Project\022\r\n\005" + "token\030\002 \001(\t\"w\n\022ProjectListRequest\022\r\n\005lim" + "it\030\001 \001(\r\022\r\n\005token\030\002 \001(\t\022\017\n\007filters\030\003 \001(\t" + "\022%\n\007sort_by\030\004 \001(\0132\024.flyteidl.admin.Sort\022" + "\013\n\003org\030\005 \001(\t\"B\n\026ProjectRegisterRequest\022(" + "\n\007project\030\001 \001(\0132\027.flyteidl.admin.Project" + "\"\031\n\027ProjectRegisterResponse\"\027\n\025ProjectUp" + "dateResponseB=Z;github.com/flyteorg/flyt" + "e/flyteidl/gen/pb-go/flyteidl/adminb\006pro" + "to3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fadmin_2fproject_2eproto = { false, InitDefaults_flyteidl_2fadmin_2fproject_2eproto, descriptor_table_protodef_flyteidl_2fadmin_2fproject_2eproto, - "flyteidl/admin/project.proto", &assign_descriptors_table_flyteidl_2fadmin_2fproject_2eproto, 737, + "flyteidl/admin/project.proto", &assign_descriptors_table_flyteidl_2fadmin_2fproject_2eproto, 763, }; void AddDescriptors_flyteidl_2fadmin_2fproject_2eproto() { @@ -708,6 +711,7 @@ const int Project::kDomainsFieldNumber; const int Project::kDescriptionFieldNumber; const int Project::kLabelsFieldNumber; const int 
Project::kStateFieldNumber; +const int Project::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 Project::Project() @@ -732,6 +736,10 @@ Project::Project(const Project& from) if (from.description().size() > 0) { description_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.description_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_labels()) { labels_ = new ::flyteidl::admin::Labels(*from.labels_); } else { @@ -747,6 +755,7 @@ void Project::SharedCtor() { id_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); description_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); ::memset(&labels_, 0, static_cast( reinterpret_cast(&state_) - reinterpret_cast(&labels_)) + sizeof(state_)); @@ -761,6 +770,7 @@ void Project::SharedDtor() { id_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); description_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete labels_; } @@ -783,6 +793,7 @@ void Project::Clear() { id_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); description_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + 
org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && labels_ != nullptr) { delete labels_; } @@ -889,6 +900,22 @@ const char* Project::_InternalParse(const char* begin, const char* end, void* ob GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); break; } + // string org = 7; + case 7: { + if (static_cast<::google::protobuf::uint8>(tag) != 58) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.Project.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -1004,6 +1031,21 @@ bool Project::MergePartialFromCodedStream( break; } + // string org = 7; + case 7: { + if (static_cast< ::google::protobuf::uint8>(tag) == (58 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.Project.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -1082,6 +1124,16 @@ void Project::SerializeWithCachedSizes( 6, this->state(), output); } + // string org = 7; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + 
"flyteidl.admin.Project.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 7, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -1149,6 +1201,17 @@ ::google::protobuf::uint8* Project::InternalSerializeWithCachedSizesToArray( 6, this->state(), target); } + // string org = 7; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.Project.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 7, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -1202,6 +1265,13 @@ size_t Project::ByteSizeLong() const { this->description()); } + // string org = 7; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.Labels labels = 5; if (this->has_labels()) { total_size += 1 + @@ -1255,6 +1325,10 @@ void Project::MergeFrom(const Project& from) { description_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.description_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_labels()) { mutable_labels()->::flyteidl::admin::Labels::MergeFrom(from.labels()); } @@ -1295,6 +1369,8 @@ void Project::InternalSwap(Project* other) { GetArenaNoVirtual()); description_.Swap(&other->description_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, 
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(labels_, other->labels_); swap(state_, other->state_); } @@ -1688,6 +1764,7 @@ const int ProjectListRequest::kLimitFieldNumber; const int ProjectListRequest::kTokenFieldNumber; const int ProjectListRequest::kFiltersFieldNumber; const int ProjectListRequest::kSortByFieldNumber; +const int ProjectListRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 ProjectListRequest::ProjectListRequest() @@ -1707,6 +1784,10 @@ ProjectListRequest::ProjectListRequest(const ProjectListRequest& from) if (from.filters().size() > 0) { filters_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.filters_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_sort_by()) { sort_by_ = new ::flyteidl::admin::Sort(*from.sort_by_); } else { @@ -1721,6 +1802,7 @@ void ProjectListRequest::SharedCtor() { &scc_info_ProjectListRequest_flyteidl_2fadmin_2fproject_2eproto.base); token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); filters_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); ::memset(&sort_by_, 0, static_cast( reinterpret_cast(&limit_) - reinterpret_cast(&sort_by_)) + sizeof(limit_)); @@ -1734,6 +1816,7 @@ ProjectListRequest::~ProjectListRequest() { void ProjectListRequest::SharedDtor() { token_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); filters_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete sort_by_; } @@ -1754,6 
+1837,7 @@ void ProjectListRequest::Clear() { token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); filters_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && sort_by_ != nullptr) { delete sort_by_; } @@ -1827,6 +1911,22 @@ const char* ProjectListRequest::_InternalParse(const char* begin, const char* en {parser_till_end, object}, ptr - size, ptr)); break; } + // string org = 5; + case 5: { + if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.ProjectListRequest.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -1915,6 +2015,21 @@ bool ProjectListRequest::MergePartialFromCodedStream( break; } + // string org = 5; + case 5: { + if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.ProjectListRequest.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -1973,6 +2088,16 @@ void 
ProjectListRequest::SerializeWithCachedSizes( 4, HasBitSetters::sort_by(this), output); } + // string org = 5; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectListRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 5, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -2020,6 +2145,17 @@ ::google::protobuf::uint8* ProjectListRequest::InternalSerializeWithCachedSizesT 4, HasBitSetters::sort_by(this), target); } + // string org = 5; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectListRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 5, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -2055,6 +2191,13 @@ size_t ProjectListRequest::ByteSizeLong() const { this->filters()); } + // string org = 5; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.Sort sort_by = 4; if (this->has_sort_by()) { total_size += 1 + @@ -2104,6 +2247,10 @@ void ProjectListRequest::MergeFrom(const ProjectListRequest& from) { filters_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.filters_); } + if (from.org().size() > 0) { + + 
org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_sort_by()) { mutable_sort_by()->::flyteidl::admin::Sort::MergeFrom(from.sort_by()); } @@ -2141,6 +2288,8 @@ void ProjectListRequest::InternalSwap(ProjectListRequest* other) { GetArenaNoVirtual()); filters_.Swap(&other->filters_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(sort_by_, other->sort_by_); swap(limit_, other->limit_); } diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/project.pb.h b/flyteidl/gen/pb-cpp/flyteidl/admin/project.pb.h index b1fa7e9e3b..33b8879252 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/project.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/project.pb.h @@ -425,6 +425,20 @@ class Project final : ::std::string* release_description(); void set_allocated_description(::std::string* description); + // string org = 7; + void clear_org(); + static const int kOrgFieldNumber = 7; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.Labels labels = 5; bool has_labels() const; void clear_labels(); @@ -449,6 +463,7 @@ class Project final : ::google::protobuf::internal::ArenaStringPtr id_; ::google::protobuf::internal::ArenaStringPtr name_; ::google::protobuf::internal::ArenaStringPtr description_; + ::google::protobuf::internal::ArenaStringPtr org_; ::flyteidl::admin::Labels* labels_; int state_; mutable ::google::protobuf::internal::CachedSize _cached_size_; @@ -712,6 +727,20 @@ class ProjectListRequest final : ::std::string* release_filters(); void 
set_allocated_filters(::std::string* filters); + // string org = 5; + void clear_org(); + static const int kOrgFieldNumber = 5; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.Sort sort_by = 4; bool has_sort_by() const; void clear_sort_by(); @@ -734,6 +763,7 @@ class ProjectListRequest final : ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::ArenaStringPtr token_; ::google::protobuf::internal::ArenaStringPtr filters_; + ::google::protobuf::internal::ArenaStringPtr org_; ::flyteidl::admin::Sort* sort_by_; ::google::protobuf::uint32 limit_; mutable ::google::protobuf::internal::CachedSize _cached_size_; @@ -1433,6 +1463,59 @@ inline void Project::set_state(::flyteidl::admin::Project_ProjectState value) { // @@protoc_insertion_point(field_set:flyteidl.admin.Project.state) } +// string org = 7; +inline void Project::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& Project::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.Project.org) + return org_.GetNoArena(); +} +inline void Project::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.Project.org) +} +#if LANG_CXX11 +inline void Project::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.Project.org) +} +#endif +inline void Project::set_org(const char* value) { + 
GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.Project.org) +} +inline void Project::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.Project.org) +} +inline ::std::string* Project::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.Project.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* Project::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.Project.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void Project::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.Project.org) +} + // ------------------------------------------------------------------- // Projects @@ -1689,6 +1772,59 @@ inline void ProjectListRequest::set_allocated_sort_by(::flyteidl::admin::Sort* s // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ProjectListRequest.sort_by) } +// string org = 5; +inline void ProjectListRequest::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& ProjectListRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.ProjectListRequest.org) + return org_.GetNoArena(); +} +inline void ProjectListRequest::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // 
@@protoc_insertion_point(field_set:flyteidl.admin.ProjectListRequest.org) +} +#if LANG_CXX11 +inline void ProjectListRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.ProjectListRequest.org) +} +#endif +inline void ProjectListRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.ProjectListRequest.org) +} +inline void ProjectListRequest::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.ProjectListRequest.org) +} +inline ::std::string* ProjectListRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.ProjectListRequest.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* ProjectListRequest::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.ProjectListRequest.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void ProjectListRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ProjectListRequest.org) +} + // ------------------------------------------------------------------- // ProjectRegisterRequest diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/project_attributes.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/admin/project_attributes.pb.cc index ad78bb68cf..40a47623d7 100644 
--- a/flyteidl/gen/pb-cpp/flyteidl/admin/project_attributes.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/project_attributes.pb.cc @@ -173,6 +173,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fproject_5fattrib ~0u, // no _weak_field_map_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributes, project_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributes, matching_attributes_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributes, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributesUpdateRequest, _internal_metadata_), ~0u, // no _extensions_ @@ -191,6 +192,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fproject_5fattrib ~0u, // no _weak_field_map_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributesGetRequest, project_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributesGetRequest, resource_type_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributesGetRequest, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributesGetResponse, _internal_metadata_), ~0u, // no _extensions_ @@ -204,6 +206,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fproject_5fattrib ~0u, // no _weak_field_map_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributesDeleteRequest, project_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributesDeleteRequest, resource_type_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributesDeleteRequest, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectAttributesDeleteResponse, _internal_metadata_), ~0u, // no _extensions_ @@ -212,12 +215,12 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fproject_5fattrib }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::admin::ProjectAttributes)}, - { 7, -1, 
sizeof(::flyteidl::admin::ProjectAttributesUpdateRequest)}, - { 13, -1, sizeof(::flyteidl::admin::ProjectAttributesUpdateResponse)}, - { 18, -1, sizeof(::flyteidl::admin::ProjectAttributesGetRequest)}, - { 25, -1, sizeof(::flyteidl::admin::ProjectAttributesGetResponse)}, - { 31, -1, sizeof(::flyteidl::admin::ProjectAttributesDeleteRequest)}, - { 38, -1, sizeof(::flyteidl::admin::ProjectAttributesDeleteResponse)}, + { 8, -1, sizeof(::flyteidl::admin::ProjectAttributesUpdateRequest)}, + { 14, -1, sizeof(::flyteidl::admin::ProjectAttributesUpdateResponse)}, + { 19, -1, sizeof(::flyteidl::admin::ProjectAttributesGetRequest)}, + { 27, -1, sizeof(::flyteidl::admin::ProjectAttributesGetResponse)}, + { 33, -1, sizeof(::flyteidl::admin::ProjectAttributesDeleteRequest)}, + { 41, -1, sizeof(::flyteidl::admin::ProjectAttributesDeleteResponse)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -239,28 +242,29 @@ ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fadmin_2fproject_5fattributes_2eproto[] = "\n\'flyteidl/admin/project_attributes.prot" "o\022\016flyteidl.admin\032\'flyteidl/admin/matcha" - "ble_resource.proto\"e\n\021ProjectAttributes\022" + "ble_resource.proto\"r\n\021ProjectAttributes\022" "\017\n\007project\030\001 \001(\t\022\?\n\023matching_attributes\030" "\002 \001(\0132\".flyteidl.admin.MatchingAttribute" - "s\"W\n\036ProjectAttributesUpdateRequest\0225\n\na" - "ttributes\030\001 \001(\0132!.flyteidl.admin.Project" - "Attributes\"!\n\037ProjectAttributesUpdateRes" - "ponse\"h\n\033ProjectAttributesGetRequest\022\017\n\007" - "project\030\001 \001(\t\0228\n\rresource_type\030\002 \001(\0162!.f" - "lyteidl.admin.MatchableResource\"U\n\034Proje" - "ctAttributesGetResponse\0225\n\nattributes\030\001 " - "\001(\0132!.flyteidl.admin.ProjectAttributes\"k" - "\n\036ProjectAttributesDeleteRequest\022\017\n\007proj" - "ect\030\001 
\001(\t\0228\n\rresource_type\030\002 \001(\0162!.flyte" - "idl.admin.MatchableResource\"!\n\037ProjectAt" - "tributesDeleteResponseB=Z;github.com/fly" - "teorg/flyte/flyteidl/gen/pb-go/flyteidl/" - "adminb\006proto3" + "s\022\013\n\003org\030\003 \001(\t\"W\n\036ProjectAttributesUpdat" + "eRequest\0225\n\nattributes\030\001 \001(\0132!.flyteidl." + "admin.ProjectAttributes\"!\n\037ProjectAttrib" + "utesUpdateResponse\"u\n\033ProjectAttributesG" + "etRequest\022\017\n\007project\030\001 \001(\t\0228\n\rresource_t" + "ype\030\002 \001(\0162!.flyteidl.admin.MatchableReso" + "urce\022\013\n\003org\030\003 \001(\t\"U\n\034ProjectAttributesGe" + "tResponse\0225\n\nattributes\030\001 \001(\0132!.flyteidl" + ".admin.ProjectAttributes\"x\n\036ProjectAttri" + "butesDeleteRequest\022\017\n\007project\030\001 \001(\t\0228\n\rr" + "esource_type\030\002 \001(\0162!.flyteidl.admin.Matc" + "hableResource\022\013\n\003org\030\003 \001(\t\"!\n\037ProjectAtt" + "ributesDeleteResponseB=Z;github.com/flyt" + "eorg/flyte/flyteidl/gen/pb-go/flyteidl/a" + "dminb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fadmin_2fproject_5fattributes_2eproto = { false, InitDefaults_flyteidl_2fadmin_2fproject_5fattributes_2eproto, descriptor_table_protodef_flyteidl_2fadmin_2fproject_5fattributes_2eproto, - "flyteidl/admin/project_attributes.proto", &assign_descriptors_table_flyteidl_2fadmin_2fproject_5fattributes_2eproto, 733, + "flyteidl/admin/project_attributes.proto", &assign_descriptors_table_flyteidl_2fadmin_2fproject_5fattributes_2eproto, 772, }; void AddDescriptors_flyteidl_2fadmin_2fproject_5fattributes_2eproto() { @@ -300,6 +304,7 @@ void ProjectAttributes::clear_matching_attributes() { #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int ProjectAttributes::kProjectFieldNumber; const int ProjectAttributes::kMatchingAttributesFieldNumber; +const int ProjectAttributes::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 
ProjectAttributes::ProjectAttributes() @@ -315,6 +320,10 @@ ProjectAttributes::ProjectAttributes(const ProjectAttributes& from) if (from.project().size() > 0) { project_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.project_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_matching_attributes()) { matching_attributes_ = new ::flyteidl::admin::MatchingAttributes(*from.matching_attributes_); } else { @@ -327,6 +336,7 @@ void ProjectAttributes::SharedCtor() { ::google::protobuf::internal::InitSCC( &scc_info_ProjectAttributes_flyteidl_2fadmin_2fproject_5fattributes_2eproto.base); project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); matching_attributes_ = nullptr; } @@ -337,6 +347,7 @@ ProjectAttributes::~ProjectAttributes() { void ProjectAttributes::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete matching_attributes_; } @@ -356,6 +367,7 @@ void ProjectAttributes::Clear() { (void) cached_has_bits; project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && matching_attributes_ != nullptr) { delete matching_attributes_; } @@ -405,6 +417,22 @@ const char* ProjectAttributes::_InternalParse(const char* begin, const char* end {parser_till_end, object}, ptr - size, ptr)); break; } + // string org = 3; + case 3: { + if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; + 
ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.ProjectAttributes.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -465,6 +493,21 @@ bool ProjectAttributes::MergePartialFromCodedStream( break; } + // string org = 3; + case 3: { + if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.ProjectAttributes.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -508,6 +551,16 @@ void ProjectAttributes::SerializeWithCachedSizes( 2, HasBitSetters::matching_attributes(this), output); } + // string org = 3; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectAttributes.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 3, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -539,6 +592,17 @@ ::google::protobuf::uint8* 
ProjectAttributes::InternalSerializeWithCachedSizesTo 2, HasBitSetters::matching_attributes(this), target); } + // string org = 3; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectAttributes.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 3, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -567,6 +631,13 @@ size_t ProjectAttributes::ByteSizeLong() const { this->project()); } + // string org = 3; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.MatchingAttributes matching_attributes = 2; if (this->has_matching_attributes()) { total_size += 1 + @@ -605,6 +676,10 @@ void ProjectAttributes::MergeFrom(const ProjectAttributes& from) { project_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.project_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_matching_attributes()) { mutable_matching_attributes()->::flyteidl::admin::MatchingAttributes::MergeFrom(from.matching_attributes()); } @@ -637,6 +712,8 @@ void ProjectAttributes::InternalSwap(ProjectAttributes* other) { _internal_metadata_.Swap(&other->_internal_metadata_); project_.Swap(&other->project_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(matching_attributes_, other->matching_attributes_); } @@ -1153,6 +1230,7 @@ class 
ProjectAttributesGetRequest::HasBitSetters { #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int ProjectAttributesGetRequest::kProjectFieldNumber; const int ProjectAttributesGetRequest::kResourceTypeFieldNumber; +const int ProjectAttributesGetRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 ProjectAttributesGetRequest::ProjectAttributesGetRequest() @@ -1168,6 +1246,10 @@ ProjectAttributesGetRequest::ProjectAttributesGetRequest(const ProjectAttributes if (from.project().size() > 0) { project_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.project_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } resource_type_ = from.resource_type_; // @@protoc_insertion_point(copy_constructor:flyteidl.admin.ProjectAttributesGetRequest) } @@ -1176,6 +1258,7 @@ void ProjectAttributesGetRequest::SharedCtor() { ::google::protobuf::internal::InitSCC( &scc_info_ProjectAttributesGetRequest_flyteidl_2fadmin_2fproject_5fattributes_2eproto.base); project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; } @@ -1186,6 +1269,7 @@ ProjectAttributesGetRequest::~ProjectAttributesGetRequest() { void ProjectAttributesGetRequest::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } void ProjectAttributesGetRequest::SetCachedSize(int size) const { @@ -1204,6 +1288,7 @@ void ProjectAttributesGetRequest::Clear() { (void) cached_has_bits; project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + 
org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; _internal_metadata_.Clear(); } @@ -1245,6 +1330,22 @@ const char* ProjectAttributesGetRequest::_InternalParse(const char* begin, const GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); break; } + // string org = 3; + case 3: { + if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.ProjectAttributesGetRequest.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -1308,6 +1409,21 @@ bool ProjectAttributesGetRequest::MergePartialFromCodedStream( break; } + // string org = 3; + case 3: { + if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.ProjectAttributesGetRequest.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -1351,6 +1467,16 @@ void ProjectAttributesGetRequest::SerializeWithCachedSizes( 2, this->resource_type(), output); } + // string org = 3; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + 
::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectAttributesGetRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 3, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -1381,6 +1507,17 @@ ::google::protobuf::uint8* ProjectAttributesGetRequest::InternalSerializeWithCac 2, this->resource_type(), target); } + // string org = 3; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectAttributesGetRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 3, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -1409,6 +1546,13 @@ size_t ProjectAttributesGetRequest::ByteSizeLong() const { this->project()); } + // string org = 3; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.MatchableResource resource_type = 2; if (this->resource_type() != 0) { total_size += 1 + @@ -1446,6 +1590,10 @@ void ProjectAttributesGetRequest::MergeFrom(const ProjectAttributesGetRequest& f project_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.project_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.resource_type() != 0) { set_resource_type(from.resource_type()); } @@ -1478,6 +1626,8 @@ void ProjectAttributesGetRequest::InternalSwap(ProjectAttributesGetRequest* othe 
_internal_metadata_.Swap(&other->_internal_metadata_); project_.Swap(&other->project_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(resource_type_, other->resource_type_); } @@ -1785,6 +1935,7 @@ class ProjectAttributesDeleteRequest::HasBitSetters { #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int ProjectAttributesDeleteRequest::kProjectFieldNumber; const int ProjectAttributesDeleteRequest::kResourceTypeFieldNumber; +const int ProjectAttributesDeleteRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 ProjectAttributesDeleteRequest::ProjectAttributesDeleteRequest() @@ -1800,6 +1951,10 @@ ProjectAttributesDeleteRequest::ProjectAttributesDeleteRequest(const ProjectAttr if (from.project().size() > 0) { project_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.project_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } resource_type_ = from.resource_type_; // @@protoc_insertion_point(copy_constructor:flyteidl.admin.ProjectAttributesDeleteRequest) } @@ -1808,6 +1963,7 @@ void ProjectAttributesDeleteRequest::SharedCtor() { ::google::protobuf::internal::InitSCC( &scc_info_ProjectAttributesDeleteRequest_flyteidl_2fadmin_2fproject_5fattributes_2eproto.base); project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; } @@ -1818,6 +1974,7 @@ ProjectAttributesDeleteRequest::~ProjectAttributesDeleteRequest() { void ProjectAttributesDeleteRequest::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + 
org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } void ProjectAttributesDeleteRequest::SetCachedSize(int size) const { @@ -1836,6 +1993,7 @@ void ProjectAttributesDeleteRequest::Clear() { (void) cached_has_bits; project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; _internal_metadata_.Clear(); } @@ -1877,6 +2035,22 @@ const char* ProjectAttributesDeleteRequest::_InternalParse(const char* begin, co GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); break; } + // string org = 3; + case 3: { + if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.ProjectAttributesDeleteRequest.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -1940,6 +2114,21 @@ bool ProjectAttributesDeleteRequest::MergePartialFromCodedStream( break; } + // string org = 3; + case 3: { + if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.ProjectAttributesDeleteRequest.org")); + } else { + goto handle_unusual; + } + break; + } + 
default: { handle_unusual: if (tag == 0) { @@ -1983,6 +2172,16 @@ void ProjectAttributesDeleteRequest::SerializeWithCachedSizes( 2, this->resource_type(), output); } + // string org = 3; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectAttributesDeleteRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 3, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -2013,6 +2212,17 @@ ::google::protobuf::uint8* ProjectAttributesDeleteRequest::InternalSerializeWith 2, this->resource_type(), target); } + // string org = 3; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectAttributesDeleteRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 3, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -2041,6 +2251,13 @@ size_t ProjectAttributesDeleteRequest::ByteSizeLong() const { this->project()); } + // string org = 3; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.MatchableResource resource_type = 2; if (this->resource_type() != 0) { total_size += 1 + @@ -2078,6 +2295,10 @@ void ProjectAttributesDeleteRequest::MergeFrom(const ProjectAttributesDeleteRequ 
project_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.project_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.resource_type() != 0) { set_resource_type(from.resource_type()); } @@ -2110,6 +2331,8 @@ void ProjectAttributesDeleteRequest::InternalSwap(ProjectAttributesDeleteRequest _internal_metadata_.Swap(&other->_internal_metadata_); project_.Swap(&other->project_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(resource_type_, other->resource_type_); } diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/project_attributes.pb.h b/flyteidl/gen/pb-cpp/flyteidl/admin/project_attributes.pb.h index 3c859a5e4a..af00e25767 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/project_attributes.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/project_attributes.pb.h @@ -199,6 +199,20 @@ class ProjectAttributes final : ::std::string* release_project(); void set_allocated_project(::std::string* project); + // string org = 3; + void clear_org(); + static const int kOrgFieldNumber = 3; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.MatchingAttributes matching_attributes = 2; bool has_matching_attributes() const; void clear_matching_attributes(); @@ -214,6 +228,7 @@ class ProjectAttributes final : ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::ArenaStringPtr project_; + ::google::protobuf::internal::ArenaStringPtr org_; 
::flyteidl::admin::MatchingAttributes* matching_attributes_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fproject_5fattributes_2eproto; @@ -549,6 +564,20 @@ class ProjectAttributesGetRequest final : ::std::string* release_project(); void set_allocated_project(::std::string* project); + // string org = 3; + void clear_org(); + static const int kOrgFieldNumber = 3; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.MatchableResource resource_type = 2; void clear_resource_type(); static const int kResourceTypeFieldNumber = 2; @@ -561,6 +590,7 @@ class ProjectAttributesGetRequest final : ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::ArenaStringPtr project_; + ::google::protobuf::internal::ArenaStringPtr org_; int resource_type_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fproject_5fattributes_2eproto; @@ -791,6 +821,20 @@ class ProjectAttributesDeleteRequest final : ::std::string* release_project(); void set_allocated_project(::std::string* project); + // string org = 3; + void clear_org(); + static const int kOrgFieldNumber = 3; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.MatchableResource resource_type = 2; void clear_resource_type(); static const int 
kResourceTypeFieldNumber = 2; @@ -803,6 +847,7 @@ class ProjectAttributesDeleteRequest final : ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::ArenaStringPtr project_; + ::google::protobuf::internal::ArenaStringPtr org_; int resource_type_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fproject_5fattributes_2eproto; @@ -1021,6 +1066,59 @@ inline void ProjectAttributes::set_allocated_matching_attributes(::flyteidl::adm // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ProjectAttributes.matching_attributes) } +// string org = 3; +inline void ProjectAttributes::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& ProjectAttributes::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.ProjectAttributes.org) + return org_.GetNoArena(); +} +inline void ProjectAttributes::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.ProjectAttributes.org) +} +#if LANG_CXX11 +inline void ProjectAttributes::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.ProjectAttributes.org) +} +#endif +inline void ProjectAttributes::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.ProjectAttributes.org) +} +inline void ProjectAttributes::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // 
@@protoc_insertion_point(field_set_pointer:flyteidl.admin.ProjectAttributes.org) +} +inline ::std::string* ProjectAttributes::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.ProjectAttributes.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* ProjectAttributes::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.ProjectAttributes.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void ProjectAttributes::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ProjectAttributes.org) +} + // ------------------------------------------------------------------- // ProjectAttributesUpdateRequest @@ -1151,6 +1249,59 @@ inline void ProjectAttributesGetRequest::set_resource_type(::flyteidl::admin::Ma // @@protoc_insertion_point(field_set:flyteidl.admin.ProjectAttributesGetRequest.resource_type) } +// string org = 3; +inline void ProjectAttributesGetRequest::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& ProjectAttributesGetRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.ProjectAttributesGetRequest.org) + return org_.GetNoArena(); +} +inline void ProjectAttributesGetRequest::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.ProjectAttributesGetRequest.org) +} +#if LANG_CXX11 +inline void ProjectAttributesGetRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // 
@@protoc_insertion_point(field_set_rvalue:flyteidl.admin.ProjectAttributesGetRequest.org) +} +#endif +inline void ProjectAttributesGetRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.ProjectAttributesGetRequest.org) +} +inline void ProjectAttributesGetRequest::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.ProjectAttributesGetRequest.org) +} +inline ::std::string* ProjectAttributesGetRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.ProjectAttributesGetRequest.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* ProjectAttributesGetRequest::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.ProjectAttributesGetRequest.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void ProjectAttributesGetRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ProjectAttributesGetRequest.org) +} + // ------------------------------------------------------------------- // ProjectAttributesGetResponse @@ -1277,6 +1428,59 @@ inline void ProjectAttributesDeleteRequest::set_resource_type(::flyteidl::admin: // @@protoc_insertion_point(field_set:flyteidl.admin.ProjectAttributesDeleteRequest.resource_type) } +// string org = 3; +inline void ProjectAttributesDeleteRequest::clear_org() { + 
org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& ProjectAttributesDeleteRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.ProjectAttributesDeleteRequest.org) + return org_.GetNoArena(); +} +inline void ProjectAttributesDeleteRequest::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.ProjectAttributesDeleteRequest.org) +} +#if LANG_CXX11 +inline void ProjectAttributesDeleteRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.ProjectAttributesDeleteRequest.org) +} +#endif +inline void ProjectAttributesDeleteRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.ProjectAttributesDeleteRequest.org) +} +inline void ProjectAttributesDeleteRequest::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.ProjectAttributesDeleteRequest.org) +} +inline ::std::string* ProjectAttributesDeleteRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.ProjectAttributesDeleteRequest.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* ProjectAttributesDeleteRequest::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.ProjectAttributesDeleteRequest.org) + + return 
org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void ProjectAttributesDeleteRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ProjectAttributesDeleteRequest.org) +} + // ------------------------------------------------------------------- // ProjectAttributesDeleteResponse diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/project_domain_attributes.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/admin/project_domain_attributes.pb.cc index 6de4670235..25b576c211 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/project_domain_attributes.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/project_domain_attributes.pb.cc @@ -174,6 +174,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fproject_5fdomain PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributes, project_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributes, domain_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributes, matching_attributes_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributes, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributesUpdateRequest, _internal_metadata_), ~0u, // no _extensions_ @@ -193,6 +194,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fproject_5fdomain PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributesGetRequest, project_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributesGetRequest, domain_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributesGetRequest, resource_type_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributesGetRequest, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributesGetResponse, _internal_metadata_), ~0u, // 
no _extensions_ @@ -207,6 +209,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fproject_5fdomain PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributesDeleteRequest, project_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributesDeleteRequest, domain_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributesDeleteRequest, resource_type_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributesDeleteRequest, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::ProjectDomainAttributesDeleteResponse, _internal_metadata_), ~0u, // no _extensions_ @@ -215,12 +218,12 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fproject_5fdomain }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::admin::ProjectDomainAttributes)}, - { 8, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesUpdateRequest)}, - { 14, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesUpdateResponse)}, - { 19, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesGetRequest)}, - { 27, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesGetResponse)}, - { 33, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesDeleteRequest)}, - { 41, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesDeleteResponse)}, + { 9, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesUpdateRequest)}, + { 15, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesUpdateResponse)}, + { 20, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesGetRequest)}, + { 29, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesGetResponse)}, + { 35, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesDeleteRequest)}, + { 44, -1, sizeof(::flyteidl::admin::ProjectDomainAttributesDeleteResponse)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -242,31 +245,32 @@ ::google::protobuf::internal::AssignDescriptorsTable 
assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto[] = "\n.flyteidl/admin/project_domain_attribut" "es.proto\022\016flyteidl.admin\032\'flyteidl/admin" - "/matchable_resource.proto\"{\n\027ProjectDoma" - "inAttributes\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030" - "\002 \001(\t\022\?\n\023matching_attributes\030\003 \001(\0132\".fly" - "teidl.admin.MatchingAttributes\"c\n$Projec" - "tDomainAttributesUpdateRequest\022;\n\nattrib" - "utes\030\001 \001(\0132\'.flyteidl.admin.ProjectDomai" - "nAttributes\"\'\n%ProjectDomainAttributesUp" - "dateResponse\"~\n!ProjectDomainAttributesG" - "etRequest\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030\002 \001" - "(\t\0228\n\rresource_type\030\003 \001(\0162!.flyteidl.adm" - "in.MatchableResource\"a\n\"ProjectDomainAtt" - "ributesGetResponse\022;\n\nattributes\030\001 \001(\0132\'" - ".flyteidl.admin.ProjectDomainAttributes\"" - "\201\001\n$ProjectDomainAttributesDeleteRequest" - "\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\0228\n\rres" - "ource_type\030\003 \001(\0162!.flyteidl.admin.Matcha" - "bleResource\"\'\n%ProjectDomainAttributesDe" - "leteResponseB=Z;github.com/flyteorg/flyt" - "e/flyteidl/gen/pb-go/flyteidl/adminb\006pro" - "to3" + "/matchable_resource.proto\"\210\001\n\027ProjectDom" + "ainAttributes\022\017\n\007project\030\001 \001(\t\022\016\n\006domain" + "\030\002 \001(\t\022\?\n\023matching_attributes\030\003 \001(\0132\".fl" + "yteidl.admin.MatchingAttributes\022\013\n\003org\030\004" + " \001(\t\"c\n$ProjectDomainAttributesUpdateReq" + "uest\022;\n\nattributes\030\001 \001(\0132\'.flyteidl.admi" + "n.ProjectDomainAttributes\"\'\n%ProjectDoma" + "inAttributesUpdateResponse\"\213\001\n!ProjectDo" + "mainAttributesGetRequest\022\017\n\007project\030\001 \001(" + "\t\022\016\n\006domain\030\002 \001(\t\0228\n\rresource_type\030\003 \001(\016" + 
"2!.flyteidl.admin.MatchableResource\022\013\n\003o" + "rg\030\004 \001(\t\"a\n\"ProjectDomainAttributesGetRe" + "sponse\022;\n\nattributes\030\001 \001(\0132\'.flyteidl.ad" + "min.ProjectDomainAttributes\"\216\001\n$ProjectD" + "omainAttributesDeleteRequest\022\017\n\007project\030" + "\001 \001(\t\022\016\n\006domain\030\002 \001(\t\0228\n\rresource_type\030\003" + " \001(\0162!.flyteidl.admin.MatchableResource\022" + "\013\n\003org\030\004 \001(\t\"\'\n%ProjectDomainAttributesD" + "eleteResponseB=Z;github.com/flyteorg/fly" + "te/flyteidl/gen/pb-go/flyteidl/adminb\006pr" + "oto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto = { false, InitDefaults_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto, descriptor_table_protodef_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto, - "flyteidl/admin/project_domain_attributes.proto", &assign_descriptors_table_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto, 843, + "flyteidl/admin/project_domain_attributes.proto", &assign_descriptors_table_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto, 884, }; void AddDescriptors_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto() { @@ -307,6 +311,7 @@ void ProjectDomainAttributes::clear_matching_attributes() { const int ProjectDomainAttributes::kProjectFieldNumber; const int ProjectDomainAttributes::kDomainFieldNumber; const int ProjectDomainAttributes::kMatchingAttributesFieldNumber; +const int ProjectDomainAttributes::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 ProjectDomainAttributes::ProjectDomainAttributes() @@ -326,6 +331,10 @@ ProjectDomainAttributes::ProjectDomainAttributes(const ProjectDomainAttributes& if (from.domain().size() > 0) { domain_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.domain_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if 
(from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_matching_attributes()) { matching_attributes_ = new ::flyteidl::admin::MatchingAttributes(*from.matching_attributes_); } else { @@ -339,6 +348,7 @@ void ProjectDomainAttributes::SharedCtor() { &scc_info_ProjectDomainAttributes_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto.base); project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); matching_attributes_ = nullptr; } @@ -350,6 +360,7 @@ ProjectDomainAttributes::~ProjectDomainAttributes() { void ProjectDomainAttributes::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete matching_attributes_; } @@ -370,6 +381,7 @@ void ProjectDomainAttributes::Clear() { project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && matching_attributes_ != nullptr) { delete matching_attributes_; } @@ -435,6 +447,22 @@ const char* ProjectDomainAttributes::_InternalParse(const char* begin, const cha {parser_till_end, object}, ptr - size, ptr)); break; } + // string org = 4; + case 4: { + if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + 
ctx->extra_parse_data().SetFieldName("flyteidl.admin.ProjectDomainAttributes.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -510,6 +538,21 @@ bool ProjectDomainAttributes::MergePartialFromCodedStream( break; } + // string org = 4; + case 4: { + if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.ProjectDomainAttributes.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -563,6 +606,16 @@ void ProjectDomainAttributes::SerializeWithCachedSizes( 3, HasBitSetters::matching_attributes(this), output); } + // string org = 4; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectDomainAttributes.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 4, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -605,6 +658,17 @@ ::google::protobuf::uint8* ProjectDomainAttributes::InternalSerializeWithCachedS 3, 
HasBitSetters::matching_attributes(this), target); } + // string org = 4; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectDomainAttributes.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 4, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -640,6 +704,13 @@ size_t ProjectDomainAttributes::ByteSizeLong() const { this->domain()); } + // string org = 4; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.MatchingAttributes matching_attributes = 3; if (this->has_matching_attributes()) { total_size += 1 + @@ -682,6 +753,10 @@ void ProjectDomainAttributes::MergeFrom(const ProjectDomainAttributes& from) { domain_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.domain_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_matching_attributes()) { mutable_matching_attributes()->::flyteidl::admin::MatchingAttributes::MergeFrom(from.matching_attributes()); } @@ -716,6 +791,8 @@ void ProjectDomainAttributes::InternalSwap(ProjectDomainAttributes* other) { GetArenaNoVirtual()); domain_.Swap(&other->domain_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(matching_attributes_, other->matching_attributes_); } @@ -1233,6 +1310,7 @@ class ProjectDomainAttributesGetRequest::HasBitSetters { const int 
ProjectDomainAttributesGetRequest::kProjectFieldNumber; const int ProjectDomainAttributesGetRequest::kDomainFieldNumber; const int ProjectDomainAttributesGetRequest::kResourceTypeFieldNumber; +const int ProjectDomainAttributesGetRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 ProjectDomainAttributesGetRequest::ProjectDomainAttributesGetRequest() @@ -1252,6 +1330,10 @@ ProjectDomainAttributesGetRequest::ProjectDomainAttributesGetRequest(const Proje if (from.domain().size() > 0) { domain_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.domain_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } resource_type_ = from.resource_type_; // @@protoc_insertion_point(copy_constructor:flyteidl.admin.ProjectDomainAttributesGetRequest) } @@ -1261,6 +1343,7 @@ void ProjectDomainAttributesGetRequest::SharedCtor() { &scc_info_ProjectDomainAttributesGetRequest_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto.base); project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; } @@ -1272,6 +1355,7 @@ ProjectDomainAttributesGetRequest::~ProjectDomainAttributesGetRequest() { void ProjectDomainAttributesGetRequest::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } void ProjectDomainAttributesGetRequest::SetCachedSize(int size) const { @@ -1291,6 +1375,7 @@ void ProjectDomainAttributesGetRequest::Clear() { 
project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; _internal_metadata_.Clear(); } @@ -1348,6 +1433,22 @@ const char* ProjectDomainAttributesGetRequest::_InternalParse(const char* begin, GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); break; } + // string org = 4; + case 4: { + if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.ProjectDomainAttributesGetRequest.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -1426,6 +1527,21 @@ bool ProjectDomainAttributesGetRequest::MergePartialFromCodedStream( break; } + // string org = 4; + case 4: { + if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.ProjectDomainAttributesGetRequest.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -1479,6 +1595,16 @@ void ProjectDomainAttributesGetRequest::SerializeWithCachedSizes( 3, 
this->resource_type(), output); } + // string org = 4; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectDomainAttributesGetRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 4, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -1520,6 +1646,17 @@ ::google::protobuf::uint8* ProjectDomainAttributesGetRequest::InternalSerializeW 3, this->resource_type(), target); } + // string org = 4; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectDomainAttributesGetRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 4, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -1555,6 +1692,13 @@ size_t ProjectDomainAttributesGetRequest::ByteSizeLong() const { this->domain()); } + // string org = 4; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.MatchableResource resource_type = 3; if (this->resource_type() != 0) { total_size += 1 + @@ -1596,6 +1740,10 @@ void ProjectDomainAttributesGetRequest::MergeFrom(const ProjectDomainAttributesG domain_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.domain_); } + if (from.org().size() > 0) { + + 
org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.resource_type() != 0) { set_resource_type(from.resource_type()); } @@ -1630,6 +1778,8 @@ void ProjectDomainAttributesGetRequest::InternalSwap(ProjectDomainAttributesGetR GetArenaNoVirtual()); domain_.Swap(&other->domain_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(resource_type_, other->resource_type_); } @@ -1938,6 +2088,7 @@ class ProjectDomainAttributesDeleteRequest::HasBitSetters { const int ProjectDomainAttributesDeleteRequest::kProjectFieldNumber; const int ProjectDomainAttributesDeleteRequest::kDomainFieldNumber; const int ProjectDomainAttributesDeleteRequest::kResourceTypeFieldNumber; +const int ProjectDomainAttributesDeleteRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 ProjectDomainAttributesDeleteRequest::ProjectDomainAttributesDeleteRequest() @@ -1957,6 +2108,10 @@ ProjectDomainAttributesDeleteRequest::ProjectDomainAttributesDeleteRequest(const if (from.domain().size() > 0) { domain_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.domain_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } resource_type_ = from.resource_type_; // @@protoc_insertion_point(copy_constructor:flyteidl.admin.ProjectDomainAttributesDeleteRequest) } @@ -1966,6 +2121,7 @@ void ProjectDomainAttributesDeleteRequest::SharedCtor() { &scc_info_ProjectDomainAttributesDeleteRequest_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto.base); project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); 
domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; } @@ -1977,6 +2133,7 @@ ProjectDomainAttributesDeleteRequest::~ProjectDomainAttributesDeleteRequest() { void ProjectDomainAttributesDeleteRequest::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } void ProjectDomainAttributesDeleteRequest::SetCachedSize(int size) const { @@ -1996,6 +2153,7 @@ void ProjectDomainAttributesDeleteRequest::Clear() { project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; _internal_metadata_.Clear(); } @@ -2053,6 +2211,22 @@ const char* ProjectDomainAttributesDeleteRequest::_InternalParse(const char* beg GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); break; } + // string org = 4; + case 4: { + if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.ProjectDomainAttributesDeleteRequest.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag 
& 7) == 4 || tag == 0) { @@ -2131,6 +2305,21 @@ bool ProjectDomainAttributesDeleteRequest::MergePartialFromCodedStream( break; } + // string org = 4; + case 4: { + if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.ProjectDomainAttributesDeleteRequest.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -2184,6 +2373,16 @@ void ProjectDomainAttributesDeleteRequest::SerializeWithCachedSizes( 3, this->resource_type(), output); } + // string org = 4; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectDomainAttributesDeleteRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 4, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -2225,6 +2424,17 @@ ::google::protobuf::uint8* ProjectDomainAttributesDeleteRequest::InternalSeriali 3, this->resource_type(), target); } + // string org = 4; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.ProjectDomainAttributesDeleteRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 4, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = 
::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -2260,6 +2470,13 @@ size_t ProjectDomainAttributesDeleteRequest::ByteSizeLong() const { this->domain()); } + // string org = 4; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.MatchableResource resource_type = 3; if (this->resource_type() != 0) { total_size += 1 + @@ -2301,6 +2518,10 @@ void ProjectDomainAttributesDeleteRequest::MergeFrom(const ProjectDomainAttribut domain_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.domain_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.resource_type() != 0) { set_resource_type(from.resource_type()); } @@ -2335,6 +2556,8 @@ void ProjectDomainAttributesDeleteRequest::InternalSwap(ProjectDomainAttributesD GetArenaNoVirtual()); domain_.Swap(&other->domain_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(resource_type_, other->resource_type_); } diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/project_domain_attributes.pb.h b/flyteidl/gen/pb-cpp/flyteidl/admin/project_domain_attributes.pb.h index 3ad739b9dd..aa27ef3a0a 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/project_domain_attributes.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/project_domain_attributes.pb.h @@ -213,6 +213,20 @@ class ProjectDomainAttributes final : ::std::string* release_domain(); void set_allocated_domain(::std::string* domain); + // string org = 4; + void clear_org(); + static const int kOrgFieldNumber = 4; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + 
#endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.MatchingAttributes matching_attributes = 3; bool has_matching_attributes() const; void clear_matching_attributes(); @@ -229,6 +243,7 @@ class ProjectDomainAttributes final : ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::ArenaStringPtr project_; ::google::protobuf::internal::ArenaStringPtr domain_; + ::google::protobuf::internal::ArenaStringPtr org_; ::flyteidl::admin::MatchingAttributes* matching_attributes_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto; @@ -578,6 +593,20 @@ class ProjectDomainAttributesGetRequest final : ::std::string* release_domain(); void set_allocated_domain(::std::string* domain); + // string org = 4; + void clear_org(); + static const int kOrgFieldNumber = 4; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.MatchableResource resource_type = 3; void clear_resource_type(); static const int kResourceTypeFieldNumber = 3; @@ -591,6 +620,7 @@ class ProjectDomainAttributesGetRequest final : ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::ArenaStringPtr project_; ::google::protobuf::internal::ArenaStringPtr domain_; + ::google::protobuf::internal::ArenaStringPtr org_; int resource_type_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct 
::TableStruct_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto; @@ -835,6 +865,20 @@ class ProjectDomainAttributesDeleteRequest final : ::std::string* release_domain(); void set_allocated_domain(::std::string* domain); + // string org = 4; + void clear_org(); + static const int kOrgFieldNumber = 4; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.MatchableResource resource_type = 3; void clear_resource_type(); static const int kResourceTypeFieldNumber = 3; @@ -848,6 +892,7 @@ class ProjectDomainAttributesDeleteRequest final : ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::ArenaStringPtr project_; ::google::protobuf::internal::ArenaStringPtr domain_; + ::google::protobuf::internal::ArenaStringPtr org_; int resource_type_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fproject_5fdomain_5fattributes_2eproto; @@ -1119,6 +1164,59 @@ inline void ProjectDomainAttributes::set_allocated_matching_attributes(::flyteid // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ProjectDomainAttributes.matching_attributes) } +// string org = 4; +inline void ProjectDomainAttributes::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& ProjectDomainAttributes::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.ProjectDomainAttributes.org) + return org_.GetNoArena(); +} +inline void ProjectDomainAttributes::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // 
@@protoc_insertion_point(field_set:flyteidl.admin.ProjectDomainAttributes.org) +} +#if LANG_CXX11 +inline void ProjectDomainAttributes::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.ProjectDomainAttributes.org) +} +#endif +inline void ProjectDomainAttributes::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.ProjectDomainAttributes.org) +} +inline void ProjectDomainAttributes::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.ProjectDomainAttributes.org) +} +inline ::std::string* ProjectDomainAttributes::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.ProjectDomainAttributes.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* ProjectDomainAttributes::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.ProjectDomainAttributes.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void ProjectDomainAttributes::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ProjectDomainAttributes.org) +} + // ------------------------------------------------------------------- // ProjectDomainAttributesUpdateRequest @@ -1302,6 +1400,59 @@ inline void 
ProjectDomainAttributesGetRequest::set_resource_type(::flyteidl::adm // @@protoc_insertion_point(field_set:flyteidl.admin.ProjectDomainAttributesGetRequest.resource_type) } +// string org = 4; +inline void ProjectDomainAttributesGetRequest::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& ProjectDomainAttributesGetRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.ProjectDomainAttributesGetRequest.org) + return org_.GetNoArena(); +} +inline void ProjectDomainAttributesGetRequest::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.ProjectDomainAttributesGetRequest.org) +} +#if LANG_CXX11 +inline void ProjectDomainAttributesGetRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.ProjectDomainAttributesGetRequest.org) +} +#endif +inline void ProjectDomainAttributesGetRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.ProjectDomainAttributesGetRequest.org) +} +inline void ProjectDomainAttributesGetRequest::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.ProjectDomainAttributesGetRequest.org) +} +inline ::std::string* ProjectDomainAttributesGetRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.ProjectDomainAttributesGetRequest.org) + return 
org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* ProjectDomainAttributesGetRequest::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.ProjectDomainAttributesGetRequest.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void ProjectDomainAttributesGetRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ProjectDomainAttributesGetRequest.org) +} + // ------------------------------------------------------------------- // ProjectDomainAttributesGetResponse @@ -1481,6 +1632,59 @@ inline void ProjectDomainAttributesDeleteRequest::set_resource_type(::flyteidl:: // @@protoc_insertion_point(field_set:flyteidl.admin.ProjectDomainAttributesDeleteRequest.resource_type) } +// string org = 4; +inline void ProjectDomainAttributesDeleteRequest::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& ProjectDomainAttributesDeleteRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.ProjectDomainAttributesDeleteRequest.org) + return org_.GetNoArena(); +} +inline void ProjectDomainAttributesDeleteRequest::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.ProjectDomainAttributesDeleteRequest.org) +} +#if LANG_CXX11 +inline void ProjectDomainAttributesDeleteRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.ProjectDomainAttributesDeleteRequest.org) +} +#endif 
+inline void ProjectDomainAttributesDeleteRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.ProjectDomainAttributesDeleteRequest.org) +} +inline void ProjectDomainAttributesDeleteRequest::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.ProjectDomainAttributesDeleteRequest.org) +} +inline ::std::string* ProjectDomainAttributesDeleteRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.ProjectDomainAttributesDeleteRequest.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* ProjectDomainAttributesDeleteRequest::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.ProjectDomainAttributesDeleteRequest.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void ProjectDomainAttributesDeleteRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.ProjectDomainAttributesDeleteRequest.org) +} + // ------------------------------------------------------------------- // ProjectDomainAttributesDeleteResponse diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/workflow_attributes.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/admin/workflow_attributes.pb.cc index 0a85bfeb1d..f181c74738 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/workflow_attributes.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/workflow_attributes.pb.cc @@ -175,6 +175,7 @@ const 
::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fworkflow_5fattri PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributes, domain_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributes, workflow_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributes, matching_attributes_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributes, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributesUpdateRequest, _internal_metadata_), ~0u, // no _extensions_ @@ -195,6 +196,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fworkflow_5fattri PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributesGetRequest, domain_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributesGetRequest, workflow_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributesGetRequest, resource_type_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributesGetRequest, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributesGetResponse, _internal_metadata_), ~0u, // no _extensions_ @@ -210,6 +212,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fworkflow_5fattri PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributesDeleteRequest, domain_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributesDeleteRequest, workflow_), PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributesDeleteRequest, resource_type_), + PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributesDeleteRequest, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::admin::WorkflowAttributesDeleteResponse, _internal_metadata_), ~0u, // no _extensions_ @@ -218,12 +221,12 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fadmin_2fworkflow_5fattri }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::admin::WorkflowAttributes)}, - { 9, -1, 
sizeof(::flyteidl::admin::WorkflowAttributesUpdateRequest)}, - { 15, -1, sizeof(::flyteidl::admin::WorkflowAttributesUpdateResponse)}, - { 20, -1, sizeof(::flyteidl::admin::WorkflowAttributesGetRequest)}, - { 29, -1, sizeof(::flyteidl::admin::WorkflowAttributesGetResponse)}, - { 35, -1, sizeof(::flyteidl::admin::WorkflowAttributesDeleteRequest)}, - { 44, -1, sizeof(::flyteidl::admin::WorkflowAttributesDeleteResponse)}, + { 10, -1, sizeof(::flyteidl::admin::WorkflowAttributesUpdateRequest)}, + { 16, -1, sizeof(::flyteidl::admin::WorkflowAttributesUpdateResponse)}, + { 21, -1, sizeof(::flyteidl::admin::WorkflowAttributesGetRequest)}, + { 31, -1, sizeof(::flyteidl::admin::WorkflowAttributesGetResponse)}, + { 37, -1, sizeof(::flyteidl::admin::WorkflowAttributesDeleteRequest)}, + { 47, -1, sizeof(::flyteidl::admin::WorkflowAttributesDeleteResponse)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -245,31 +248,32 @@ ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fadmin_2fworkflow_5fattributes_2eproto[] = "\n(flyteidl/admin/workflow_attributes.pro" "to\022\016flyteidl.admin\032\'flyteidl/admin/match" - "able_resource.proto\"\210\001\n\022WorkflowAttribut" + "able_resource.proto\"\225\001\n\022WorkflowAttribut" "es\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\022\020\n\010w" "orkflow\030\003 \001(\t\022\?\n\023matching_attributes\030\004 \001" - "(\0132\".flyteidl.admin.MatchingAttributes\"Y" - "\n\037WorkflowAttributesUpdateRequest\0226\n\natt" - "ributes\030\001 \001(\0132\".flyteidl.admin.WorkflowA" - "ttributes\"\"\n WorkflowAttributesUpdateRes" - "ponse\"\213\001\n\034WorkflowAttributesGetRequest\022\017" - "\n\007project\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\022\020\n\010workf" - "low\030\003 \001(\t\0228\n\rresource_type\030\004 \001(\0162!.flyte" - "idl.admin.MatchableResource\"W\n\035WorkflowA" - 
"ttributesGetResponse\0226\n\nattributes\030\001 \001(\013" - "2\".flyteidl.admin.WorkflowAttributes\"\216\001\n" - "\037WorkflowAttributesDeleteRequest\022\017\n\007proj" - "ect\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\022\020\n\010workflow\030\003 " - "\001(\t\0228\n\rresource_type\030\004 \001(\0162!.flyteidl.ad" - "min.MatchableResource\"\"\n WorkflowAttribu" - "tesDeleteResponseB=Z;github.com/flyteorg" - "/flyte/flyteidl/gen/pb-go/flyteidl/admin" - "b\006proto3" + "(\0132\".flyteidl.admin.MatchingAttributes\022\013" + "\n\003org\030\005 \001(\t\"Y\n\037WorkflowAttributesUpdateR" + "equest\0226\n\nattributes\030\001 \001(\0132\".flyteidl.ad" + "min.WorkflowAttributes\"\"\n WorkflowAttrib" + "utesUpdateResponse\"\230\001\n\034WorkflowAttribute" + "sGetRequest\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030\002" + " \001(\t\022\020\n\010workflow\030\003 \001(\t\0228\n\rresource_type\030" + "\004 \001(\0162!.flyteidl.admin.MatchableResource" + "\022\013\n\003org\030\005 \001(\t\"W\n\035WorkflowAttributesGetRe" + "sponse\0226\n\nattributes\030\001 \001(\0132\".flyteidl.ad" + "min.WorkflowAttributes\"\233\001\n\037WorkflowAttri" + "butesDeleteRequest\022\017\n\007project\030\001 \001(\t\022\016\n\006d" + "omain\030\002 \001(\t\022\020\n\010workflow\030\003 \001(\t\0228\n\rresourc" + "e_type\030\004 \001(\0162!.flyteidl.admin.MatchableR" + "esource\022\013\n\003org\030\005 \001(\t\"\"\n WorkflowAttribut" + "esDeleteResponseB=Z;github.com/flyteorg/" + "flyte/flyteidl/gen/pb-go/flyteidl/adminb" + "\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fadmin_2fworkflow_5fattributes_2eproto = { false, InitDefaults_flyteidl_2fadmin_2fworkflow_5fattributes_2eproto, descriptor_table_protodef_flyteidl_2fadmin_2fworkflow_5fattributes_2eproto, - "flyteidl/admin/workflow_attributes.proto", &assign_descriptors_table_flyteidl_2fadmin_2fworkflow_5fattributes_2eproto, 848, + "flyteidl/admin/workflow_attributes.proto", 
&assign_descriptors_table_flyteidl_2fadmin_2fworkflow_5fattributes_2eproto, 887, }; void AddDescriptors_flyteidl_2fadmin_2fworkflow_5fattributes_2eproto() { @@ -311,6 +315,7 @@ const int WorkflowAttributes::kProjectFieldNumber; const int WorkflowAttributes::kDomainFieldNumber; const int WorkflowAttributes::kWorkflowFieldNumber; const int WorkflowAttributes::kMatchingAttributesFieldNumber; +const int WorkflowAttributes::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 WorkflowAttributes::WorkflowAttributes() @@ -334,6 +339,10 @@ WorkflowAttributes::WorkflowAttributes(const WorkflowAttributes& from) if (from.workflow().size() > 0) { workflow_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.workflow_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_matching_attributes()) { matching_attributes_ = new ::flyteidl::admin::MatchingAttributes(*from.matching_attributes_); } else { @@ -348,6 +357,7 @@ void WorkflowAttributes::SharedCtor() { project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); matching_attributes_ = nullptr; } @@ -360,6 +370,7 @@ void WorkflowAttributes::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != 
internal_default_instance()) delete matching_attributes_; } @@ -381,6 +392,7 @@ void WorkflowAttributes::Clear() { project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && matching_attributes_ != nullptr) { delete matching_attributes_; } @@ -462,6 +474,22 @@ const char* WorkflowAttributes::_InternalParse(const char* begin, const char* en {parser_till_end, object}, ptr - size, ptr)); break; } + // string org = 5; + case 5: { + if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.WorkflowAttributes.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -552,6 +580,21 @@ bool WorkflowAttributes::MergePartialFromCodedStream( break; } + // string org = 5; + case 5: { + if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + 
"flyteidl.admin.WorkflowAttributes.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -615,6 +658,16 @@ void WorkflowAttributes::SerializeWithCachedSizes( 4, HasBitSetters::matching_attributes(this), output); } + // string org = 5; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.WorkflowAttributes.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 5, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -668,6 +721,17 @@ ::google::protobuf::uint8* WorkflowAttributes::InternalSerializeWithCachedSizesT 4, HasBitSetters::matching_attributes(this), target); } + // string org = 5; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.WorkflowAttributes.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 5, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -710,6 +774,13 @@ size_t WorkflowAttributes::ByteSizeLong() const { this->workflow()); } + // string org = 5; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.MatchingAttributes matching_attributes = 4; if (this->has_matching_attributes()) { total_size += 1 + @@ -756,6 +827,10 @@ void WorkflowAttributes::MergeFrom(const WorkflowAttributes& from) { 
workflow_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.workflow_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.has_matching_attributes()) { mutable_matching_attributes()->::flyteidl::admin::MatchingAttributes::MergeFrom(from.matching_attributes()); } @@ -792,6 +867,8 @@ void WorkflowAttributes::InternalSwap(WorkflowAttributes* other) { GetArenaNoVirtual()); workflow_.Swap(&other->workflow_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(matching_attributes_, other->matching_attributes_); } @@ -1310,6 +1387,7 @@ const int WorkflowAttributesGetRequest::kProjectFieldNumber; const int WorkflowAttributesGetRequest::kDomainFieldNumber; const int WorkflowAttributesGetRequest::kWorkflowFieldNumber; const int WorkflowAttributesGetRequest::kResourceTypeFieldNumber; +const int WorkflowAttributesGetRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 WorkflowAttributesGetRequest::WorkflowAttributesGetRequest() @@ -1333,6 +1411,10 @@ WorkflowAttributesGetRequest::WorkflowAttributesGetRequest(const WorkflowAttribu if (from.workflow().size() > 0) { workflow_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.workflow_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } resource_type_ = from.resource_type_; // @@protoc_insertion_point(copy_constructor:flyteidl.admin.WorkflowAttributesGetRequest) } @@ -1343,6 +1425,7 @@ void WorkflowAttributesGetRequest::SharedCtor() { project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); 
domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; } @@ -1355,6 +1438,7 @@ void WorkflowAttributesGetRequest::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } void WorkflowAttributesGetRequest::SetCachedSize(int size) const { @@ -1375,6 +1459,7 @@ void WorkflowAttributesGetRequest::Clear() { project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; _internal_metadata_.Clear(); } @@ -1448,6 +1533,22 @@ const char* WorkflowAttributesGetRequest::_InternalParse(const char* begin, cons GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); break; } + // string org = 5; + case 5: { + if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.WorkflowAttributesGetRequest.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, 
size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -1541,6 +1642,21 @@ bool WorkflowAttributesGetRequest::MergePartialFromCodedStream( break; } + // string org = 5; + case 5: { + if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.WorkflowAttributesGetRequest.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -1604,6 +1720,16 @@ void WorkflowAttributesGetRequest::SerializeWithCachedSizes( 4, this->resource_type(), output); } + // string org = 5; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.WorkflowAttributesGetRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 5, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -1656,6 +1782,17 @@ ::google::protobuf::uint8* WorkflowAttributesGetRequest::InternalSerializeWithCa 4, this->resource_type(), target); } + // string org = 5; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.WorkflowAttributesGetRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 
5, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -1698,6 +1835,13 @@ size_t WorkflowAttributesGetRequest::ByteSizeLong() const { this->workflow()); } + // string org = 5; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.MatchableResource resource_type = 4; if (this->resource_type() != 0) { total_size += 1 + @@ -1743,6 +1887,10 @@ void WorkflowAttributesGetRequest::MergeFrom(const WorkflowAttributesGetRequest& workflow_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.workflow_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.resource_type() != 0) { set_resource_type(from.resource_type()); } @@ -1779,6 +1927,8 @@ void WorkflowAttributesGetRequest::InternalSwap(WorkflowAttributesGetRequest* ot GetArenaNoVirtual()); workflow_.Swap(&other->workflow_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(resource_type_, other->resource_type_); } @@ -2088,6 +2238,7 @@ const int WorkflowAttributesDeleteRequest::kProjectFieldNumber; const int WorkflowAttributesDeleteRequest::kDomainFieldNumber; const int WorkflowAttributesDeleteRequest::kWorkflowFieldNumber; const int WorkflowAttributesDeleteRequest::kResourceTypeFieldNumber; +const int WorkflowAttributesDeleteRequest::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 WorkflowAttributesDeleteRequest::WorkflowAttributesDeleteRequest() @@ -2111,6 +2262,10 @@ WorkflowAttributesDeleteRequest::WorkflowAttributesDeleteRequest(const WorkflowA if (from.workflow().size() > 0) 
{ workflow_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.workflow_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } resource_type_ = from.resource_type_; // @@protoc_insertion_point(copy_constructor:flyteidl.admin.WorkflowAttributesDeleteRequest) } @@ -2121,6 +2276,7 @@ void WorkflowAttributesDeleteRequest::SharedCtor() { project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; } @@ -2133,6 +2289,7 @@ void WorkflowAttributesDeleteRequest::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } void WorkflowAttributesDeleteRequest::SetCachedSize(int size) const { @@ -2153,6 +2310,7 @@ void WorkflowAttributesDeleteRequest::Clear() { project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); workflow_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; _internal_metadata_.Clear(); } @@ -2226,6 +2384,22 @@ const char* WorkflowAttributesDeleteRequest::_InternalParse(const char* begin, c 
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); break; } + // string org = 5; + case 5: { + if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.admin.WorkflowAttributesDeleteRequest.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -2319,6 +2493,21 @@ bool WorkflowAttributesDeleteRequest::MergePartialFromCodedStream( break; } + // string org = 5; + case 5: { + if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.admin.WorkflowAttributesDeleteRequest.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -2382,6 +2571,16 @@ void WorkflowAttributesDeleteRequest::SerializeWithCachedSizes( 4, this->resource_type(), output); } + // string org = 5; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.WorkflowAttributesDeleteRequest.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 5, this->org(), output); + } + if 
(_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -2434,6 +2633,17 @@ ::google::protobuf::uint8* WorkflowAttributesDeleteRequest::InternalSerializeWit 4, this->resource_type(), target); } + // string org = 5; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.admin.WorkflowAttributesDeleteRequest.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 5, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -2476,6 +2686,13 @@ size_t WorkflowAttributesDeleteRequest::ByteSizeLong() const { this->workflow()); } + // string org = 5; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.admin.MatchableResource resource_type = 4; if (this->resource_type() != 0) { total_size += 1 + @@ -2521,6 +2738,10 @@ void WorkflowAttributesDeleteRequest::MergeFrom(const WorkflowAttributesDeleteRe workflow_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.workflow_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.resource_type() != 0) { set_resource_type(from.resource_type()); } @@ -2557,6 +2778,8 @@ void WorkflowAttributesDeleteRequest::InternalSwap(WorkflowAttributesDeleteReque GetArenaNoVirtual()); workflow_.Swap(&other->workflow_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, 
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(resource_type_, other->resource_type_); } diff --git a/flyteidl/gen/pb-cpp/flyteidl/admin/workflow_attributes.pb.h b/flyteidl/gen/pb-cpp/flyteidl/admin/workflow_attributes.pb.h index f6e12acc87..52d266d94b 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/admin/workflow_attributes.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/admin/workflow_attributes.pb.h @@ -227,6 +227,20 @@ class WorkflowAttributes final : ::std::string* release_workflow(); void set_allocated_workflow(::std::string* workflow); + // string org = 5; + void clear_org(); + static const int kOrgFieldNumber = 5; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.MatchingAttributes matching_attributes = 4; bool has_matching_attributes() const; void clear_matching_attributes(); @@ -244,6 +258,7 @@ class WorkflowAttributes final : ::google::protobuf::internal::ArenaStringPtr project_; ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr workflow_; + ::google::protobuf::internal::ArenaStringPtr org_; ::flyteidl::admin::MatchingAttributes* matching_attributes_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fworkflow_5fattributes_2eproto; @@ -607,6 +622,20 @@ class WorkflowAttributesGetRequest final : ::std::string* release_workflow(); void set_allocated_workflow(::std::string* workflow); + // string org = 5; + void clear_org(); + static const int kOrgFieldNumber = 5; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void 
set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.MatchableResource resource_type = 4; void clear_resource_type(); static const int kResourceTypeFieldNumber = 4; @@ -621,6 +650,7 @@ class WorkflowAttributesGetRequest final : ::google::protobuf::internal::ArenaStringPtr project_; ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr workflow_; + ::google::protobuf::internal::ArenaStringPtr org_; int resource_type_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fworkflow_5fattributes_2eproto; @@ -879,6 +909,20 @@ class WorkflowAttributesDeleteRequest final : ::std::string* release_workflow(); void set_allocated_workflow(::std::string* workflow); + // string org = 5; + void clear_org(); + static const int kOrgFieldNumber = 5; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.admin.MatchableResource resource_type = 4; void clear_resource_type(); static const int kResourceTypeFieldNumber = 4; @@ -893,6 +937,7 @@ class WorkflowAttributesDeleteRequest final : ::google::protobuf::internal::ArenaStringPtr project_; ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr workflow_; + ::google::protobuf::internal::ArenaStringPtr org_; int resource_type_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fadmin_2fworkflow_5fattributes_2eproto; @@ -1217,6 +1262,59 @@ inline void 
WorkflowAttributes::set_allocated_matching_attributes(::flyteidl::ad // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.WorkflowAttributes.matching_attributes) } +// string org = 5; +inline void WorkflowAttributes::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& WorkflowAttributes::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.WorkflowAttributes.org) + return org_.GetNoArena(); +} +inline void WorkflowAttributes::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.WorkflowAttributes.org) +} +#if LANG_CXX11 +inline void WorkflowAttributes::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.WorkflowAttributes.org) +} +#endif +inline void WorkflowAttributes::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.WorkflowAttributes.org) +} +inline void WorkflowAttributes::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.WorkflowAttributes.org) +} +inline ::std::string* WorkflowAttributes::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.WorkflowAttributes.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* WorkflowAttributes::release_org() { + // 
@@protoc_insertion_point(field_release:flyteidl.admin.WorkflowAttributes.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void WorkflowAttributes::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.WorkflowAttributes.org) +} + // ------------------------------------------------------------------- // WorkflowAttributesUpdateRequest @@ -1453,6 +1551,59 @@ inline void WorkflowAttributesGetRequest::set_resource_type(::flyteidl::admin::M // @@protoc_insertion_point(field_set:flyteidl.admin.WorkflowAttributesGetRequest.resource_type) } +// string org = 5; +inline void WorkflowAttributesGetRequest::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& WorkflowAttributesGetRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.WorkflowAttributesGetRequest.org) + return org_.GetNoArena(); +} +inline void WorkflowAttributesGetRequest::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.WorkflowAttributesGetRequest.org) +} +#if LANG_CXX11 +inline void WorkflowAttributesGetRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.WorkflowAttributesGetRequest.org) +} +#endif +inline void WorkflowAttributesGetRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // 
@@protoc_insertion_point(field_set_char:flyteidl.admin.WorkflowAttributesGetRequest.org) +} +inline void WorkflowAttributesGetRequest::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.WorkflowAttributesGetRequest.org) +} +inline ::std::string* WorkflowAttributesGetRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.WorkflowAttributesGetRequest.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* WorkflowAttributesGetRequest::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.WorkflowAttributesGetRequest.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void WorkflowAttributesGetRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.admin.WorkflowAttributesGetRequest.org) +} + // ------------------------------------------------------------------- // WorkflowAttributesGetResponse @@ -1685,6 +1836,59 @@ inline void WorkflowAttributesDeleteRequest::set_resource_type(::flyteidl::admin // @@protoc_insertion_point(field_set:flyteidl.admin.WorkflowAttributesDeleteRequest.resource_type) } +// string org = 5; +inline void WorkflowAttributesDeleteRequest::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& WorkflowAttributesDeleteRequest::org() const { + // @@protoc_insertion_point(field_get:flyteidl.admin.WorkflowAttributesDeleteRequest.org) + return org_.GetNoArena(); +} +inline void WorkflowAttributesDeleteRequest::set_org(const 
::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.admin.WorkflowAttributesDeleteRequest.org) +} +#if LANG_CXX11 +inline void WorkflowAttributesDeleteRequest::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.admin.WorkflowAttributesDeleteRequest.org) +} +#endif +inline void WorkflowAttributesDeleteRequest::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.admin.WorkflowAttributesDeleteRequest.org) +} +inline void WorkflowAttributesDeleteRequest::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.admin.WorkflowAttributesDeleteRequest.org) +} +inline ::std::string* WorkflowAttributesDeleteRequest::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.admin.WorkflowAttributesDeleteRequest.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* WorkflowAttributesDeleteRequest::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.admin.WorkflowAttributesDeleteRequest.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void WorkflowAttributesDeleteRequest::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // 
@@protoc_insertion_point(field_set_allocated:flyteidl.admin.WorkflowAttributesDeleteRequest.org) +} + // ------------------------------------------------------------------- // WorkflowAttributesDeleteResponse diff --git a/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.grpc.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.grpc.pb.cc deleted file mode 100644 index bcf5f3f71a..0000000000 --- a/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.grpc.pb.cc +++ /dev/null @@ -1,463 +0,0 @@ -// Generated by the gRPC C++ plugin. -// If you make any local change, they will be lost. -// source: flyteidl/artifact/artifacts.proto - -#include "flyteidl/artifact/artifacts.pb.h" -#include "flyteidl/artifact/artifacts.grpc.pb.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -namespace flyteidl { -namespace artifact { - -static const char* ArtifactRegistry_method_names[] = { - "/flyteidl.artifact.ArtifactRegistry/CreateArtifact", - "/flyteidl.artifact.ArtifactRegistry/GetArtifact", - "/flyteidl.artifact.ArtifactRegistry/SearchArtifacts", - "/flyteidl.artifact.ArtifactRegistry/CreateTrigger", - "/flyteidl.artifact.ArtifactRegistry/DeleteTrigger", - "/flyteidl.artifact.ArtifactRegistry/AddTag", - "/flyteidl.artifact.ArtifactRegistry/RegisterProducer", - "/flyteidl.artifact.ArtifactRegistry/RegisterConsumer", - "/flyteidl.artifact.ArtifactRegistry/SetExecutionInputs", - "/flyteidl.artifact.ArtifactRegistry/FindByWorkflowExec", -}; - -std::unique_ptr< ArtifactRegistry::Stub> ArtifactRegistry::NewStub(const std::shared_ptr< ::grpc::ChannelInterface>& channel, const ::grpc::StubOptions& options) { - (void)options; - std::unique_ptr< ArtifactRegistry::Stub> stub(new ArtifactRegistry::Stub(channel)); - return stub; -} - -ArtifactRegistry::Stub::Stub(const std::shared_ptr< ::grpc::ChannelInterface>& channel) - : channel_(channel), rpcmethod_CreateArtifact_(ArtifactRegistry_method_names[0], 
::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_GetArtifact_(ArtifactRegistry_method_names[1], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_SearchArtifacts_(ArtifactRegistry_method_names[2], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_CreateTrigger_(ArtifactRegistry_method_names[3], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_DeleteTrigger_(ArtifactRegistry_method_names[4], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_AddTag_(ArtifactRegistry_method_names[5], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_RegisterProducer_(ArtifactRegistry_method_names[6], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_RegisterConsumer_(ArtifactRegistry_method_names[7], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_SetExecutionInputs_(ArtifactRegistry_method_names[8], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_FindByWorkflowExec_(ArtifactRegistry_method_names[9], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - {} - -::grpc::Status ArtifactRegistry::Stub::CreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::flyteidl::artifact::CreateArtifactResponse* response) { - return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_CreateArtifact_, context, request, response); -} - -void ArtifactRegistry::Stub::experimental_async::CreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_CreateArtifact_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::CreateArtifact(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::CreateArtifactResponse* response, std::function f) { - 
::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_CreateArtifact_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::CreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_CreateArtifact_, context, request, response, reactor); -} - -void ArtifactRegistry::Stub::experimental_async::CreateArtifact(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::CreateArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_CreateArtifact_, context, request, response, reactor); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateArtifactResponse>* ArtifactRegistry::Stub::AsyncCreateArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::CreateArtifactResponse>::Create(channel_.get(), cq, rpcmethod_CreateArtifact_, context, request, true); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateArtifactResponse>* ArtifactRegistry::Stub::PrepareAsyncCreateArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::CreateArtifactResponse>::Create(channel_.get(), cq, rpcmethod_CreateArtifact_, context, request, false); -} - -::grpc::Status ArtifactRegistry::Stub::GetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, 
::flyteidl::artifact::GetArtifactResponse* response) { - return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_GetArtifact_, context, request, response); -} - -void ArtifactRegistry::Stub::experimental_async::GetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_GetArtifact_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::GetArtifact(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::GetArtifactResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_GetArtifact_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::GetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_GetArtifact_, context, request, response, reactor); -} - -void ArtifactRegistry::Stub::experimental_async::GetArtifact(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::GetArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_GetArtifact_, context, request, response, reactor); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::GetArtifactResponse>* ArtifactRegistry::Stub::AsyncGetArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< 
::flyteidl::artifact::GetArtifactResponse>::Create(channel_.get(), cq, rpcmethod_GetArtifact_, context, request, true); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::GetArtifactResponse>* ArtifactRegistry::Stub::PrepareAsyncGetArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::GetArtifactResponse>::Create(channel_.get(), cq, rpcmethod_GetArtifact_, context, request, false); -} - -::grpc::Status ArtifactRegistry::Stub::SearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::flyteidl::artifact::SearchArtifactsResponse* response) { - return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_SearchArtifacts_, context, request, response); -} - -void ArtifactRegistry::Stub::experimental_async::SearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_SearchArtifacts_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::SearchArtifacts(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_SearchArtifacts_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::SearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), 
stub_->rpcmethod_SearchArtifacts_, context, request, response, reactor); -} - -void ArtifactRegistry::Stub::experimental_async::SearchArtifacts(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_SearchArtifacts_, context, request, response, reactor); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>* ArtifactRegistry::Stub::AsyncSearchArtifactsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::SearchArtifactsResponse>::Create(channel_.get(), cq, rpcmethod_SearchArtifacts_, context, request, true); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>* ArtifactRegistry::Stub::PrepareAsyncSearchArtifactsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::SearchArtifactsResponse>::Create(channel_.get(), cq, rpcmethod_SearchArtifacts_, context, request, false); -} - -::grpc::Status ArtifactRegistry::Stub::CreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::flyteidl::artifact::CreateTriggerResponse* response) { - return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_CreateTrigger_, context, request, response); -} - -void ArtifactRegistry::Stub::experimental_async::CreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), 
stub_->rpcmethod_CreateTrigger_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::CreateTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::CreateTriggerResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_CreateTrigger_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::CreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_CreateTrigger_, context, request, response, reactor); -} - -void ArtifactRegistry::Stub::experimental_async::CreateTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::CreateTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_CreateTrigger_, context, request, response, reactor); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateTriggerResponse>* ArtifactRegistry::Stub::AsyncCreateTriggerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::CreateTriggerResponse>::Create(channel_.get(), cq, rpcmethod_CreateTrigger_, context, request, true); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateTriggerResponse>* ArtifactRegistry::Stub::PrepareAsyncCreateTriggerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< 
::flyteidl::artifact::CreateTriggerResponse>::Create(channel_.get(), cq, rpcmethod_CreateTrigger_, context, request, false); -} - -::grpc::Status ArtifactRegistry::Stub::DeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::flyteidl::artifact::DeleteTriggerResponse* response) { - return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_DeleteTrigger_, context, request, response); -} - -void ArtifactRegistry::Stub::experimental_async::DeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_DeleteTrigger_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::DeleteTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::DeleteTriggerResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_DeleteTrigger_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::DeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_DeleteTrigger_, context, request, response, reactor); -} - -void ArtifactRegistry::Stub::experimental_async::DeleteTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::DeleteTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_DeleteTrigger_, context, request, response, reactor); -} - 
-::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::DeleteTriggerResponse>* ArtifactRegistry::Stub::AsyncDeleteTriggerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::DeleteTriggerResponse>::Create(channel_.get(), cq, rpcmethod_DeleteTrigger_, context, request, true); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::DeleteTriggerResponse>* ArtifactRegistry::Stub::PrepareAsyncDeleteTriggerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::DeleteTriggerResponse>::Create(channel_.get(), cq, rpcmethod_DeleteTrigger_, context, request, false); -} - -::grpc::Status ArtifactRegistry::Stub::AddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::flyteidl::artifact::AddTagResponse* response) { - return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_AddTag_, context, request, response); -} - -void ArtifactRegistry::Stub::experimental_async::AddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_AddTag_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::AddTag(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::AddTagResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_AddTag_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::AddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest* request, 
::flyteidl::artifact::AddTagResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_AddTag_, context, request, response, reactor); -} - -void ArtifactRegistry::Stub::experimental_async::AddTag(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::AddTagResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_AddTag_, context, request, response, reactor); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::AddTagResponse>* ArtifactRegistry::Stub::AsyncAddTagRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::AddTagResponse>::Create(channel_.get(), cq, rpcmethod_AddTag_, context, request, true); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::AddTagResponse>* ArtifactRegistry::Stub::PrepareAsyncAddTagRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::AddTagResponse>::Create(channel_.get(), cq, rpcmethod_AddTag_, context, request, false); -} - -::grpc::Status ArtifactRegistry::Stub::RegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::flyteidl::artifact::RegisterResponse* response) { - return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_RegisterProducer_, context, request, response); -} - -void ArtifactRegistry::Stub::experimental_async::RegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response, std::function f) { - 
::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_RegisterProducer_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::RegisterProducer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::RegisterResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_RegisterProducer_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::RegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_RegisterProducer_, context, request, response, reactor); -} - -void ArtifactRegistry::Stub::experimental_async::RegisterProducer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_RegisterProducer_, context, request, response, reactor); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>* ArtifactRegistry::Stub::AsyncRegisterProducerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::RegisterResponse>::Create(channel_.get(), cq, rpcmethod_RegisterProducer_, context, request, true); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>* ArtifactRegistry::Stub::PrepareAsyncRegisterProducerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::grpc::CompletionQueue* 
cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::RegisterResponse>::Create(channel_.get(), cq, rpcmethod_RegisterProducer_, context, request, false); -} - -::grpc::Status ArtifactRegistry::Stub::RegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::flyteidl::artifact::RegisterResponse* response) { - return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_RegisterConsumer_, context, request, response); -} - -void ArtifactRegistry::Stub::experimental_async::RegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_RegisterConsumer_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::RegisterConsumer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::RegisterResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_RegisterConsumer_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::RegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_RegisterConsumer_, context, request, response, reactor); -} - -void ArtifactRegistry::Stub::experimental_async::RegisterConsumer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), 
stub_->rpcmethod_RegisterConsumer_, context, request, response, reactor); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>* ArtifactRegistry::Stub::AsyncRegisterConsumerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::RegisterResponse>::Create(channel_.get(), cq, rpcmethod_RegisterConsumer_, context, request, true); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>* ArtifactRegistry::Stub::PrepareAsyncRegisterConsumerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::RegisterResponse>::Create(channel_.get(), cq, rpcmethod_RegisterConsumer_, context, request, false); -} - -::grpc::Status ArtifactRegistry::Stub::SetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::flyteidl::artifact::ExecutionInputsResponse* response) { - return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_SetExecutionInputs_, context, request, response); -} - -void ArtifactRegistry::Stub::experimental_async::SetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_SetExecutionInputs_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::SetExecutionInputs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::ExecutionInputsResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_SetExecutionInputs_, 
context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::SetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_SetExecutionInputs_, context, request, response, reactor); -} - -void ArtifactRegistry::Stub::experimental_async::SetExecutionInputs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::ExecutionInputsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_SetExecutionInputs_, context, request, response, reactor); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::ExecutionInputsResponse>* ArtifactRegistry::Stub::AsyncSetExecutionInputsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::ExecutionInputsResponse>::Create(channel_.get(), cq, rpcmethod_SetExecutionInputs_, context, request, true); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::ExecutionInputsResponse>* ArtifactRegistry::Stub::PrepareAsyncSetExecutionInputsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::ExecutionInputsResponse>::Create(channel_.get(), cq, rpcmethod_SetExecutionInputs_, context, request, false); -} - -::grpc::Status ArtifactRegistry::Stub::FindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, 
::flyteidl::artifact::SearchArtifactsResponse* response) { - return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_FindByWorkflowExec_, context, request, response); -} - -void ArtifactRegistry::Stub::experimental_async::FindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_FindByWorkflowExec_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::FindByWorkflowExec(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function f) { - ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_FindByWorkflowExec_, context, request, response, std::move(f)); -} - -void ArtifactRegistry::Stub::experimental_async::FindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_FindByWorkflowExec_, context, request, response, reactor); -} - -void ArtifactRegistry::Stub::experimental_async::FindByWorkflowExec(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { - ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_FindByWorkflowExec_, context, request, response, reactor); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>* ArtifactRegistry::Stub::AsyncFindByWorkflowExecRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& 
request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::SearchArtifactsResponse>::Create(channel_.get(), cq, rpcmethod_FindByWorkflowExec_, context, request, true); -} - -::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>* ArtifactRegistry::Stub::PrepareAsyncFindByWorkflowExecRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, ::grpc::CompletionQueue* cq) { - return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::artifact::SearchArtifactsResponse>::Create(channel_.get(), cq, rpcmethod_FindByWorkflowExec_, context, request, false); -} - -ArtifactRegistry::Service::Service() { - AddMethod(new ::grpc::internal::RpcServiceMethod( - ArtifactRegistry_method_names[0], - ::grpc::internal::RpcMethod::NORMAL_RPC, - new ::grpc::internal::RpcMethodHandler< ArtifactRegistry::Service, ::flyteidl::artifact::CreateArtifactRequest, ::flyteidl::artifact::CreateArtifactResponse>( - std::mem_fn(&ArtifactRegistry::Service::CreateArtifact), this))); - AddMethod(new ::grpc::internal::RpcServiceMethod( - ArtifactRegistry_method_names[1], - ::grpc::internal::RpcMethod::NORMAL_RPC, - new ::grpc::internal::RpcMethodHandler< ArtifactRegistry::Service, ::flyteidl::artifact::GetArtifactRequest, ::flyteidl::artifact::GetArtifactResponse>( - std::mem_fn(&ArtifactRegistry::Service::GetArtifact), this))); - AddMethod(new ::grpc::internal::RpcServiceMethod( - ArtifactRegistry_method_names[2], - ::grpc::internal::RpcMethod::NORMAL_RPC, - new ::grpc::internal::RpcMethodHandler< ArtifactRegistry::Service, ::flyteidl::artifact::SearchArtifactsRequest, ::flyteidl::artifact::SearchArtifactsResponse>( - std::mem_fn(&ArtifactRegistry::Service::SearchArtifacts), this))); - AddMethod(new ::grpc::internal::RpcServiceMethod( - ArtifactRegistry_method_names[3], - ::grpc::internal::RpcMethod::NORMAL_RPC, - new ::grpc::internal::RpcMethodHandler< 
ArtifactRegistry::Service, ::flyteidl::artifact::CreateTriggerRequest, ::flyteidl::artifact::CreateTriggerResponse>( - std::mem_fn(&ArtifactRegistry::Service::CreateTrigger), this))); - AddMethod(new ::grpc::internal::RpcServiceMethod( - ArtifactRegistry_method_names[4], - ::grpc::internal::RpcMethod::NORMAL_RPC, - new ::grpc::internal::RpcMethodHandler< ArtifactRegistry::Service, ::flyteidl::artifact::DeleteTriggerRequest, ::flyteidl::artifact::DeleteTriggerResponse>( - std::mem_fn(&ArtifactRegistry::Service::DeleteTrigger), this))); - AddMethod(new ::grpc::internal::RpcServiceMethod( - ArtifactRegistry_method_names[5], - ::grpc::internal::RpcMethod::NORMAL_RPC, - new ::grpc::internal::RpcMethodHandler< ArtifactRegistry::Service, ::flyteidl::artifact::AddTagRequest, ::flyteidl::artifact::AddTagResponse>( - std::mem_fn(&ArtifactRegistry::Service::AddTag), this))); - AddMethod(new ::grpc::internal::RpcServiceMethod( - ArtifactRegistry_method_names[6], - ::grpc::internal::RpcMethod::NORMAL_RPC, - new ::grpc::internal::RpcMethodHandler< ArtifactRegistry::Service, ::flyteidl::artifact::RegisterProducerRequest, ::flyteidl::artifact::RegisterResponse>( - std::mem_fn(&ArtifactRegistry::Service::RegisterProducer), this))); - AddMethod(new ::grpc::internal::RpcServiceMethod( - ArtifactRegistry_method_names[7], - ::grpc::internal::RpcMethod::NORMAL_RPC, - new ::grpc::internal::RpcMethodHandler< ArtifactRegistry::Service, ::flyteidl::artifact::RegisterConsumerRequest, ::flyteidl::artifact::RegisterResponse>( - std::mem_fn(&ArtifactRegistry::Service::RegisterConsumer), this))); - AddMethod(new ::grpc::internal::RpcServiceMethod( - ArtifactRegistry_method_names[8], - ::grpc::internal::RpcMethod::NORMAL_RPC, - new ::grpc::internal::RpcMethodHandler< ArtifactRegistry::Service, ::flyteidl::artifact::ExecutionInputsRequest, ::flyteidl::artifact::ExecutionInputsResponse>( - std::mem_fn(&ArtifactRegistry::Service::SetExecutionInputs), this))); - AddMethod(new 
::grpc::internal::RpcServiceMethod( - ArtifactRegistry_method_names[9], - ::grpc::internal::RpcMethod::NORMAL_RPC, - new ::grpc::internal::RpcMethodHandler< ArtifactRegistry::Service, ::flyteidl::artifact::FindByWorkflowExecRequest, ::flyteidl::artifact::SearchArtifactsResponse>( - std::mem_fn(&ArtifactRegistry::Service::FindByWorkflowExec), this))); -} - -ArtifactRegistry::Service::~Service() { -} - -::grpc::Status ArtifactRegistry::Service::CreateArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response) { - (void) context; - (void) request; - (void) response; - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); -} - -::grpc::Status ArtifactRegistry::Service::GetArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response) { - (void) context; - (void) request; - (void) response; - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); -} - -::grpc::Status ArtifactRegistry::Service::SearchArtifacts(::grpc::ServerContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) { - (void) context; - (void) request; - (void) response; - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); -} - -::grpc::Status ArtifactRegistry::Service::CreateTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response) { - (void) context; - (void) request; - (void) response; - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); -} - -::grpc::Status ArtifactRegistry::Service::DeleteTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response) { - (void) context; - (void) request; - (void) response; - return 
::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); -} - -::grpc::Status ArtifactRegistry::Service::AddTag(::grpc::ServerContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response) { - (void) context; - (void) request; - (void) response; - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); -} - -::grpc::Status ArtifactRegistry::Service::RegisterProducer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response) { - (void) context; - (void) request; - (void) response; - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); -} - -::grpc::Status ArtifactRegistry::Service::RegisterConsumer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response) { - (void) context; - (void) request; - (void) response; - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); -} - -::grpc::Status ArtifactRegistry::Service::SetExecutionInputs(::grpc::ServerContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response) { - (void) context; - (void) request; - (void) response; - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); -} - -::grpc::Status ArtifactRegistry::Service::FindByWorkflowExec(::grpc::ServerContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) { - (void) context; - (void) request; - (void) response; - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); -} - - -} // namespace flyteidl -} // namespace artifact - diff --git a/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.grpc.pb.h b/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.grpc.pb.h deleted file mode 100644 index 1fbe28eb23..0000000000 --- 
a/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.grpc.pb.h +++ /dev/null @@ -1,1704 +0,0 @@ -// Generated by the gRPC C++ plugin. -// If you make any local change, they will be lost. -// source: flyteidl/artifact/artifacts.proto -#ifndef GRPC_flyteidl_2fartifact_2fartifacts_2eproto__INCLUDED -#define GRPC_flyteidl_2fartifact_2fartifacts_2eproto__INCLUDED - -#include "flyteidl/artifact/artifacts.pb.h" - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace grpc_impl { -class Channel; -class CompletionQueue; -class ServerCompletionQueue; -} // namespace grpc_impl - -namespace grpc { -namespace experimental { -template -class MessageAllocator; -} // namespace experimental -} // namespace grpc_impl - -namespace grpc { -class ServerContext; -} // namespace grpc - -namespace flyteidl { -namespace artifact { - -class ArtifactRegistry final { - public: - static constexpr char const* service_full_name() { - return "flyteidl.artifact.ArtifactRegistry"; - } - class StubInterface { - public: - virtual ~StubInterface() {} - virtual ::grpc::Status CreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::flyteidl::artifact::CreateArtifactResponse* response) = 0; - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateArtifactResponse>> AsyncCreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateArtifactResponse>>(AsyncCreateArtifactRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateArtifactResponse>> PrepareAsyncCreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return 
std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateArtifactResponse>>(PrepareAsyncCreateArtifactRaw(context, request, cq)); - } - virtual ::grpc::Status GetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::flyteidl::artifact::GetArtifactResponse* response) = 0; - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::GetArtifactResponse>> AsyncGetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::GetArtifactResponse>>(AsyncGetArtifactRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::GetArtifactResponse>> PrepareAsyncGetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::GetArtifactResponse>>(PrepareAsyncGetArtifactRaw(context, request, cq)); - } - virtual ::grpc::Status SearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::flyteidl::artifact::SearchArtifactsResponse* response) = 0; - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>> AsyncSearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>>(AsyncSearchArtifactsRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>> PrepareAsyncSearchArtifacts(::grpc::ClientContext* context, const 
::flyteidl::artifact::SearchArtifactsRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>>(PrepareAsyncSearchArtifactsRaw(context, request, cq)); - } - virtual ::grpc::Status CreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::flyteidl::artifact::CreateTriggerResponse* response) = 0; - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateTriggerResponse>> AsyncCreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateTriggerResponse>>(AsyncCreateTriggerRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateTriggerResponse>> PrepareAsyncCreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateTriggerResponse>>(PrepareAsyncCreateTriggerRaw(context, request, cq)); - } - virtual ::grpc::Status DeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::flyteidl::artifact::DeleteTriggerResponse* response) = 0; - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::DeleteTriggerResponse>> AsyncDeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::DeleteTriggerResponse>>(AsyncDeleteTriggerRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< 
::flyteidl::artifact::DeleteTriggerResponse>> PrepareAsyncDeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::DeleteTriggerResponse>>(PrepareAsyncDeleteTriggerRaw(context, request, cq)); - } - virtual ::grpc::Status AddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::flyteidl::artifact::AddTagResponse* response) = 0; - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::AddTagResponse>> AsyncAddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::AddTagResponse>>(AsyncAddTagRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::AddTagResponse>> PrepareAsyncAddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::AddTagResponse>>(PrepareAsyncAddTagRaw(context, request, cq)); - } - virtual ::grpc::Status RegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::flyteidl::artifact::RegisterResponse* response) = 0; - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::RegisterResponse>> AsyncRegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::RegisterResponse>>(AsyncRegisterProducerRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< 
::flyteidl::artifact::RegisterResponse>> PrepareAsyncRegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::RegisterResponse>>(PrepareAsyncRegisterProducerRaw(context, request, cq)); - } - virtual ::grpc::Status RegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::flyteidl::artifact::RegisterResponse* response) = 0; - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::RegisterResponse>> AsyncRegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::RegisterResponse>>(AsyncRegisterConsumerRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::RegisterResponse>> PrepareAsyncRegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::RegisterResponse>>(PrepareAsyncRegisterConsumerRaw(context, request, cq)); - } - virtual ::grpc::Status SetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::flyteidl::artifact::ExecutionInputsResponse* response) = 0; - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::ExecutionInputsResponse>> AsyncSetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< 
::flyteidl::artifact::ExecutionInputsResponse>>(AsyncSetExecutionInputsRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::ExecutionInputsResponse>> PrepareAsyncSetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::ExecutionInputsResponse>>(PrepareAsyncSetExecutionInputsRaw(context, request, cq)); - } - virtual ::grpc::Status FindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, ::flyteidl::artifact::SearchArtifactsResponse* response) = 0; - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>> AsyncFindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>>(AsyncFindByWorkflowExecRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>> PrepareAsyncFindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>>(PrepareAsyncFindByWorkflowExecRaw(context, request, cq)); - } - class experimental_async_interface { - public: - virtual ~experimental_async_interface() {} - virtual void CreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response, std::function) = 0; - virtual void CreateArtifact(::grpc::ClientContext* context, const 
::grpc::ByteBuffer* request, ::flyteidl::artifact::CreateArtifactResponse* response, std::function) = 0; - virtual void CreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void CreateArtifact(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::CreateArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void GetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response, std::function) = 0; - virtual void GetArtifact(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::GetArtifactResponse* response, std::function) = 0; - virtual void GetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void GetArtifact(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::GetArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void SearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function) = 0; - virtual void SearchArtifacts(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function) = 0; - virtual void SearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void SearchArtifacts(::grpc::ClientContext* 
context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void CreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response, std::function) = 0; - virtual void CreateTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::CreateTriggerResponse* response, std::function) = 0; - virtual void CreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void CreateTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::CreateTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void DeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response, std::function) = 0; - virtual void DeleteTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::DeleteTriggerResponse* response, std::function) = 0; - virtual void DeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void DeleteTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::DeleteTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void AddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response, std::function) = 0; - virtual void AddTag(::grpc::ClientContext* context, const 
::grpc::ByteBuffer* request, ::flyteidl::artifact::AddTagResponse* response, std::function) = 0; - virtual void AddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void AddTag(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::AddTagResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void RegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response, std::function) = 0; - virtual void RegisterProducer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::RegisterResponse* response, std::function) = 0; - virtual void RegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void RegisterProducer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void RegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response, std::function) = 0; - virtual void RegisterConsumer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::RegisterResponse* response, std::function) = 0; - virtual void RegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void RegisterConsumer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, 
::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void SetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response, std::function) = 0; - virtual void SetExecutionInputs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::ExecutionInputsResponse* response, std::function) = 0; - virtual void SetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void SetExecutionInputs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::ExecutionInputsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void FindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function) = 0; - virtual void FindByWorkflowExec(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function) = 0; - virtual void FindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - virtual void FindByWorkflowExec(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - }; - virtual class experimental_async_interface* experimental_async() { return nullptr; } - private: - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateArtifactResponse>* 
AsyncCreateArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateArtifactResponse>* PrepareAsyncCreateArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::GetArtifactResponse>* AsyncGetArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::GetArtifactResponse>* PrepareAsyncGetArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>* AsyncSearchArtifactsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>* PrepareAsyncSearchArtifactsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateTriggerResponse>* AsyncCreateTriggerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::CreateTriggerResponse>* PrepareAsyncCreateTriggerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< 
::flyteidl::artifact::DeleteTriggerResponse>* AsyncDeleteTriggerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::DeleteTriggerResponse>* PrepareAsyncDeleteTriggerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::AddTagResponse>* AsyncAddTagRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::AddTagResponse>* PrepareAsyncAddTagRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::RegisterResponse>* AsyncRegisterProducerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::RegisterResponse>* PrepareAsyncRegisterProducerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::RegisterResponse>* AsyncRegisterConsumerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::RegisterResponse>* PrepareAsyncRegisterConsumerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< 
::flyteidl::artifact::ExecutionInputsResponse>* AsyncSetExecutionInputsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::ExecutionInputsResponse>* PrepareAsyncSetExecutionInputsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>* AsyncFindByWorkflowExecRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, ::grpc::CompletionQueue* cq) = 0; - virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::artifact::SearchArtifactsResponse>* PrepareAsyncFindByWorkflowExecRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, ::grpc::CompletionQueue* cq) = 0; - }; - class Stub final : public StubInterface { - public: - Stub(const std::shared_ptr< ::grpc::ChannelInterface>& channel); - ::grpc::Status CreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::flyteidl::artifact::CreateArtifactResponse* response) override; - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateArtifactResponse>> AsyncCreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateArtifactResponse>>(AsyncCreateArtifactRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateArtifactResponse>> PrepareAsyncCreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< 
::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateArtifactResponse>>(PrepareAsyncCreateArtifactRaw(context, request, cq)); - } - ::grpc::Status GetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::flyteidl::artifact::GetArtifactResponse* response) override; - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::GetArtifactResponse>> AsyncGetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::GetArtifactResponse>>(AsyncGetArtifactRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::GetArtifactResponse>> PrepareAsyncGetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::GetArtifactResponse>>(PrepareAsyncGetArtifactRaw(context, request, cq)); - } - ::grpc::Status SearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::flyteidl::artifact::SearchArtifactsResponse* response) override; - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>> AsyncSearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>>(AsyncSearchArtifactsRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>> PrepareAsyncSearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< 
::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>>(PrepareAsyncSearchArtifactsRaw(context, request, cq)); - } - ::grpc::Status CreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::flyteidl::artifact::CreateTriggerResponse* response) override; - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateTriggerResponse>> AsyncCreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateTriggerResponse>>(AsyncCreateTriggerRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateTriggerResponse>> PrepareAsyncCreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateTriggerResponse>>(PrepareAsyncCreateTriggerRaw(context, request, cq)); - } - ::grpc::Status DeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::flyteidl::artifact::DeleteTriggerResponse* response) override; - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::DeleteTriggerResponse>> AsyncDeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::DeleteTriggerResponse>>(AsyncDeleteTriggerRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::DeleteTriggerResponse>> PrepareAsyncDeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< 
::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::DeleteTriggerResponse>>(PrepareAsyncDeleteTriggerRaw(context, request, cq)); - } - ::grpc::Status AddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::flyteidl::artifact::AddTagResponse* response) override; - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::AddTagResponse>> AsyncAddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::AddTagResponse>>(AsyncAddTagRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::AddTagResponse>> PrepareAsyncAddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::AddTagResponse>>(PrepareAsyncAddTagRaw(context, request, cq)); - } - ::grpc::Status RegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::flyteidl::artifact::RegisterResponse* response) override; - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>> AsyncRegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>>(AsyncRegisterProducerRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>> PrepareAsyncRegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< 
::flyteidl::artifact::RegisterResponse>>(PrepareAsyncRegisterProducerRaw(context, request, cq)); - } - ::grpc::Status RegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::flyteidl::artifact::RegisterResponse* response) override; - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>> AsyncRegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>>(AsyncRegisterConsumerRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>> PrepareAsyncRegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>>(PrepareAsyncRegisterConsumerRaw(context, request, cq)); - } - ::grpc::Status SetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::flyteidl::artifact::ExecutionInputsResponse* response) override; - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::ExecutionInputsResponse>> AsyncSetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::ExecutionInputsResponse>>(AsyncSetExecutionInputsRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::ExecutionInputsResponse>> PrepareAsyncSetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< 
::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::ExecutionInputsResponse>>(PrepareAsyncSetExecutionInputsRaw(context, request, cq)); - } - ::grpc::Status FindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, ::flyteidl::artifact::SearchArtifactsResponse* response) override; - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>> AsyncFindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>>(AsyncFindByWorkflowExecRaw(context, request, cq)); - } - std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>> PrepareAsyncFindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, ::grpc::CompletionQueue* cq) { - return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>>(PrepareAsyncFindByWorkflowExecRaw(context, request, cq)); - } - class experimental_async final : - public StubInterface::experimental_async_interface { - public: - void CreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response, std::function) override; - void CreateArtifact(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::CreateArtifactResponse* response, std::function) override; - void CreateArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void CreateArtifact(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, 
::flyteidl::artifact::CreateArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void GetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response, std::function) override; - void GetArtifact(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::GetArtifactResponse* response, std::function) override; - void GetArtifact(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void GetArtifact(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::GetArtifactResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void SearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function) override; - void SearchArtifacts(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function) override; - void SearchArtifacts(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void SearchArtifacts(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void CreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response, std::function) override; - void CreateTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, 
::flyteidl::artifact::CreateTriggerResponse* response, std::function) override; - void CreateTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void CreateTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::CreateTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void DeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response, std::function) override; - void DeleteTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::DeleteTriggerResponse* response, std::function) override; - void DeleteTrigger(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void DeleteTrigger(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::DeleteTriggerResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void AddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response, std::function) override; - void AddTag(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::AddTagResponse* response, std::function) override; - void AddTag(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void AddTag(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::AddTagResponse* response, 
::grpc::experimental::ClientUnaryReactor* reactor) override; - void RegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response, std::function) override; - void RegisterProducer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::RegisterResponse* response, std::function) override; - void RegisterProducer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void RegisterProducer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void RegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response, std::function) override; - void RegisterConsumer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::RegisterResponse* response, std::function) override; - void RegisterConsumer(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void RegisterConsumer(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void SetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response, std::function) override; - void SetExecutionInputs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::ExecutionInputsResponse* response, 
std::function) override; - void SetExecutionInputs(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void SetExecutionInputs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::ExecutionInputsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void FindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function) override; - void FindByWorkflowExec(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, std::function) override; - void FindByWorkflowExec(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - void FindByWorkflowExec(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; - private: - friend class Stub; - explicit experimental_async(Stub* stub): stub_(stub) { } - Stub* stub() { return stub_; } - Stub* stub_; - }; - class experimental_async_interface* experimental_async() override { return &async_stub_; } - - private: - std::shared_ptr< ::grpc::ChannelInterface> channel_; - class experimental_async async_stub_{this}; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateArtifactResponse>* AsyncCreateArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateArtifactResponse>* 
PrepareAsyncCreateArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateArtifactRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::GetArtifactResponse>* AsyncGetArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::GetArtifactResponse>* PrepareAsyncGetArtifactRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::GetArtifactRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>* AsyncSearchArtifactsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>* PrepareAsyncSearchArtifactsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::SearchArtifactsRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateTriggerResponse>* AsyncCreateTriggerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::CreateTriggerResponse>* PrepareAsyncCreateTriggerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::CreateTriggerRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::DeleteTriggerResponse>* AsyncDeleteTriggerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::DeleteTriggerRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::DeleteTriggerResponse>* PrepareAsyncDeleteTriggerRaw(::grpc::ClientContext* context, const 
::flyteidl::artifact::DeleteTriggerRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::AddTagResponse>* AsyncAddTagRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::AddTagResponse>* PrepareAsyncAddTagRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::AddTagRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>* AsyncRegisterProducerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>* PrepareAsyncRegisterProducerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterProducerRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>* AsyncRegisterConsumerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::RegisterResponse>* PrepareAsyncRegisterConsumerRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::RegisterConsumerRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::ExecutionInputsResponse>* AsyncSetExecutionInputsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::ExecutionInputsResponse>* PrepareAsyncSetExecutionInputsRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::ExecutionInputsRequest& request, ::grpc::CompletionQueue* cq) override; - 
::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>* AsyncFindByWorkflowExecRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, ::grpc::CompletionQueue* cq) override; - ::grpc::ClientAsyncResponseReader< ::flyteidl::artifact::SearchArtifactsResponse>* PrepareAsyncFindByWorkflowExecRaw(::grpc::ClientContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest& request, ::grpc::CompletionQueue* cq) override; - const ::grpc::internal::RpcMethod rpcmethod_CreateArtifact_; - const ::grpc::internal::RpcMethod rpcmethod_GetArtifact_; - const ::grpc::internal::RpcMethod rpcmethod_SearchArtifacts_; - const ::grpc::internal::RpcMethod rpcmethod_CreateTrigger_; - const ::grpc::internal::RpcMethod rpcmethod_DeleteTrigger_; - const ::grpc::internal::RpcMethod rpcmethod_AddTag_; - const ::grpc::internal::RpcMethod rpcmethod_RegisterProducer_; - const ::grpc::internal::RpcMethod rpcmethod_RegisterConsumer_; - const ::grpc::internal::RpcMethod rpcmethod_SetExecutionInputs_; - const ::grpc::internal::RpcMethod rpcmethod_FindByWorkflowExec_; - }; - static std::unique_ptr NewStub(const std::shared_ptr< ::grpc::ChannelInterface>& channel, const ::grpc::StubOptions& options = ::grpc::StubOptions()); - - class Service : public ::grpc::Service { - public: - Service(); - virtual ~Service(); - virtual ::grpc::Status CreateArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response); - virtual ::grpc::Status GetArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response); - virtual ::grpc::Status SearchArtifacts(::grpc::ServerContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response); - virtual ::grpc::Status CreateTrigger(::grpc::ServerContext* 
context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response); - virtual ::grpc::Status DeleteTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response); - virtual ::grpc::Status AddTag(::grpc::ServerContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response); - virtual ::grpc::Status RegisterProducer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response); - virtual ::grpc::Status RegisterConsumer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response); - virtual ::grpc::Status SetExecutionInputs(::grpc::ServerContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response); - virtual ::grpc::Status FindByWorkflowExec(::grpc::ServerContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response); - }; - template - class WithAsyncMethod_CreateArtifact : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithAsyncMethod_CreateArtifact() { - ::grpc::Service::MarkMethodAsync(0); - } - ~WithAsyncMethod_CreateArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status CreateArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestCreateArtifact(::grpc::ServerContext* context, 
::flyteidl::artifact::CreateArtifactRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::artifact::CreateArtifactResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(0, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithAsyncMethod_GetArtifact : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithAsyncMethod_GetArtifact() { - ::grpc::Service::MarkMethodAsync(1); - } - ~WithAsyncMethod_GetArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status GetArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestGetArtifact(::grpc::ServerContext* context, ::flyteidl::artifact::GetArtifactRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::artifact::GetArtifactResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(1, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithAsyncMethod_SearchArtifacts : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithAsyncMethod_SearchArtifacts() { - ::grpc::Service::MarkMethodAsync(2); - } - ~WithAsyncMethod_SearchArtifacts() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status SearchArtifacts(::grpc::ServerContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - 
return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestSearchArtifacts(::grpc::ServerContext* context, ::flyteidl::artifact::SearchArtifactsRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::artifact::SearchArtifactsResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(2, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithAsyncMethod_CreateTrigger : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithAsyncMethod_CreateTrigger() { - ::grpc::Service::MarkMethodAsync(3); - } - ~WithAsyncMethod_CreateTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status CreateTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestCreateTrigger(::grpc::ServerContext* context, ::flyteidl::artifact::CreateTriggerRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::artifact::CreateTriggerResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(3, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithAsyncMethod_DeleteTrigger : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithAsyncMethod_DeleteTrigger() { - ::grpc::Service::MarkMethodAsync(4); - } - ~WithAsyncMethod_DeleteTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status DeleteTrigger(::grpc::ServerContext* context, const 
::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestDeleteTrigger(::grpc::ServerContext* context, ::flyteidl::artifact::DeleteTriggerRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::artifact::DeleteTriggerResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(4, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithAsyncMethod_AddTag : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithAsyncMethod_AddTag() { - ::grpc::Service::MarkMethodAsync(5); - } - ~WithAsyncMethod_AddTag() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status AddTag(::grpc::ServerContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestAddTag(::grpc::ServerContext* context, ::flyteidl::artifact::AddTagRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::artifact::AddTagResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(5, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithAsyncMethod_RegisterProducer : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithAsyncMethod_RegisterProducer() { - ::grpc::Service::MarkMethodAsync(6); - } - ~WithAsyncMethod_RegisterProducer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this 
method - ::grpc::Status RegisterProducer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestRegisterProducer(::grpc::ServerContext* context, ::flyteidl::artifact::RegisterProducerRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::artifact::RegisterResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(6, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithAsyncMethod_RegisterConsumer : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithAsyncMethod_RegisterConsumer() { - ::grpc::Service::MarkMethodAsync(7); - } - ~WithAsyncMethod_RegisterConsumer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status RegisterConsumer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestRegisterConsumer(::grpc::ServerContext* context, ::flyteidl::artifact::RegisterConsumerRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::artifact::RegisterResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(7, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithAsyncMethod_SetExecutionInputs : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithAsyncMethod_SetExecutionInputs() { - 
::grpc::Service::MarkMethodAsync(8); - } - ~WithAsyncMethod_SetExecutionInputs() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status SetExecutionInputs(::grpc::ServerContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestSetExecutionInputs(::grpc::ServerContext* context, ::flyteidl::artifact::ExecutionInputsRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::artifact::ExecutionInputsResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(8, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithAsyncMethod_FindByWorkflowExec : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithAsyncMethod_FindByWorkflowExec() { - ::grpc::Service::MarkMethodAsync(9); - } - ~WithAsyncMethod_FindByWorkflowExec() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status FindByWorkflowExec(::grpc::ServerContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestFindByWorkflowExec(::grpc::ServerContext* context, ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::artifact::SearchArtifactsResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(9, context, request, response, new_call_cq, notification_cq, tag); - } - }; - typedef 
WithAsyncMethod_CreateArtifact > > > > > > > > > AsyncService; - template - class ExperimentalWithCallbackMethod_CreateArtifact : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithCallbackMethod_CreateArtifact() { - ::grpc::Service::experimental().MarkMethodCallback(0, - new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::CreateArtifactRequest, ::flyteidl::artifact::CreateArtifactResponse>( - [this](::grpc::ServerContext* context, - const ::flyteidl::artifact::CreateArtifactRequest* request, - ::flyteidl::artifact::CreateArtifactResponse* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - return this->CreateArtifact(context, request, response, controller); - })); - } - void SetMessageAllocatorFor_CreateArtifact( - ::grpc::experimental::MessageAllocator< ::flyteidl::artifact::CreateArtifactRequest, ::flyteidl::artifact::CreateArtifactResponse>* allocator) { - static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::CreateArtifactRequest, ::flyteidl::artifact::CreateArtifactResponse>*>( - ::grpc::Service::experimental().GetHandler(0)) - ->SetMessageAllocator(allocator); - } - ~ExperimentalWithCallbackMethod_CreateArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status CreateArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void CreateArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class 
ExperimentalWithCallbackMethod_GetArtifact : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithCallbackMethod_GetArtifact() { - ::grpc::Service::experimental().MarkMethodCallback(1, - new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::GetArtifactRequest, ::flyteidl::artifact::GetArtifactResponse>( - [this](::grpc::ServerContext* context, - const ::flyteidl::artifact::GetArtifactRequest* request, - ::flyteidl::artifact::GetArtifactResponse* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - return this->GetArtifact(context, request, response, controller); - })); - } - void SetMessageAllocatorFor_GetArtifact( - ::grpc::experimental::MessageAllocator< ::flyteidl::artifact::GetArtifactRequest, ::flyteidl::artifact::GetArtifactResponse>* allocator) { - static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::GetArtifactRequest, ::flyteidl::artifact::GetArtifactResponse>*>( - ::grpc::Service::experimental().GetHandler(1)) - ->SetMessageAllocator(allocator); - } - ~ExperimentalWithCallbackMethod_GetArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status GetArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void GetArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithCallbackMethod_SearchArtifacts : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) 
{} - public: - ExperimentalWithCallbackMethod_SearchArtifacts() { - ::grpc::Service::experimental().MarkMethodCallback(2, - new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::SearchArtifactsRequest, ::flyteidl::artifact::SearchArtifactsResponse>( - [this](::grpc::ServerContext* context, - const ::flyteidl::artifact::SearchArtifactsRequest* request, - ::flyteidl::artifact::SearchArtifactsResponse* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - return this->SearchArtifacts(context, request, response, controller); - })); - } - void SetMessageAllocatorFor_SearchArtifacts( - ::grpc::experimental::MessageAllocator< ::flyteidl::artifact::SearchArtifactsRequest, ::flyteidl::artifact::SearchArtifactsResponse>* allocator) { - static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::SearchArtifactsRequest, ::flyteidl::artifact::SearchArtifactsResponse>*>( - ::grpc::Service::experimental().GetHandler(2)) - ->SetMessageAllocator(allocator); - } - ~ExperimentalWithCallbackMethod_SearchArtifacts() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status SearchArtifacts(::grpc::ServerContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void SearchArtifacts(::grpc::ServerContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithCallbackMethod_CreateTrigger : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithCallbackMethod_CreateTrigger() { - 
::grpc::Service::experimental().MarkMethodCallback(3, - new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::CreateTriggerRequest, ::flyteidl::artifact::CreateTriggerResponse>( - [this](::grpc::ServerContext* context, - const ::flyteidl::artifact::CreateTriggerRequest* request, - ::flyteidl::artifact::CreateTriggerResponse* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - return this->CreateTrigger(context, request, response, controller); - })); - } - void SetMessageAllocatorFor_CreateTrigger( - ::grpc::experimental::MessageAllocator< ::flyteidl::artifact::CreateTriggerRequest, ::flyteidl::artifact::CreateTriggerResponse>* allocator) { - static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::CreateTriggerRequest, ::flyteidl::artifact::CreateTriggerResponse>*>( - ::grpc::Service::experimental().GetHandler(3)) - ->SetMessageAllocator(allocator); - } - ~ExperimentalWithCallbackMethod_CreateTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status CreateTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void CreateTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithCallbackMethod_DeleteTrigger : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithCallbackMethod_DeleteTrigger() { - ::grpc::Service::experimental().MarkMethodCallback(4, - new ::grpc::internal::CallbackUnaryHandler< 
::flyteidl::artifact::DeleteTriggerRequest, ::flyteidl::artifact::DeleteTriggerResponse>( - [this](::grpc::ServerContext* context, - const ::flyteidl::artifact::DeleteTriggerRequest* request, - ::flyteidl::artifact::DeleteTriggerResponse* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - return this->DeleteTrigger(context, request, response, controller); - })); - } - void SetMessageAllocatorFor_DeleteTrigger( - ::grpc::experimental::MessageAllocator< ::flyteidl::artifact::DeleteTriggerRequest, ::flyteidl::artifact::DeleteTriggerResponse>* allocator) { - static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::DeleteTriggerRequest, ::flyteidl::artifact::DeleteTriggerResponse>*>( - ::grpc::Service::experimental().GetHandler(4)) - ->SetMessageAllocator(allocator); - } - ~ExperimentalWithCallbackMethod_DeleteTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status DeleteTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void DeleteTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithCallbackMethod_AddTag : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithCallbackMethod_AddTag() { - ::grpc::Service::experimental().MarkMethodCallback(5, - new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::AddTagRequest, ::flyteidl::artifact::AddTagResponse>( - [this](::grpc::ServerContext* context, - 
const ::flyteidl::artifact::AddTagRequest* request, - ::flyteidl::artifact::AddTagResponse* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - return this->AddTag(context, request, response, controller); - })); - } - void SetMessageAllocatorFor_AddTag( - ::grpc::experimental::MessageAllocator< ::flyteidl::artifact::AddTagRequest, ::flyteidl::artifact::AddTagResponse>* allocator) { - static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::AddTagRequest, ::flyteidl::artifact::AddTagResponse>*>( - ::grpc::Service::experimental().GetHandler(5)) - ->SetMessageAllocator(allocator); - } - ~ExperimentalWithCallbackMethod_AddTag() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status AddTag(::grpc::ServerContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void AddTag(::grpc::ServerContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithCallbackMethod_RegisterProducer : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithCallbackMethod_RegisterProducer() { - ::grpc::Service::experimental().MarkMethodCallback(6, - new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::RegisterProducerRequest, ::flyteidl::artifact::RegisterResponse>( - [this](::grpc::ServerContext* context, - const ::flyteidl::artifact::RegisterProducerRequest* request, - ::flyteidl::artifact::RegisterResponse* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - return 
this->RegisterProducer(context, request, response, controller); - })); - } - void SetMessageAllocatorFor_RegisterProducer( - ::grpc::experimental::MessageAllocator< ::flyteidl::artifact::RegisterProducerRequest, ::flyteidl::artifact::RegisterResponse>* allocator) { - static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::RegisterProducerRequest, ::flyteidl::artifact::RegisterResponse>*>( - ::grpc::Service::experimental().GetHandler(6)) - ->SetMessageAllocator(allocator); - } - ~ExperimentalWithCallbackMethod_RegisterProducer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status RegisterProducer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void RegisterProducer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithCallbackMethod_RegisterConsumer : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithCallbackMethod_RegisterConsumer() { - ::grpc::Service::experimental().MarkMethodCallback(7, - new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::RegisterConsumerRequest, ::flyteidl::artifact::RegisterResponse>( - [this](::grpc::ServerContext* context, - const ::flyteidl::artifact::RegisterConsumerRequest* request, - ::flyteidl::artifact::RegisterResponse* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - return this->RegisterConsumer(context, request, response, controller); - })); - } - void 
SetMessageAllocatorFor_RegisterConsumer( - ::grpc::experimental::MessageAllocator< ::flyteidl::artifact::RegisterConsumerRequest, ::flyteidl::artifact::RegisterResponse>* allocator) { - static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::RegisterConsumerRequest, ::flyteidl::artifact::RegisterResponse>*>( - ::grpc::Service::experimental().GetHandler(7)) - ->SetMessageAllocator(allocator); - } - ~ExperimentalWithCallbackMethod_RegisterConsumer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status RegisterConsumer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void RegisterConsumer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithCallbackMethod_SetExecutionInputs : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithCallbackMethod_SetExecutionInputs() { - ::grpc::Service::experimental().MarkMethodCallback(8, - new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::ExecutionInputsRequest, ::flyteidl::artifact::ExecutionInputsResponse>( - [this](::grpc::ServerContext* context, - const ::flyteidl::artifact::ExecutionInputsRequest* request, - ::flyteidl::artifact::ExecutionInputsResponse* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - return this->SetExecutionInputs(context, request, response, controller); - })); - } - void SetMessageAllocatorFor_SetExecutionInputs( - 
::grpc::experimental::MessageAllocator< ::flyteidl::artifact::ExecutionInputsRequest, ::flyteidl::artifact::ExecutionInputsResponse>* allocator) { - static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::ExecutionInputsRequest, ::flyteidl::artifact::ExecutionInputsResponse>*>( - ::grpc::Service::experimental().GetHandler(8)) - ->SetMessageAllocator(allocator); - } - ~ExperimentalWithCallbackMethod_SetExecutionInputs() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status SetExecutionInputs(::grpc::ServerContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void SetExecutionInputs(::grpc::ServerContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithCallbackMethod_FindByWorkflowExec : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithCallbackMethod_FindByWorkflowExec() { - ::grpc::Service::experimental().MarkMethodCallback(9, - new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::FindByWorkflowExecRequest, ::flyteidl::artifact::SearchArtifactsResponse>( - [this](::grpc::ServerContext* context, - const ::flyteidl::artifact::FindByWorkflowExecRequest* request, - ::flyteidl::artifact::SearchArtifactsResponse* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - return this->FindByWorkflowExec(context, request, response, controller); - })); - } - void SetMessageAllocatorFor_FindByWorkflowExec( - ::grpc::experimental::MessageAllocator< 
::flyteidl::artifact::FindByWorkflowExecRequest, ::flyteidl::artifact::SearchArtifactsResponse>* allocator) { - static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::artifact::FindByWorkflowExecRequest, ::flyteidl::artifact::SearchArtifactsResponse>*>( - ::grpc::Service::experimental().GetHandler(9)) - ->SetMessageAllocator(allocator); - } - ~ExperimentalWithCallbackMethod_FindByWorkflowExec() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status FindByWorkflowExec(::grpc::ServerContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void FindByWorkflowExec(::grpc::ServerContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - typedef ExperimentalWithCallbackMethod_CreateArtifact > > > > > > > > > ExperimentalCallbackService; - template - class WithGenericMethod_CreateArtifact : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithGenericMethod_CreateArtifact() { - ::grpc::Service::MarkMethodGeneric(0); - } - ~WithGenericMethod_CreateArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status CreateArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - }; - template - class WithGenericMethod_GetArtifact : public BaseClass { - private: - void 
BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithGenericMethod_GetArtifact() { - ::grpc::Service::MarkMethodGeneric(1); - } - ~WithGenericMethod_GetArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status GetArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - }; - template - class WithGenericMethod_SearchArtifacts : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithGenericMethod_SearchArtifacts() { - ::grpc::Service::MarkMethodGeneric(2); - } - ~WithGenericMethod_SearchArtifacts() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status SearchArtifacts(::grpc::ServerContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - }; - template - class WithGenericMethod_CreateTrigger : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithGenericMethod_CreateTrigger() { - ::grpc::Service::MarkMethodGeneric(3); - } - ~WithGenericMethod_CreateTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status CreateTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - }; - template - class WithGenericMethod_DeleteTrigger : public BaseClass { - private: - void 
BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithGenericMethod_DeleteTrigger() { - ::grpc::Service::MarkMethodGeneric(4); - } - ~WithGenericMethod_DeleteTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status DeleteTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - }; - template - class WithGenericMethod_AddTag : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithGenericMethod_AddTag() { - ::grpc::Service::MarkMethodGeneric(5); - } - ~WithGenericMethod_AddTag() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status AddTag(::grpc::ServerContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - }; - template - class WithGenericMethod_RegisterProducer : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithGenericMethod_RegisterProducer() { - ::grpc::Service::MarkMethodGeneric(6); - } - ~WithGenericMethod_RegisterProducer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status RegisterProducer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - }; - template - class WithGenericMethod_RegisterConsumer : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service 
*service) {} - public: - WithGenericMethod_RegisterConsumer() { - ::grpc::Service::MarkMethodGeneric(7); - } - ~WithGenericMethod_RegisterConsumer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status RegisterConsumer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - }; - template - class WithGenericMethod_SetExecutionInputs : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithGenericMethod_SetExecutionInputs() { - ::grpc::Service::MarkMethodGeneric(8); - } - ~WithGenericMethod_SetExecutionInputs() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status SetExecutionInputs(::grpc::ServerContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - }; - template - class WithGenericMethod_FindByWorkflowExec : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithGenericMethod_FindByWorkflowExec() { - ::grpc::Service::MarkMethodGeneric(9); - } - ~WithGenericMethod_FindByWorkflowExec() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status FindByWorkflowExec(::grpc::ServerContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - }; - template - class WithRawMethod_CreateArtifact : public BaseClass { - private: - void 
BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithRawMethod_CreateArtifact() { - ::grpc::Service::MarkMethodRaw(0); - } - ~WithRawMethod_CreateArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status CreateArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestCreateArtifact(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(0, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithRawMethod_GetArtifact : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithRawMethod_GetArtifact() { - ::grpc::Service::MarkMethodRaw(1); - } - ~WithRawMethod_GetArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status GetArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestGetArtifact(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(1, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithRawMethod_SearchArtifacts : public BaseClass { - private: - 
void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithRawMethod_SearchArtifacts() { - ::grpc::Service::MarkMethodRaw(2); - } - ~WithRawMethod_SearchArtifacts() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status SearchArtifacts(::grpc::ServerContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestSearchArtifacts(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(2, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithRawMethod_CreateTrigger : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithRawMethod_CreateTrigger() { - ::grpc::Service::MarkMethodRaw(3); - } - ~WithRawMethod_CreateTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status CreateTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestCreateTrigger(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(3, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithRawMethod_DeleteTrigger : public 
BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithRawMethod_DeleteTrigger() { - ::grpc::Service::MarkMethodRaw(4); - } - ~WithRawMethod_DeleteTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status DeleteTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestDeleteTrigger(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(4, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithRawMethod_AddTag : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithRawMethod_AddTag() { - ::grpc::Service::MarkMethodRaw(5); - } - ~WithRawMethod_AddTag() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status AddTag(::grpc::ServerContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestAddTag(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(5, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithRawMethod_RegisterProducer : public BaseClass { - private: - void 
BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithRawMethod_RegisterProducer() { - ::grpc::Service::MarkMethodRaw(6); - } - ~WithRawMethod_RegisterProducer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status RegisterProducer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestRegisterProducer(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(6, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithRawMethod_RegisterConsumer : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithRawMethod_RegisterConsumer() { - ::grpc::Service::MarkMethodRaw(7); - } - ~WithRawMethod_RegisterConsumer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status RegisterConsumer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestRegisterConsumer(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(7, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithRawMethod_SetExecutionInputs 
: public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithRawMethod_SetExecutionInputs() { - ::grpc::Service::MarkMethodRaw(8); - } - ~WithRawMethod_SetExecutionInputs() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status SetExecutionInputs(::grpc::ServerContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestSetExecutionInputs(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(8, context, request, response, new_call_cq, notification_cq, tag); - } - }; - template - class WithRawMethod_FindByWorkflowExec : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithRawMethod_FindByWorkflowExec() { - ::grpc::Service::MarkMethodRaw(9); - } - ~WithRawMethod_FindByWorkflowExec() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status FindByWorkflowExec(::grpc::ServerContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - void RequestFindByWorkflowExec(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(9, context, request, response, new_call_cq, 
notification_cq, tag); - } - }; - template - class ExperimentalWithRawCallbackMethod_CreateArtifact : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithRawCallbackMethod_CreateArtifact() { - ::grpc::Service::experimental().MarkMethodRawCallback(0, - new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( - [this](::grpc::ServerContext* context, - const ::grpc::ByteBuffer* request, - ::grpc::ByteBuffer* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - this->CreateArtifact(context, request, response, controller); - })); - } - ~ExperimentalWithRawCallbackMethod_CreateArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status CreateArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void CreateArtifact(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithRawCallbackMethod_GetArtifact : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithRawCallbackMethod_GetArtifact() { - ::grpc::Service::experimental().MarkMethodRawCallback(1, - new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( - [this](::grpc::ServerContext* context, - const ::grpc::ByteBuffer* request, - ::grpc::ByteBuffer* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - this->GetArtifact(context, request, response, controller); - })); - } - 
~ExperimentalWithRawCallbackMethod_GetArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status GetArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void GetArtifact(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithRawCallbackMethod_SearchArtifacts : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithRawCallbackMethod_SearchArtifacts() { - ::grpc::Service::experimental().MarkMethodRawCallback(2, - new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( - [this](::grpc::ServerContext* context, - const ::grpc::ByteBuffer* request, - ::grpc::ByteBuffer* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - this->SearchArtifacts(context, request, response, controller); - })); - } - ~ExperimentalWithRawCallbackMethod_SearchArtifacts() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status SearchArtifacts(::grpc::ServerContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void SearchArtifacts(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { 
controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithRawCallbackMethod_CreateTrigger : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithRawCallbackMethod_CreateTrigger() { - ::grpc::Service::experimental().MarkMethodRawCallback(3, - new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( - [this](::grpc::ServerContext* context, - const ::grpc::ByteBuffer* request, - ::grpc::ByteBuffer* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - this->CreateTrigger(context, request, response, controller); - })); - } - ~ExperimentalWithRawCallbackMethod_CreateTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status CreateTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void CreateTrigger(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithRawCallbackMethod_DeleteTrigger : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithRawCallbackMethod_DeleteTrigger() { - ::grpc::Service::experimental().MarkMethodRawCallback(4, - new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( - [this](::grpc::ServerContext* context, - const ::grpc::ByteBuffer* request, - ::grpc::ByteBuffer* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - this->DeleteTrigger(context, 
request, response, controller); - })); - } - ~ExperimentalWithRawCallbackMethod_DeleteTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status DeleteTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void DeleteTrigger(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithRawCallbackMethod_AddTag : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithRawCallbackMethod_AddTag() { - ::grpc::Service::experimental().MarkMethodRawCallback(5, - new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( - [this](::grpc::ServerContext* context, - const ::grpc::ByteBuffer* request, - ::grpc::ByteBuffer* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - this->AddTag(context, request, response, controller); - })); - } - ~ExperimentalWithRawCallbackMethod_AddTag() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status AddTag(::grpc::ServerContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void AddTag(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, 
"")); } - }; - template - class ExperimentalWithRawCallbackMethod_RegisterProducer : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithRawCallbackMethod_RegisterProducer() { - ::grpc::Service::experimental().MarkMethodRawCallback(6, - new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( - [this](::grpc::ServerContext* context, - const ::grpc::ByteBuffer* request, - ::grpc::ByteBuffer* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - this->RegisterProducer(context, request, response, controller); - })); - } - ~ExperimentalWithRawCallbackMethod_RegisterProducer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status RegisterProducer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void RegisterProducer(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithRawCallbackMethod_RegisterConsumer : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithRawCallbackMethod_RegisterConsumer() { - ::grpc::Service::experimental().MarkMethodRawCallback(7, - new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( - [this](::grpc::ServerContext* context, - const ::grpc::ByteBuffer* request, - ::grpc::ByteBuffer* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - this->RegisterConsumer(context, request, response, controller); - })); - } - 
~ExperimentalWithRawCallbackMethod_RegisterConsumer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status RegisterConsumer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void RegisterConsumer(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithRawCallbackMethod_SetExecutionInputs : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithRawCallbackMethod_SetExecutionInputs() { - ::grpc::Service::experimental().MarkMethodRawCallback(8, - new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( - [this](::grpc::ServerContext* context, - const ::grpc::ByteBuffer* request, - ::grpc::ByteBuffer* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - this->SetExecutionInputs(context, request, response, controller); - })); - } - ~ExperimentalWithRawCallbackMethod_SetExecutionInputs() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status SetExecutionInputs(::grpc::ServerContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void SetExecutionInputs(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { 
controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class ExperimentalWithRawCallbackMethod_FindByWorkflowExec : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - ExperimentalWithRawCallbackMethod_FindByWorkflowExec() { - ::grpc::Service::experimental().MarkMethodRawCallback(9, - new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( - [this](::grpc::ServerContext* context, - const ::grpc::ByteBuffer* request, - ::grpc::ByteBuffer* response, - ::grpc::experimental::ServerCallbackRpcController* controller) { - this->FindByWorkflowExec(context, request, response, controller); - })); - } - ~ExperimentalWithRawCallbackMethod_FindByWorkflowExec() override { - BaseClassMustBeDerivedFromService(this); - } - // disable synchronous version of this method - ::grpc::Status FindByWorkflowExec(::grpc::ServerContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - virtual void FindByWorkflowExec(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } - }; - template - class WithStreamedUnaryMethod_CreateArtifact : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithStreamedUnaryMethod_CreateArtifact() { - ::grpc::Service::MarkMethodStreamed(0, - new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::artifact::CreateArtifactRequest, ::flyteidl::artifact::CreateArtifactResponse>(std::bind(&WithStreamedUnaryMethod_CreateArtifact::StreamedCreateArtifact, this, std::placeholders::_1, std::placeholders::_2))); - } - 
~WithStreamedUnaryMethod_CreateArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable regular version of this method - ::grpc::Status CreateArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateArtifactRequest* request, ::flyteidl::artifact::CreateArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - // replace default version of method with streamed unary - virtual ::grpc::Status StreamedCreateArtifact(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::artifact::CreateArtifactRequest,::flyteidl::artifact::CreateArtifactResponse>* server_unary_streamer) = 0; - }; - template - class WithStreamedUnaryMethod_GetArtifact : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithStreamedUnaryMethod_GetArtifact() { - ::grpc::Service::MarkMethodStreamed(1, - new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::artifact::GetArtifactRequest, ::flyteidl::artifact::GetArtifactResponse>(std::bind(&WithStreamedUnaryMethod_GetArtifact::StreamedGetArtifact, this, std::placeholders::_1, std::placeholders::_2))); - } - ~WithStreamedUnaryMethod_GetArtifact() override { - BaseClassMustBeDerivedFromService(this); - } - // disable regular version of this method - ::grpc::Status GetArtifact(::grpc::ServerContext* context, const ::flyteidl::artifact::GetArtifactRequest* request, ::flyteidl::artifact::GetArtifactResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - // replace default version of method with streamed unary - virtual ::grpc::Status StreamedGetArtifact(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::artifact::GetArtifactRequest,::flyteidl::artifact::GetArtifactResponse>* server_unary_streamer) = 0; - }; - template - class WithStreamedUnaryMethod_SearchArtifacts : public BaseClass { - private: - void 
BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithStreamedUnaryMethod_SearchArtifacts() { - ::grpc::Service::MarkMethodStreamed(2, - new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::artifact::SearchArtifactsRequest, ::flyteidl::artifact::SearchArtifactsResponse>(std::bind(&WithStreamedUnaryMethod_SearchArtifacts::StreamedSearchArtifacts, this, std::placeholders::_1, std::placeholders::_2))); - } - ~WithStreamedUnaryMethod_SearchArtifacts() override { - BaseClassMustBeDerivedFromService(this); - } - // disable regular version of this method - ::grpc::Status SearchArtifacts(::grpc::ServerContext* context, const ::flyteidl::artifact::SearchArtifactsRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - // replace default version of method with streamed unary - virtual ::grpc::Status StreamedSearchArtifacts(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::artifact::SearchArtifactsRequest,::flyteidl::artifact::SearchArtifactsResponse>* server_unary_streamer) = 0; - }; - template - class WithStreamedUnaryMethod_CreateTrigger : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithStreamedUnaryMethod_CreateTrigger() { - ::grpc::Service::MarkMethodStreamed(3, - new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::artifact::CreateTriggerRequest, ::flyteidl::artifact::CreateTriggerResponse>(std::bind(&WithStreamedUnaryMethod_CreateTrigger::StreamedCreateTrigger, this, std::placeholders::_1, std::placeholders::_2))); - } - ~WithStreamedUnaryMethod_CreateTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable regular version of this method - ::grpc::Status CreateTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::CreateTriggerRequest* request, ::flyteidl::artifact::CreateTriggerResponse* response) override { 
- abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - // replace default version of method with streamed unary - virtual ::grpc::Status StreamedCreateTrigger(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::artifact::CreateTriggerRequest,::flyteidl::artifact::CreateTriggerResponse>* server_unary_streamer) = 0; - }; - template - class WithStreamedUnaryMethod_DeleteTrigger : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithStreamedUnaryMethod_DeleteTrigger() { - ::grpc::Service::MarkMethodStreamed(4, - new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::artifact::DeleteTriggerRequest, ::flyteidl::artifact::DeleteTriggerResponse>(std::bind(&WithStreamedUnaryMethod_DeleteTrigger::StreamedDeleteTrigger, this, std::placeholders::_1, std::placeholders::_2))); - } - ~WithStreamedUnaryMethod_DeleteTrigger() override { - BaseClassMustBeDerivedFromService(this); - } - // disable regular version of this method - ::grpc::Status DeleteTrigger(::grpc::ServerContext* context, const ::flyteidl::artifact::DeleteTriggerRequest* request, ::flyteidl::artifact::DeleteTriggerResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - // replace default version of method with streamed unary - virtual ::grpc::Status StreamedDeleteTrigger(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::artifact::DeleteTriggerRequest,::flyteidl::artifact::DeleteTriggerResponse>* server_unary_streamer) = 0; - }; - template - class WithStreamedUnaryMethod_AddTag : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithStreamedUnaryMethod_AddTag() { - ::grpc::Service::MarkMethodStreamed(5, - new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::artifact::AddTagRequest, 
::flyteidl::artifact::AddTagResponse>(std::bind(&WithStreamedUnaryMethod_AddTag::StreamedAddTag, this, std::placeholders::_1, std::placeholders::_2))); - } - ~WithStreamedUnaryMethod_AddTag() override { - BaseClassMustBeDerivedFromService(this); - } - // disable regular version of this method - ::grpc::Status AddTag(::grpc::ServerContext* context, const ::flyteidl::artifact::AddTagRequest* request, ::flyteidl::artifact::AddTagResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - // replace default version of method with streamed unary - virtual ::grpc::Status StreamedAddTag(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::artifact::AddTagRequest,::flyteidl::artifact::AddTagResponse>* server_unary_streamer) = 0; - }; - template - class WithStreamedUnaryMethod_RegisterProducer : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithStreamedUnaryMethod_RegisterProducer() { - ::grpc::Service::MarkMethodStreamed(6, - new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::artifact::RegisterProducerRequest, ::flyteidl::artifact::RegisterResponse>(std::bind(&WithStreamedUnaryMethod_RegisterProducer::StreamedRegisterProducer, this, std::placeholders::_1, std::placeholders::_2))); - } - ~WithStreamedUnaryMethod_RegisterProducer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable regular version of this method - ::grpc::Status RegisterProducer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterProducerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - // replace default version of method with streamed unary - virtual ::grpc::Status StreamedRegisterProducer(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< 
::flyteidl::artifact::RegisterProducerRequest,::flyteidl::artifact::RegisterResponse>* server_unary_streamer) = 0; - }; - template - class WithStreamedUnaryMethod_RegisterConsumer : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithStreamedUnaryMethod_RegisterConsumer() { - ::grpc::Service::MarkMethodStreamed(7, - new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::artifact::RegisterConsumerRequest, ::flyteidl::artifact::RegisterResponse>(std::bind(&WithStreamedUnaryMethod_RegisterConsumer::StreamedRegisterConsumer, this, std::placeholders::_1, std::placeholders::_2))); - } - ~WithStreamedUnaryMethod_RegisterConsumer() override { - BaseClassMustBeDerivedFromService(this); - } - // disable regular version of this method - ::grpc::Status RegisterConsumer(::grpc::ServerContext* context, const ::flyteidl::artifact::RegisterConsumerRequest* request, ::flyteidl::artifact::RegisterResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - // replace default version of method with streamed unary - virtual ::grpc::Status StreamedRegisterConsumer(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::artifact::RegisterConsumerRequest,::flyteidl::artifact::RegisterResponse>* server_unary_streamer) = 0; - }; - template - class WithStreamedUnaryMethod_SetExecutionInputs : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithStreamedUnaryMethod_SetExecutionInputs() { - ::grpc::Service::MarkMethodStreamed(8, - new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::artifact::ExecutionInputsRequest, ::flyteidl::artifact::ExecutionInputsResponse>(std::bind(&WithStreamedUnaryMethod_SetExecutionInputs::StreamedSetExecutionInputs, this, std::placeholders::_1, std::placeholders::_2))); - } - ~WithStreamedUnaryMethod_SetExecutionInputs() override { - BaseClassMustBeDerivedFromService(this); 
- } - // disable regular version of this method - ::grpc::Status SetExecutionInputs(::grpc::ServerContext* context, const ::flyteidl::artifact::ExecutionInputsRequest* request, ::flyteidl::artifact::ExecutionInputsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - // replace default version of method with streamed unary - virtual ::grpc::Status StreamedSetExecutionInputs(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::artifact::ExecutionInputsRequest,::flyteidl::artifact::ExecutionInputsResponse>* server_unary_streamer) = 0; - }; - template - class WithStreamedUnaryMethod_FindByWorkflowExec : public BaseClass { - private: - void BaseClassMustBeDerivedFromService(const Service *service) {} - public: - WithStreamedUnaryMethod_FindByWorkflowExec() { - ::grpc::Service::MarkMethodStreamed(9, - new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::artifact::FindByWorkflowExecRequest, ::flyteidl::artifact::SearchArtifactsResponse>(std::bind(&WithStreamedUnaryMethod_FindByWorkflowExec::StreamedFindByWorkflowExec, this, std::placeholders::_1, std::placeholders::_2))); - } - ~WithStreamedUnaryMethod_FindByWorkflowExec() override { - BaseClassMustBeDerivedFromService(this); - } - // disable regular version of this method - ::grpc::Status FindByWorkflowExec(::grpc::ServerContext* context, const ::flyteidl::artifact::FindByWorkflowExecRequest* request, ::flyteidl::artifact::SearchArtifactsResponse* response) override { - abort(); - return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); - } - // replace default version of method with streamed unary - virtual ::grpc::Status StreamedFindByWorkflowExec(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::artifact::FindByWorkflowExecRequest,::flyteidl::artifact::SearchArtifactsResponse>* server_unary_streamer) = 0; - }; - typedef WithStreamedUnaryMethod_CreateArtifact > > > > > > > > > StreamedUnaryService; - typedef 
Service SplitStreamedService; - typedef WithStreamedUnaryMethod_CreateArtifact > > > > > > > > > StreamedService; -}; - -} // namespace artifact -} // namespace flyteidl - - -#endif // GRPC_flyteidl_2fartifact_2fartifacts_2eproto__INCLUDED diff --git a/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.pb.cc deleted file mode 100644 index 230d0b093a..0000000000 --- a/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.pb.cc +++ /dev/null @@ -1,9792 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: flyteidl/artifact/artifacts.proto - -#include "flyteidl/artifact/artifacts.pb.h" - -#include - -#include -#include -#include -#include -#include -#include -#include -#include -// @@protoc_insertion_point(includes) -#include - -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fadmin_2flaunch_5fplan_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_LaunchPlan_flyteidl_2fadmin_2flaunch_5fplan_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fartifact_2fartifacts_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_CreateArtifactRequest_PartitionsEntry_DoNotUse_flyteidl_2fartifact_2fartifacts_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fartifact_2fartifacts_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_SearchOptions_flyteidl_2fartifact_2fartifacts_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fartifact_2fartifacts_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactConsumer_flyteidl_2fartifact_2fartifacts_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fartifact_2fartifacts_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactProducer_flyteidl_2fartifact_2fartifacts_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fartifact_2fartifacts_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactSource_flyteidl_2fartifact_2fartifacts_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fartifact_2fartifacts_2eproto 
::google::protobuf::internal::SCCInfo<3> scc_info_ArtifactSpec_flyteidl_2fartifact_2fartifacts_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fartifact_2fartifacts_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_Artifact_flyteidl_2fartifact_2fartifacts_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_ArtifactKey_flyteidl_2fcore_2fartifact_5fid_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_Partitions_flyteidl_2fcore_2fartifact_5fid_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_ArtifactQuery_flyteidl_2fcore_2fartifact_5fid_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fidentifier_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_Identifier_flyteidl_2fcore_2fidentifier_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fidentifier_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_WorkflowExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2finterface_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_ParameterMap_flyteidl_2fcore_2finterface_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2finterface_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_VariableMap_flyteidl_2fcore_2finterface_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fliterals_2eproto ::google::protobuf::internal::SCCInfo<10> scc_info_Literal_flyteidl_2fcore_2fliterals_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2ftypes_2eproto ::google::protobuf::internal::SCCInfo<5> scc_info_LiteralType_flyteidl_2fcore_2ftypes_2eproto; 
-extern PROTOBUF_INTERNAL_EXPORT_google_2fprotobuf_2fany_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_Any_google_2fprotobuf_2fany_2eproto; -namespace flyteidl { -namespace artifact { -class ArtifactDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _Artifact_default_instance_; -class CreateArtifactRequest_PartitionsEntry_DoNotUseDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _CreateArtifactRequest_PartitionsEntry_DoNotUse_default_instance_; -class CreateArtifactRequestDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _CreateArtifactRequest_default_instance_; -class ArtifactSourceDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _ArtifactSource_default_instance_; -class ArtifactSpecDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _ArtifactSpec_default_instance_; -class CreateArtifactResponseDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _CreateArtifactResponse_default_instance_; -class GetArtifactRequestDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _GetArtifactRequest_default_instance_; -class GetArtifactResponseDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _GetArtifactResponse_default_instance_; -class SearchOptionsDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _SearchOptions_default_instance_; -class SearchArtifactsRequestDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _SearchArtifactsRequest_default_instance_; -class SearchArtifactsResponseDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} 
_SearchArtifactsResponse_default_instance_; -class FindByWorkflowExecRequestDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _FindByWorkflowExecRequest_default_instance_; -class AddTagRequestDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _AddTagRequest_default_instance_; -class AddTagResponseDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _AddTagResponse_default_instance_; -class CreateTriggerRequestDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _CreateTriggerRequest_default_instance_; -class CreateTriggerResponseDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _CreateTriggerResponse_default_instance_; -class DeleteTriggerRequestDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _DeleteTriggerRequest_default_instance_; -class DeleteTriggerResponseDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _DeleteTriggerResponse_default_instance_; -class ArtifactProducerDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _ArtifactProducer_default_instance_; -class RegisterProducerRequestDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _RegisterProducerRequest_default_instance_; -class ArtifactConsumerDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _ArtifactConsumer_default_instance_; -class RegisterConsumerRequestDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _RegisterConsumerRequest_default_instance_; -class RegisterResponseDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} 
_RegisterResponse_default_instance_; -class ExecutionInputsRequestDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _ExecutionInputsRequest_default_instance_; -class ExecutionInputsResponseDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _ExecutionInputsResponse_default_instance_; -} // namespace artifact -} // namespace flyteidl -static void InitDefaultsArtifact_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_Artifact_default_instance_; - new (ptr) ::flyteidl::artifact::Artifact(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::Artifact::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<3> scc_info_Artifact_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 3, InitDefaultsArtifact_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto.base, - &scc_info_ArtifactSpec_flyteidl_2fartifact_2fartifacts_2eproto.base, - &scc_info_ArtifactSource_flyteidl_2fartifact_2fartifacts_2eproto.base,}}; - -static void InitDefaultsCreateArtifactRequest_PartitionsEntry_DoNotUse_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_CreateArtifactRequest_PartitionsEntry_DoNotUse_default_instance_; - new (ptr) ::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse(); - } - ::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<0> scc_info_CreateArtifactRequest_PartitionsEntry_DoNotUse_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, 
InitDefaultsCreateArtifactRequest_PartitionsEntry_DoNotUse_flyteidl_2fartifact_2fartifacts_2eproto}, {}}; - -static void InitDefaultsCreateArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_CreateArtifactRequest_default_instance_; - new (ptr) ::flyteidl::artifact::CreateArtifactRequest(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::CreateArtifactRequest::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<4> scc_info_CreateArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 4, InitDefaultsCreateArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_ArtifactKey_flyteidl_2fcore_2fartifact_5fid_2eproto.base, - &scc_info_ArtifactSpec_flyteidl_2fartifact_2fartifacts_2eproto.base, - &scc_info_CreateArtifactRequest_PartitionsEntry_DoNotUse_flyteidl_2fartifact_2fartifacts_2eproto.base, - &scc_info_ArtifactSource_flyteidl_2fartifact_2fartifacts_2eproto.base,}}; - -static void InitDefaultsArtifactSource_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_ArtifactSource_default_instance_; - new (ptr) ::flyteidl::artifact::ArtifactSource(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::ArtifactSource::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactSource_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 2, InitDefaultsArtifactSource_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_WorkflowExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto.base, - &scc_info_Identifier_flyteidl_2fcore_2fidentifier_2eproto.base,}}; - -static void 
InitDefaultsArtifactSpec_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_ArtifactSpec_default_instance_; - new (ptr) ::flyteidl::artifact::ArtifactSpec(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::ArtifactSpec::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<3> scc_info_ArtifactSpec_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 3, InitDefaultsArtifactSpec_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_Literal_flyteidl_2fcore_2fliterals_2eproto.base, - &scc_info_LiteralType_flyteidl_2fcore_2ftypes_2eproto.base, - &scc_info_Any_google_2fprotobuf_2fany_2eproto.base,}}; - -static void InitDefaultsCreateArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_CreateArtifactResponse_default_instance_; - new (ptr) ::flyteidl::artifact::CreateArtifactResponse(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::CreateArtifactResponse::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<1> scc_info_CreateArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsCreateArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_Artifact_flyteidl_2fartifact_2fartifacts_2eproto.base,}}; - -static void InitDefaultsGetArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_GetArtifactRequest_default_instance_; - new (ptr) ::flyteidl::artifact::GetArtifactRequest(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::GetArtifactRequest::InitAsDefaultInstance(); -} - 
-::google::protobuf::internal::SCCInfo<1> scc_info_GetArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsGetArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_ArtifactQuery_flyteidl_2fcore_2fartifact_5fid_2eproto.base,}}; - -static void InitDefaultsGetArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_GetArtifactResponse_default_instance_; - new (ptr) ::flyteidl::artifact::GetArtifactResponse(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::GetArtifactResponse::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<1> scc_info_GetArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsGetArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_Artifact_flyteidl_2fartifact_2fartifacts_2eproto.base,}}; - -static void InitDefaultsSearchOptions_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_SearchOptions_default_instance_; - new (ptr) ::flyteidl::artifact::SearchOptions(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::SearchOptions::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<0> scc_info_SearchOptions_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsSearchOptions_flyteidl_2fartifact_2fartifacts_2eproto}, {}}; - -static void InitDefaultsSearchArtifactsRequest_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_SearchArtifactsRequest_default_instance_; - new (ptr) 
::flyteidl::artifact::SearchArtifactsRequest(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::SearchArtifactsRequest::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<3> scc_info_SearchArtifactsRequest_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 3, InitDefaultsSearchArtifactsRequest_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_ArtifactKey_flyteidl_2fcore_2fartifact_5fid_2eproto.base, - &scc_info_Partitions_flyteidl_2fcore_2fartifact_5fid_2eproto.base, - &scc_info_SearchOptions_flyteidl_2fartifact_2fartifacts_2eproto.base,}}; - -static void InitDefaultsSearchArtifactsResponse_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_SearchArtifactsResponse_default_instance_; - new (ptr) ::flyteidl::artifact::SearchArtifactsResponse(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::SearchArtifactsResponse::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<1> scc_info_SearchArtifactsResponse_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsSearchArtifactsResponse_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_Artifact_flyteidl_2fartifact_2fartifacts_2eproto.base,}}; - -static void InitDefaultsFindByWorkflowExecRequest_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_FindByWorkflowExecRequest_default_instance_; - new (ptr) ::flyteidl::artifact::FindByWorkflowExecRequest(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::FindByWorkflowExecRequest::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<1> 
scc_info_FindByWorkflowExecRequest_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsFindByWorkflowExecRequest_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_WorkflowExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto.base,}}; - -static void InitDefaultsAddTagRequest_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_AddTagRequest_default_instance_; - new (ptr) ::flyteidl::artifact::AddTagRequest(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::AddTagRequest::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<1> scc_info_AddTagRequest_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsAddTagRequest_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto.base,}}; - -static void InitDefaultsAddTagResponse_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_AddTagResponse_default_instance_; - new (ptr) ::flyteidl::artifact::AddTagResponse(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::AddTagResponse::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<0> scc_info_AddTagResponse_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsAddTagResponse_flyteidl_2fartifact_2fartifacts_2eproto}, {}}; - -static void InitDefaultsCreateTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_CreateTriggerRequest_default_instance_; - new (ptr) ::flyteidl::artifact::CreateTriggerRequest(); - 
::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::CreateTriggerRequest::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<1> scc_info_CreateTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsCreateTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_LaunchPlan_flyteidl_2fadmin_2flaunch_5fplan_2eproto.base,}}; - -static void InitDefaultsCreateTriggerResponse_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_CreateTriggerResponse_default_instance_; - new (ptr) ::flyteidl::artifact::CreateTriggerResponse(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::CreateTriggerResponse::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<0> scc_info_CreateTriggerResponse_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsCreateTriggerResponse_flyteidl_2fartifact_2fartifacts_2eproto}, {}}; - -static void InitDefaultsDeleteTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_DeleteTriggerRequest_default_instance_; - new (ptr) ::flyteidl::artifact::DeleteTriggerRequest(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::DeleteTriggerRequest::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<1> scc_info_DeleteTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsDeleteTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_Identifier_flyteidl_2fcore_2fidentifier_2eproto.base,}}; - -static void 
InitDefaultsDeleteTriggerResponse_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_DeleteTriggerResponse_default_instance_; - new (ptr) ::flyteidl::artifact::DeleteTriggerResponse(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::DeleteTriggerResponse::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<0> scc_info_DeleteTriggerResponse_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsDeleteTriggerResponse_flyteidl_2fartifact_2fartifacts_2eproto}, {}}; - -static void InitDefaultsArtifactProducer_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_ArtifactProducer_default_instance_; - new (ptr) ::flyteidl::artifact::ArtifactProducer(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::ArtifactProducer::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactProducer_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 2, InitDefaultsArtifactProducer_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_Identifier_flyteidl_2fcore_2fidentifier_2eproto.base, - &scc_info_VariableMap_flyteidl_2fcore_2finterface_2eproto.base,}}; - -static void InitDefaultsRegisterProducerRequest_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_RegisterProducerRequest_default_instance_; - new (ptr) ::flyteidl::artifact::RegisterProducerRequest(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::RegisterProducerRequest::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<1> 
scc_info_RegisterProducerRequest_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsRegisterProducerRequest_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_ArtifactProducer_flyteidl_2fartifact_2fartifacts_2eproto.base,}}; - -static void InitDefaultsArtifactConsumer_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_ArtifactConsumer_default_instance_; - new (ptr) ::flyteidl::artifact::ArtifactConsumer(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::ArtifactConsumer::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactConsumer_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 2, InitDefaultsArtifactConsumer_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_Identifier_flyteidl_2fcore_2fidentifier_2eproto.base, - &scc_info_ParameterMap_flyteidl_2fcore_2finterface_2eproto.base,}}; - -static void InitDefaultsRegisterConsumerRequest_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_RegisterConsumerRequest_default_instance_; - new (ptr) ::flyteidl::artifact::RegisterConsumerRequest(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::RegisterConsumerRequest::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<1> scc_info_RegisterConsumerRequest_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsRegisterConsumerRequest_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_ArtifactConsumer_flyteidl_2fartifact_2fartifacts_2eproto.base,}}; - -static void InitDefaultsRegisterResponse_flyteidl_2fartifact_2fartifacts_2eproto() { - 
GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_RegisterResponse_default_instance_; - new (ptr) ::flyteidl::artifact::RegisterResponse(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::RegisterResponse::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<0> scc_info_RegisterResponse_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsRegisterResponse_flyteidl_2fartifact_2fartifacts_2eproto}, {}}; - -static void InitDefaultsExecutionInputsRequest_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_ExecutionInputsRequest_default_instance_; - new (ptr) ::flyteidl::artifact::ExecutionInputsRequest(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::ExecutionInputsRequest::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<2> scc_info_ExecutionInputsRequest_flyteidl_2fartifact_2fartifacts_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 2, InitDefaultsExecutionInputsRequest_flyteidl_2fartifact_2fartifacts_2eproto}, { - &scc_info_WorkflowExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto.base, - &scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto.base,}}; - -static void InitDefaultsExecutionInputsResponse_flyteidl_2fartifact_2fartifacts_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::artifact::_ExecutionInputsResponse_default_instance_; - new (ptr) ::flyteidl::artifact::ExecutionInputsResponse(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::artifact::ExecutionInputsResponse::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<0> scc_info_ExecutionInputsResponse_flyteidl_2fartifact_2fartifacts_2eproto = - 
{{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 0, InitDefaultsExecutionInputsResponse_flyteidl_2fartifact_2fartifacts_2eproto}, {}}; - -void InitDefaults_flyteidl_2fartifact_2fartifacts_2eproto() { - ::google::protobuf::internal::InitSCC(&scc_info_Artifact_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_CreateArtifactRequest_PartitionsEntry_DoNotUse_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_CreateArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_ArtifactSource_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_ArtifactSpec_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_CreateArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_GetArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_GetArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_SearchOptions_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_SearchArtifactsRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_SearchArtifactsResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_FindByWorkflowExecRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_AddTagRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_AddTagResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_CreateTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - 
::google::protobuf::internal::InitSCC(&scc_info_CreateTriggerResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_DeleteTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_DeleteTriggerResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_ArtifactProducer_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_RegisterProducerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_ArtifactConsumer_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_RegisterConsumerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_RegisterResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_ExecutionInputsRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_ExecutionInputsResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); -} - -::google::protobuf::Metadata file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[25]; -const ::google::protobuf::EnumDescriptor* file_level_enum_descriptors_flyteidl_2fartifact_2fartifacts_2eproto[1]; -constexpr ::google::protobuf::ServiceDescriptor const** file_level_service_descriptors_flyteidl_2fartifact_2fartifacts_2eproto = nullptr; - -const ::google::protobuf::uint32 TableStruct_flyteidl_2fartifact_2fartifacts_2eproto::offsets[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::Artifact, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::Artifact, artifact_id_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::Artifact, spec_), - 
PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::Artifact, tags_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::Artifact, source_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse, _has_bits_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse, key_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse, value_), - 0, - 1, - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactRequest, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactRequest, artifact_key_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactRequest, version_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactRequest, spec_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactRequest, partitions_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactRequest, tag_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactRequest, source_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSource, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSource, workflow_execution_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSource, node_id_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSource, task_id_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSource, retry_attempt_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSource, principal_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSpec, 
_internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSpec, value_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSpec, type_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSpec, short_description_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSpec, user_metadata_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactSpec, metadata_type_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactResponse, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateArtifactResponse, artifact_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::GetArtifactRequest, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::GetArtifactRequest, query_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::GetArtifactRequest, details_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::GetArtifactResponse, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::GetArtifactResponse, artifact_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchOptions, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchOptions, strict_partitions_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchOptions, latest_by_key_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchArtifactsRequest, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchArtifactsRequest, 
artifact_key_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchArtifactsRequest, partitions_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchArtifactsRequest, principal_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchArtifactsRequest, version_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchArtifactsRequest, options_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchArtifactsRequest, token_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchArtifactsRequest, limit_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchArtifactsResponse, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchArtifactsResponse, artifacts_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::SearchArtifactsResponse, token_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::FindByWorkflowExecRequest, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::FindByWorkflowExecRequest, exec_id_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::FindByWorkflowExecRequest, direction_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::AddTagRequest, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::AddTagRequest, artifact_id_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::AddTagRequest, value_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::AddTagRequest, overwrite_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::AddTagResponse, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateTriggerRequest, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no 
_oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateTriggerRequest, trigger_launch_plan_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::CreateTriggerResponse, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::DeleteTriggerRequest, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::DeleteTriggerRequest, trigger_id_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::DeleteTriggerResponse, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactProducer, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactProducer, entity_id_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactProducer, outputs_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::RegisterProducerRequest, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::RegisterProducerRequest, producers_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactConsumer, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactConsumer, entity_id_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ArtifactConsumer, inputs_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::RegisterConsumerRequest, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - 
PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::RegisterConsumerRequest, consumers_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::RegisterResponse, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ExecutionInputsRequest, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ExecutionInputsRequest, execution_id_), - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ExecutionInputsRequest, inputs_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::artifact::ExecutionInputsResponse, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ -}; -static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { - { 0, -1, sizeof(::flyteidl::artifact::Artifact)}, - { 9, 16, sizeof(::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse)}, - { 18, -1, sizeof(::flyteidl::artifact::CreateArtifactRequest)}, - { 29, -1, sizeof(::flyteidl::artifact::ArtifactSource)}, - { 39, -1, sizeof(::flyteidl::artifact::ArtifactSpec)}, - { 49, -1, sizeof(::flyteidl::artifact::CreateArtifactResponse)}, - { 55, -1, sizeof(::flyteidl::artifact::GetArtifactRequest)}, - { 62, -1, sizeof(::flyteidl::artifact::GetArtifactResponse)}, - { 68, -1, sizeof(::flyteidl::artifact::SearchOptions)}, - { 75, -1, sizeof(::flyteidl::artifact::SearchArtifactsRequest)}, - { 87, -1, sizeof(::flyteidl::artifact::SearchArtifactsResponse)}, - { 94, -1, sizeof(::flyteidl::artifact::FindByWorkflowExecRequest)}, - { 101, -1, sizeof(::flyteidl::artifact::AddTagRequest)}, - { 109, -1, sizeof(::flyteidl::artifact::AddTagResponse)}, - { 114, -1, sizeof(::flyteidl::artifact::CreateTriggerRequest)}, - { 120, -1, sizeof(::flyteidl::artifact::CreateTriggerResponse)}, - { 125, 
-1, sizeof(::flyteidl::artifact::DeleteTriggerRequest)}, - { 131, -1, sizeof(::flyteidl::artifact::DeleteTriggerResponse)}, - { 136, -1, sizeof(::flyteidl::artifact::ArtifactProducer)}, - { 143, -1, sizeof(::flyteidl::artifact::RegisterProducerRequest)}, - { 149, -1, sizeof(::flyteidl::artifact::ArtifactConsumer)}, - { 156, -1, sizeof(::flyteidl::artifact::RegisterConsumerRequest)}, - { 162, -1, sizeof(::flyteidl::artifact::RegisterResponse)}, - { 167, -1, sizeof(::flyteidl::artifact::ExecutionInputsRequest)}, - { 174, -1, sizeof(::flyteidl::artifact::ExecutionInputsResponse)}, -}; - -static ::google::protobuf::Message const * const file_default_instances[] = { - reinterpret_cast(&::flyteidl::artifact::_Artifact_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_CreateArtifactRequest_PartitionsEntry_DoNotUse_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_CreateArtifactRequest_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_ArtifactSource_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_ArtifactSpec_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_CreateArtifactResponse_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_GetArtifactRequest_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_GetArtifactResponse_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_SearchOptions_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_SearchArtifactsRequest_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_SearchArtifactsResponse_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_FindByWorkflowExecRequest_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_AddTagRequest_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_AddTagResponse_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_CreateTriggerRequest_default_instance_), - 
reinterpret_cast(&::flyteidl::artifact::_CreateTriggerResponse_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_DeleteTriggerRequest_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_DeleteTriggerResponse_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_ArtifactProducer_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_RegisterProducerRequest_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_ArtifactConsumer_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_RegisterConsumerRequest_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_RegisterResponse_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_ExecutionInputsRequest_default_instance_), - reinterpret_cast(&::flyteidl::artifact::_ExecutionInputsResponse_default_instance_), -}; - -::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto = { - {}, AddDescriptors_flyteidl_2fartifact_2fartifacts_2eproto, "flyteidl/artifact/artifacts.proto", schemas, - file_default_instances, TableStruct_flyteidl_2fartifact_2fartifacts_2eproto::offsets, - file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto, 25, file_level_enum_descriptors_flyteidl_2fartifact_2fartifacts_2eproto, file_level_service_descriptors_flyteidl_2fartifact_2fartifacts_2eproto, -}; - -const char descriptor_table_protodef_flyteidl_2fartifact_2fartifacts_2eproto[] = - "\n!flyteidl/artifact/artifacts.proto\022\021fly" - "teidl.artifact\032\031google/protobuf/any.prot" - "o\032\034google/api/annotations.proto\032 flyteid" - "l/admin/launch_plan.proto\032\034flyteidl/core" - "/literals.proto\032\031flyteidl/core/types.pro" - "to\032\036flyteidl/core/identifier.proto\032\037flyt" - "eidl/core/artifact_id.proto\032\035flyteidl/co" - "re/interface.proto\032 flyteidl/event/cloud" - "events.proto\"\252\001\n\010Artifact\022.\n\013artifact_id" - "\030\001 
\001(\0132\031.flyteidl.core.ArtifactID\022-\n\004spe" - "c\030\002 \001(\0132\037.flyteidl.artifact.ArtifactSpec" - "\022\014\n\004tags\030\003 \003(\t\0221\n\006source\030\004 \001(\0132!.flyteid" - "l.artifact.ArtifactSource\"\312\002\n\025CreateArti" - "factRequest\0220\n\014artifact_key\030\001 \001(\0132\032.flyt" - "eidl.core.ArtifactKey\022\017\n\007version\030\003 \001(\t\022-" - "\n\004spec\030\002 \001(\0132\037.flyteidl.artifact.Artifac" - "tSpec\022L\n\npartitions\030\004 \003(\01328.flyteidl.art" - "ifact.CreateArtifactRequest.PartitionsEn" - "try\022\013\n\003tag\030\005 \001(\t\0221\n\006source\030\006 \001(\0132!.flyte" - "idl.artifact.ArtifactSource\0321\n\017Partition" - "sEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\277" - "\001\n\016ArtifactSource\022F\n\022workflow_execution\030" - "\001 \001(\0132*.flyteidl.core.WorkflowExecutionI" - "dentifier\022\017\n\007node_id\030\002 \001(\t\022*\n\007task_id\030\003 " - "\001(\0132\031.flyteidl.core.Identifier\022\025\n\rretry_" - "attempt\030\004 \001(\r\022\021\n\tprincipal\030\005 \001(\t\"\276\001\n\014Art" - "ifactSpec\022%\n\005value\030\001 \001(\0132\026.flyteidl.core" - ".Literal\022(\n\004type\030\002 \001(\0132\032.flyteidl.core.L" - "iteralType\022\031\n\021short_description\030\003 \001(\t\022+\n" - "\ruser_metadata\030\004 \001(\0132\024.google.protobuf.A" - "ny\022\025\n\rmetadata_type\030\005 \001(\t\"G\n\026CreateArtif" - "actResponse\022-\n\010artifact\030\001 \001(\0132\033.flyteidl" - ".artifact.Artifact\"R\n\022GetArtifactRequest" - "\022+\n\005query\030\001 \001(\0132\034.flyteidl.core.Artifact" - "Query\022\017\n\007details\030\002 \001(\010\"D\n\023GetArtifactRes" - "ponse\022-\n\010artifact\030\001 \001(\0132\033.flyteidl.artif" - "act.Artifact\"A\n\rSearchOptions\022\031\n\021strict_" - "partitions\030\001 \001(\010\022\025\n\rlatest_by_key\030\002 \001(\010\"" - "\356\001\n\026SearchArtifactsRequest\0220\n\014artifact_k" - "ey\030\001 
\001(\0132\032.flyteidl.core.ArtifactKey\022-\n\n" - "partitions\030\002 \001(\0132\031.flyteidl.core.Partiti" - "ons\022\021\n\tprincipal\030\003 \001(\t\022\017\n\007version\030\004 \001(\t\022" - "1\n\007options\030\005 \001(\0132 .flyteidl.artifact.Sea" - "rchOptions\022\r\n\005token\030\006 \001(\t\022\r\n\005limit\030\007 \001(\005" - "\"X\n\027SearchArtifactsResponse\022.\n\tartifacts" - "\030\001 \003(\0132\033.flyteidl.artifact.Artifact\022\r\n\005t" - "oken\030\002 \001(\t\"\311\001\n\031FindByWorkflowExecRequest" - "\022;\n\007exec_id\030\001 \001(\0132*.flyteidl.core.Workfl" - "owExecutionIdentifier\022I\n\tdirection\030\002 \001(\016" - "26.flyteidl.artifact.FindByWorkflowExecR" - "equest.Direction\"$\n\tDirection\022\n\n\006INPUTS\020" - "\000\022\013\n\007OUTPUTS\020\001\"a\n\rAddTagRequest\022.\n\013artif" - "act_id\030\001 \001(\0132\031.flyteidl.core.ArtifactID\022" - "\r\n\005value\030\002 \001(\t\022\021\n\toverwrite\030\003 \001(\010\"\020\n\016Add" - "TagResponse\"O\n\024CreateTriggerRequest\0227\n\023t" - "rigger_launch_plan\030\001 \001(\0132\032.flyteidl.admi" - "n.LaunchPlan\"\027\n\025CreateTriggerResponse\"E\n" - "\024DeleteTriggerRequest\022-\n\ntrigger_id\030\001 \001(" - "\0132\031.flyteidl.core.Identifier\"\027\n\025DeleteTr" - "iggerResponse\"m\n\020ArtifactProducer\022,\n\tent" - "ity_id\030\001 \001(\0132\031.flyteidl.core.Identifier\022" - "+\n\007outputs\030\002 \001(\0132\032.flyteidl.core.Variabl" - "eMap\"Q\n\027RegisterProducerRequest\0226\n\tprodu" - "cers\030\001 \003(\0132#.flyteidl.artifact.ArtifactP" - "roducer\"m\n\020ArtifactConsumer\022,\n\tentity_id" - "\030\001 \001(\0132\031.flyteidl.core.Identifier\022+\n\006inp" - "uts\030\002 \001(\0132\033.flyteidl.core.ParameterMap\"Q" - "\n\027RegisterConsumerRequest\0226\n\tconsumers\030\001" - " \003(\0132#.flyteidl.artifact.ArtifactConsume" - "r\"\022\n\020RegisterResponse\"\205\001\n\026ExecutionInput" - "sRequest\022@\n\014execution_id\030\001 \001(\0132*.flyteid" - 
"l.core.WorkflowExecutionIdentifier\022)\n\006in" - "puts\030\002 \003(\0132\031.flyteidl.core.ArtifactID\"\031\n" - "\027ExecutionInputsResponse2\327\016\n\020ArtifactReg" - "istry\022g\n\016CreateArtifact\022(.flyteidl.artif" - "act.CreateArtifactRequest\032).flyteidl.art" - "ifact.CreateArtifactResponse\"\000\022\205\005\n\013GetAr" - "tifact\022%.flyteidl.artifact.GetArtifactRe" - "quest\032&.flyteidl.artifact.GetArtifactRes" - "ponse\"\246\004\202\323\344\223\002\237\004\022 /artifacts/api/v1/data/" - "artifactsZ\270\001\022\265\001/artifacts/api/v1/data/ar" - "tifact/id/{query.artifact_id.artifact_ke" - "y.project}/{query.artifact_id.artifact_k" - "ey.domain}/{query.artifact_id.artifact_k" - "ey.name}/{query.artifact_id.version}Z\234\001\022" - "\231\001/artifacts/api/v1/data/artifact/id/{qu" - "ery.artifact_id.artifact_key.project}/{q" - "uery.artifact_id.artifact_key.domain}/{q" - "uery.artifact_id.artifact_key.name}Z\240\001\022\235" - "\001/artifacts/api/v1/data/artifact/tag/{qu" - "ery.artifact_tag.artifact_key.project}/{" - "query.artifact_tag.artifact_key.domain}/" - "{query.artifact_tag.artifact_key.name}\022\240" - "\002\n\017SearchArtifacts\022).flyteidl.artifact.S" - "earchArtifactsRequest\032*.flyteidl.artifac" - "t.SearchArtifactsResponse\"\265\001\202\323\344\223\002\256\001\022_/ar" - "tifacts/api/v1/data/query/s/{artifact_ke" - "y.project}/{artifact_key.domain}/{artifa" - "ct_key.name}ZK\022I/artifacts/api/v1/data/q" - "uery/{artifact_key.project}/{artifact_ke" - "y.domain}\022d\n\rCreateTrigger\022\'.flyteidl.ar" - "tifact.CreateTriggerRequest\032(.flyteidl.a" - "rtifact.CreateTriggerResponse\"\000\022d\n\rDelet" - "eTrigger\022\'.flyteidl.artifact.DeleteTrigg" - "erRequest\032(.flyteidl.artifact.DeleteTrig" - "gerResponse\"\000\022O\n\006AddTag\022 .flyteidl.artif" - "act.AddTagRequest\032!.flyteidl.artifact.Ad" - "dTagResponse\"\000\022e\n\020RegisterProducer\022*.fly" - "teidl.artifact.RegisterProducerRequest\032#" - 
".flyteidl.artifact.RegisterResponse\"\000\022e\n" - "\020RegisterConsumer\022*.flyteidl.artifact.Re" - "gisterConsumerRequest\032#.flyteidl.artifac" - "t.RegisterResponse\"\000\022m\n\022SetExecutionInpu" - "ts\022).flyteidl.artifact.ExecutionInputsRe" - "quest\032*.flyteidl.artifact.ExecutionInput" - "sResponse\"\000\022\324\001\n\022FindByWorkflowExec\022,.fly" - "teidl.artifact.FindByWorkflowExecRequest" - "\032*.flyteidl.artifact.SearchArtifactsResp" - "onse\"d\202\323\344\223\002^\022\\/artifacts/api/v1/data/que" - "ry/e/{exec_id.project}/{exec_id.domain}/" - "{exec_id.name}/{direction}B@Z>github.com" - "/flyteorg/flyte/flyteidl/gen/pb-go/flyte" - "idl/artifactb\006proto3" - ; -::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fartifact_2fartifacts_2eproto = { - false, InitDefaults_flyteidl_2fartifact_2fartifacts_2eproto, - descriptor_table_protodef_flyteidl_2fartifact_2fartifacts_2eproto, - "flyteidl/artifact/artifacts.proto", &assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto, 4900, -}; - -void AddDescriptors_flyteidl_2fartifact_2fartifacts_2eproto() { - static constexpr ::google::protobuf::internal::InitFunc deps[9] = - { - ::AddDescriptors_google_2fprotobuf_2fany_2eproto, - ::AddDescriptors_google_2fapi_2fannotations_2eproto, - ::AddDescriptors_flyteidl_2fadmin_2flaunch_5fplan_2eproto, - ::AddDescriptors_flyteidl_2fcore_2fliterals_2eproto, - ::AddDescriptors_flyteidl_2fcore_2ftypes_2eproto, - ::AddDescriptors_flyteidl_2fcore_2fidentifier_2eproto, - ::AddDescriptors_flyteidl_2fcore_2fartifact_5fid_2eproto, - ::AddDescriptors_flyteidl_2fcore_2finterface_2eproto, - ::AddDescriptors_flyteidl_2fevent_2fcloudevents_2eproto, - }; - ::google::protobuf::internal::AddDescriptors(&descriptor_table_flyteidl_2fartifact_2fartifacts_2eproto, deps, 9); -} - -// Force running AddDescriptors() at dynamic initialization time. 
-static bool dynamic_init_dummy_flyteidl_2fartifact_2fartifacts_2eproto = []() { AddDescriptors_flyteidl_2fartifact_2fartifacts_2eproto(); return true; }(); -namespace flyteidl { -namespace artifact { -const ::google::protobuf::EnumDescriptor* FindByWorkflowExecRequest_Direction_descriptor() { - ::google::protobuf::internal::AssignDescriptors(&assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return file_level_enum_descriptors_flyteidl_2fartifact_2fartifacts_2eproto[0]; -} -bool FindByWorkflowExecRequest_Direction_IsValid(int value) { - switch (value) { - case 0: - case 1: - return true; - default: - return false; - } -} - -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const FindByWorkflowExecRequest_Direction FindByWorkflowExecRequest::INPUTS; -const FindByWorkflowExecRequest_Direction FindByWorkflowExecRequest::OUTPUTS; -const FindByWorkflowExecRequest_Direction FindByWorkflowExecRequest::Direction_MIN; -const FindByWorkflowExecRequest_Direction FindByWorkflowExecRequest::Direction_MAX; -const int FindByWorkflowExecRequest::Direction_ARRAYSIZE; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -// =================================================================== - -void Artifact::InitAsDefaultInstance() { - ::flyteidl::artifact::_Artifact_default_instance_._instance.get_mutable()->artifact_id_ = const_cast< ::flyteidl::core::ArtifactID*>( - ::flyteidl::core::ArtifactID::internal_default_instance()); - ::flyteidl::artifact::_Artifact_default_instance_._instance.get_mutable()->spec_ = const_cast< ::flyteidl::artifact::ArtifactSpec*>( - ::flyteidl::artifact::ArtifactSpec::internal_default_instance()); - ::flyteidl::artifact::_Artifact_default_instance_._instance.get_mutable()->source_ = const_cast< ::flyteidl::artifact::ArtifactSource*>( - ::flyteidl::artifact::ArtifactSource::internal_default_instance()); -} -class Artifact::HasBitSetters { - public: - static const ::flyteidl::core::ArtifactID& artifact_id(const Artifact* msg); - static const 
::flyteidl::artifact::ArtifactSpec& spec(const Artifact* msg); - static const ::flyteidl::artifact::ArtifactSource& source(const Artifact* msg); -}; - -const ::flyteidl::core::ArtifactID& -Artifact::HasBitSetters::artifact_id(const Artifact* msg) { - return *msg->artifact_id_; -} -const ::flyteidl::artifact::ArtifactSpec& -Artifact::HasBitSetters::spec(const Artifact* msg) { - return *msg->spec_; -} -const ::flyteidl::artifact::ArtifactSource& -Artifact::HasBitSetters::source(const Artifact* msg) { - return *msg->source_; -} -void Artifact::clear_artifact_id() { - if (GetArenaNoVirtual() == nullptr && artifact_id_ != nullptr) { - delete artifact_id_; - } - artifact_id_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int Artifact::kArtifactIdFieldNumber; -const int Artifact::kSpecFieldNumber; -const int Artifact::kTagsFieldNumber; -const int Artifact::kSourceFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -Artifact::Artifact() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.Artifact) -} -Artifact::Artifact(const Artifact& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr), - tags_(from.tags_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_artifact_id()) { - artifact_id_ = new ::flyteidl::core::ArtifactID(*from.artifact_id_); - } else { - artifact_id_ = nullptr; - } - if (from.has_spec()) { - spec_ = new ::flyteidl::artifact::ArtifactSpec(*from.spec_); - } else { - spec_ = nullptr; - } - if (from.has_source()) { - source_ = new ::flyteidl::artifact::ArtifactSource(*from.source_); - } else { - source_ = nullptr; - } - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.Artifact) -} - -void Artifact::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_Artifact_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::memset(&artifact_id_, 0, static_cast( - 
reinterpret_cast(&source_) - - reinterpret_cast(&artifact_id_)) + sizeof(source_)); -} - -Artifact::~Artifact() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.Artifact) - SharedDtor(); -} - -void Artifact::SharedDtor() { - if (this != internal_default_instance()) delete artifact_id_; - if (this != internal_default_instance()) delete spec_; - if (this != internal_default_instance()) delete source_; -} - -void Artifact::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const Artifact& Artifact::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_Artifact_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void Artifact::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.Artifact) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - tags_.Clear(); - if (GetArenaNoVirtual() == nullptr && artifact_id_ != nullptr) { - delete artifact_id_; - } - artifact_id_ = nullptr; - if (GetArenaNoVirtual() == nullptr && spec_ != nullptr) { - delete spec_; - } - spec_ = nullptr; - if (GetArenaNoVirtual() == nullptr && source_ != nullptr) { - delete source_; - } - source_ = nullptr; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* Artifact::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.ArtifactID artifact_id = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) 
!= 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::ArtifactID::_InternalParse; - object = msg->mutable_artifact_id(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // .flyteidl.artifact.ArtifactSpec spec = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::artifact::ArtifactSpec::_InternalParse; - object = msg->mutable_spec(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // repeated string tags = 3; - case 3: { - if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; - do { - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.Artifact.tags"); - object = msg->add_tags(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - if (ptr >= end) break; - } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 26 && (ptr += 1)); - break; - } - // .flyteidl.artifact.ArtifactSource source = 4; - case 4: { - if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); 
- parser_till_end = ::flyteidl::artifact::ArtifactSource::_InternalParse; - object = msg->mutable_source(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -string_till_end: - static_cast<::std::string*>(object)->clear(); - static_cast<::std::string*>(object)->reserve(size); - goto len_delim_till_end; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool Artifact::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.Artifact) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.core.ArtifactID artifact_id = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_artifact_id())); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.artifact.ArtifactSpec spec = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, 
mutable_spec())); - } else { - goto handle_unusual; - } - break; - } - - // repeated string tags = 3; - case 3: { - if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->add_tags())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->tags(this->tags_size() - 1).data(), - static_cast(this->tags(this->tags_size() - 1).length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.Artifact.tags")); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.artifact.ArtifactSource source = 4; - case 4: { - if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_source())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.Artifact) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.Artifact) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void Artifact::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.Artifact) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.ArtifactID artifact_id = 1; - if (this->has_artifact_id()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::artifact_id(this), output); - } - - // .flyteidl.artifact.ArtifactSpec spec = 2; - if (this->has_spec()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 2, HasBitSetters::spec(this), 
output); - } - - // repeated string tags = 3; - for (int i = 0, n = this->tags_size(); i < n; i++) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->tags(i).data(), static_cast(this->tags(i).length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.Artifact.tags"); - ::google::protobuf::internal::WireFormatLite::WriteString( - 3, this->tags(i), output); - } - - // .flyteidl.artifact.ArtifactSource source = 4; - if (this->has_source()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 4, HasBitSetters::source(this), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.Artifact) -} - -::google::protobuf::uint8* Artifact::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.Artifact) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.ArtifactID artifact_id = 1; - if (this->has_artifact_id()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::artifact_id(this), target); - } - - // .flyteidl.artifact.ArtifactSpec spec = 2; - if (this->has_spec()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 2, HasBitSetters::spec(this), target); - } - - // repeated string tags = 3; - for (int i = 0, n = this->tags_size(); i < n; i++) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->tags(i).data(), static_cast(this->tags(i).length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.Artifact.tags"); - target = ::google::protobuf::internal::WireFormatLite:: - WriteStringToArray(3, this->tags(i), 
target); - } - - // .flyteidl.artifact.ArtifactSource source = 4; - if (this->has_source()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 4, HasBitSetters::source(this), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.Artifact) - return target; -} - -size_t Artifact::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.Artifact) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated string tags = 3; - total_size += 1 * - ::google::protobuf::internal::FromIntSize(this->tags_size()); - for (int i = 0, n = this->tags_size(); i < n; i++) { - total_size += ::google::protobuf::internal::WireFormatLite::StringSize( - this->tags(i)); - } - - // .flyteidl.core.ArtifactID artifact_id = 1; - if (this->has_artifact_id()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *artifact_id_); - } - - // .flyteidl.artifact.ArtifactSpec spec = 2; - if (this->has_spec()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *spec_); - } - - // .flyteidl.artifact.ArtifactSource source = 4; - if (this->has_source()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *source_); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void Artifact::MergeFrom(const ::google::protobuf::Message& 
from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.Artifact) - GOOGLE_DCHECK_NE(&from, this); - const Artifact* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.Artifact) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.Artifact) - MergeFrom(*source); - } -} - -void Artifact::MergeFrom(const Artifact& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.Artifact) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - tags_.MergeFrom(from.tags_); - if (from.has_artifact_id()) { - mutable_artifact_id()->::flyteidl::core::ArtifactID::MergeFrom(from.artifact_id()); - } - if (from.has_spec()) { - mutable_spec()->::flyteidl::artifact::ArtifactSpec::MergeFrom(from.spec()); - } - if (from.has_source()) { - mutable_source()->::flyteidl::artifact::ArtifactSource::MergeFrom(from.source()); - } -} - -void Artifact::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.Artifact) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void Artifact::CopyFrom(const Artifact& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.Artifact) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool Artifact::IsInitialized() const { - return true; -} - -void Artifact::Swap(Artifact* other) { - if (other == this) return; - InternalSwap(other); -} -void Artifact::InternalSwap(Artifact* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - tags_.InternalSwap(CastToBase(&other->tags_)); - swap(artifact_id_, 
other->artifact_id_); - swap(spec_, other->spec_); - swap(source_, other->source_); -} - -::google::protobuf::Metadata Artifact::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -CreateArtifactRequest_PartitionsEntry_DoNotUse::CreateArtifactRequest_PartitionsEntry_DoNotUse() {} -CreateArtifactRequest_PartitionsEntry_DoNotUse::CreateArtifactRequest_PartitionsEntry_DoNotUse(::google::protobuf::Arena* arena) - : SuperType(arena) {} -void CreateArtifactRequest_PartitionsEntry_DoNotUse::MergeFrom(const CreateArtifactRequest_PartitionsEntry_DoNotUse& other) { - MergeFromInternal(other); -} -::google::protobuf::Metadata CreateArtifactRequest_PartitionsEntry_DoNotUse::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[1]; -} -void CreateArtifactRequest_PartitionsEntry_DoNotUse::MergeFrom( - const ::google::protobuf::Message& other) { - ::google::protobuf::Message::MergeFrom(other); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool CreateArtifactRequest_PartitionsEntry_DoNotUse::_ParseMap(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx) { - using MF = ::google::protobuf::internal::MapField< - CreateArtifactRequest_PartitionsEntry_DoNotUse, EntryKeyType, EntryValueType, - kEntryKeyFieldType, kEntryValueFieldType, - kEntryDefaultEnumValue>; - auto mf = static_cast(object); - Parser> parser(mf); -#define DO_(x) if (!(x)) return false - DO_(parser.ParseMap(begin, end)); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - parser.key().data(), static_cast(parser.key().length()), - 
::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.CreateArtifactRequest.PartitionsEntry.key")); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - parser.value().data(), static_cast(parser.value().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.CreateArtifactRequest.PartitionsEntry.value")); -#undef DO_ - return true; -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - - -// =================================================================== - -void CreateArtifactRequest::InitAsDefaultInstance() { - ::flyteidl::artifact::_CreateArtifactRequest_default_instance_._instance.get_mutable()->artifact_key_ = const_cast< ::flyteidl::core::ArtifactKey*>( - ::flyteidl::core::ArtifactKey::internal_default_instance()); - ::flyteidl::artifact::_CreateArtifactRequest_default_instance_._instance.get_mutable()->spec_ = const_cast< ::flyteidl::artifact::ArtifactSpec*>( - ::flyteidl::artifact::ArtifactSpec::internal_default_instance()); - ::flyteidl::artifact::_CreateArtifactRequest_default_instance_._instance.get_mutable()->source_ = const_cast< ::flyteidl::artifact::ArtifactSource*>( - ::flyteidl::artifact::ArtifactSource::internal_default_instance()); -} -class CreateArtifactRequest::HasBitSetters { - public: - static const ::flyteidl::core::ArtifactKey& artifact_key(const CreateArtifactRequest* msg); - static const ::flyteidl::artifact::ArtifactSpec& spec(const CreateArtifactRequest* msg); - static const ::flyteidl::artifact::ArtifactSource& source(const CreateArtifactRequest* msg); -}; - -const ::flyteidl::core::ArtifactKey& -CreateArtifactRequest::HasBitSetters::artifact_key(const CreateArtifactRequest* msg) { - return *msg->artifact_key_; -} -const ::flyteidl::artifact::ArtifactSpec& -CreateArtifactRequest::HasBitSetters::spec(const CreateArtifactRequest* msg) { - return *msg->spec_; -} -const ::flyteidl::artifact::ArtifactSource& -CreateArtifactRequest::HasBitSetters::source(const 
CreateArtifactRequest* msg) { - return *msg->source_; -} -void CreateArtifactRequest::clear_artifact_key() { - if (GetArenaNoVirtual() == nullptr && artifact_key_ != nullptr) { - delete artifact_key_; - } - artifact_key_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int CreateArtifactRequest::kArtifactKeyFieldNumber; -const int CreateArtifactRequest::kVersionFieldNumber; -const int CreateArtifactRequest::kSpecFieldNumber; -const int CreateArtifactRequest::kPartitionsFieldNumber; -const int CreateArtifactRequest::kTagFieldNumber; -const int CreateArtifactRequest::kSourceFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -CreateArtifactRequest::CreateArtifactRequest() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.CreateArtifactRequest) -} -CreateArtifactRequest::CreateArtifactRequest(const CreateArtifactRequest& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - partitions_.MergeFrom(from.partitions_); - version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.version().size() > 0) { - version_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.version_); - } - tag_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.tag().size() > 0) { - tag_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.tag_); - } - if (from.has_artifact_key()) { - artifact_key_ = new ::flyteidl::core::ArtifactKey(*from.artifact_key_); - } else { - artifact_key_ = nullptr; - } - if (from.has_spec()) { - spec_ = new ::flyteidl::artifact::ArtifactSpec(*from.spec_); - } else { - spec_ = nullptr; - } - if (from.has_source()) { - source_ = new ::flyteidl::artifact::ArtifactSource(*from.source_); - } else { - source_ = nullptr; - } - // 
@@protoc_insertion_point(copy_constructor:flyteidl.artifact.CreateArtifactRequest) -} - -void CreateArtifactRequest::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_CreateArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - tag_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - ::memset(&artifact_key_, 0, static_cast( - reinterpret_cast(&source_) - - reinterpret_cast(&artifact_key_)) + sizeof(source_)); -} - -CreateArtifactRequest::~CreateArtifactRequest() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.CreateArtifactRequest) - SharedDtor(); -} - -void CreateArtifactRequest::SharedDtor() { - version_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - tag_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (this != internal_default_instance()) delete artifact_key_; - if (this != internal_default_instance()) delete spec_; - if (this != internal_default_instance()) delete source_; -} - -void CreateArtifactRequest::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const CreateArtifactRequest& CreateArtifactRequest::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_CreateArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void CreateArtifactRequest::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.CreateArtifactRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - partitions_.Clear(); - version_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - tag_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (GetArenaNoVirtual() == nullptr && artifact_key_ != 
nullptr) { - delete artifact_key_; - } - artifact_key_ = nullptr; - if (GetArenaNoVirtual() == nullptr && spec_ != nullptr) { - delete spec_; - } - spec_ = nullptr; - if (GetArenaNoVirtual() == nullptr && source_ != nullptr) { - delete source_; - } - source_ = nullptr; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* CreateArtifactRequest::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.ArtifactKey artifact_key = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::ArtifactKey::_InternalParse; - object = msg->mutable_artifact_key(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // .flyteidl.artifact.ArtifactSpec spec = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::artifact::ArtifactSpec::_InternalParse; - object = msg->mutable_spec(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // string version = 3; - case 3: { - if 
(static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.CreateArtifactRequest.version"); - object = msg->mutable_version(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - // map partitions = 4; - case 4: { - if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; - do { - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::google::protobuf::internal::SlowMapEntryParser; - auto parse_map = ::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse::_ParseMap; - ctx->extra_parse_data().payload.clear(); - ctx->extra_parse_data().parse_map = parse_map; - object = &msg->partitions_; - if (size > end - ptr) goto len_delim_till_end; - auto newend = ptr + size; - GOOGLE_PROTOBUF_PARSER_ASSERT(parse_map(ptr, newend, object, ctx)); - ptr = newend; - if (ptr >= end) break; - } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 34 && (ptr += 1)); - break; - } - // string tag = 5; - case 5: { - if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.CreateArtifactRequest.tag"); - object = msg->mutable_tag(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto 
string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - // .flyteidl.artifact.ArtifactSource source = 6; - case 6: { - if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::artifact::ArtifactSource::_InternalParse; - object = msg->mutable_source(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -string_till_end: - static_cast<::std::string*>(object)->clear(); - static_cast<::std::string*>(object)->reserve(size); - goto len_delim_till_end; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool CreateArtifactRequest::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.CreateArtifactRequest) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - 
// .flyteidl.core.ArtifactKey artifact_key = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_artifact_key())); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.artifact.ArtifactSpec spec = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_spec())); - } else { - goto handle_unusual; - } - break; - } - - // string version = 3; - case 3: { - if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_version())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->version().data(), static_cast(this->version().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.CreateArtifactRequest.version")); - } else { - goto handle_unusual; - } - break; - } - - // map partitions = 4; - case 4: { - if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { - CreateArtifactRequest_PartitionsEntry_DoNotUse::Parser< ::google::protobuf::internal::MapField< - CreateArtifactRequest_PartitionsEntry_DoNotUse, - ::std::string, ::std::string, - ::google::protobuf::internal::WireFormatLite::TYPE_STRING, - ::google::protobuf::internal::WireFormatLite::TYPE_STRING, - 0 >, - ::google::protobuf::Map< ::std::string, ::std::string > > parser(&partitions_); - DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual( - input, &parser)); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - parser.key().data(), static_cast(parser.key().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.CreateArtifactRequest.PartitionsEntry.key")); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - parser.value().data(), 
static_cast(parser.value().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.CreateArtifactRequest.PartitionsEntry.value")); - } else { - goto handle_unusual; - } - break; - } - - // string tag = 5; - case 5: { - if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_tag())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->tag().data(), static_cast(this->tag().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.CreateArtifactRequest.tag")); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.artifact.ArtifactSource source = 6; - case 6: { - if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_source())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.CreateArtifactRequest) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.CreateArtifactRequest) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void CreateArtifactRequest::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.CreateArtifactRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.ArtifactKey artifact_key = 1; - if (this->has_artifact_key()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::artifact_key(this), output); - } - - // 
.flyteidl.artifact.ArtifactSpec spec = 2; - if (this->has_spec()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 2, HasBitSetters::spec(this), output); - } - - // string version = 3; - if (this->version().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->version().data(), static_cast(this->version().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.CreateArtifactRequest.version"); - ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 3, this->version(), output); - } - - // map partitions = 4; - if (!this->partitions().empty()) { - typedef ::google::protobuf::Map< ::std::string, ::std::string >::const_pointer - ConstPtr; - typedef ConstPtr SortItem; - typedef ::google::protobuf::internal::CompareByDerefFirst Less; - struct Utf8Check { - static void Check(ConstPtr p) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - p->first.data(), static_cast(p->first.length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.CreateArtifactRequest.PartitionsEntry.key"); - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - p->second.data(), static_cast(p->second.length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.CreateArtifactRequest.PartitionsEntry.value"); - } - }; - - if (output->IsSerializationDeterministic() && - this->partitions().size() > 1) { - ::std::unique_ptr items( - new SortItem[this->partitions().size()]); - typedef ::google::protobuf::Map< ::std::string, ::std::string >::size_type size_type; - size_type n = 0; - for (::google::protobuf::Map< ::std::string, ::std::string >::const_iterator - it = this->partitions().begin(); - it != this->partitions().end(); ++it, ++n) { - items[static_cast(n)] = SortItem(&*it); - } - ::std::sort(&items[0], &items[static_cast(n)], Less()); - ::std::unique_ptr entry; - for (size_type i = 0; i < n; i++) { 
- entry.reset(partitions_.NewEntryWrapper(items[static_cast(i)]->first, items[static_cast(i)]->second)); - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(4, *entry, output); - Utf8Check::Check(&(*items[static_cast(i)])); - } - } else { - ::std::unique_ptr entry; - for (::google::protobuf::Map< ::std::string, ::std::string >::const_iterator - it = this->partitions().begin(); - it != this->partitions().end(); ++it) { - entry.reset(partitions_.NewEntryWrapper(it->first, it->second)); - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(4, *entry, output); - Utf8Check::Check(&(*it)); - } - } - } - - // string tag = 5; - if (this->tag().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->tag().data(), static_cast(this->tag().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.CreateArtifactRequest.tag"); - ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 5, this->tag(), output); - } - - // .flyteidl.artifact.ArtifactSource source = 6; - if (this->has_source()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 6, HasBitSetters::source(this), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.CreateArtifactRequest) -} - -::google::protobuf::uint8* CreateArtifactRequest::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.CreateArtifactRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.ArtifactKey artifact_key = 1; - if (this->has_artifact_key()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, 
HasBitSetters::artifact_key(this), target); - } - - // .flyteidl.artifact.ArtifactSpec spec = 2; - if (this->has_spec()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 2, HasBitSetters::spec(this), target); - } - - // string version = 3; - if (this->version().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->version().data(), static_cast(this->version().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.CreateArtifactRequest.version"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 3, this->version(), target); - } - - // map partitions = 4; - if (!this->partitions().empty()) { - typedef ::google::protobuf::Map< ::std::string, ::std::string >::const_pointer - ConstPtr; - typedef ConstPtr SortItem; - typedef ::google::protobuf::internal::CompareByDerefFirst Less; - struct Utf8Check { - static void Check(ConstPtr p) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - p->first.data(), static_cast(p->first.length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.CreateArtifactRequest.PartitionsEntry.key"); - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - p->second.data(), static_cast(p->second.length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.CreateArtifactRequest.PartitionsEntry.value"); - } - }; - - if (false && - this->partitions().size() > 1) { - ::std::unique_ptr items( - new SortItem[this->partitions().size()]); - typedef ::google::protobuf::Map< ::std::string, ::std::string >::size_type size_type; - size_type n = 0; - for (::google::protobuf::Map< ::std::string, ::std::string >::const_iterator - it = this->partitions().begin(); - it != this->partitions().end(); ++it, ++n) { - items[static_cast(n)] = SortItem(&*it); - } - ::std::sort(&items[0], &items[static_cast(n)], Less()); - ::std::unique_ptr 
entry; - for (size_type i = 0; i < n; i++) { - entry.reset(partitions_.NewEntryWrapper(items[static_cast(i)]->first, items[static_cast(i)]->second)); - target = ::google::protobuf::internal::WireFormatLite::InternalWriteMessageNoVirtualToArray(4, *entry, target); - Utf8Check::Check(&(*items[static_cast(i)])); - } - } else { - ::std::unique_ptr entry; - for (::google::protobuf::Map< ::std::string, ::std::string >::const_iterator - it = this->partitions().begin(); - it != this->partitions().end(); ++it) { - entry.reset(partitions_.NewEntryWrapper(it->first, it->second)); - target = ::google::protobuf::internal::WireFormatLite::InternalWriteMessageNoVirtualToArray(4, *entry, target); - Utf8Check::Check(&(*it)); - } - } - } - - // string tag = 5; - if (this->tag().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->tag().data(), static_cast(this->tag().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.CreateArtifactRequest.tag"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 5, this->tag(), target); - } - - // .flyteidl.artifact.ArtifactSource source = 6; - if (this->has_source()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 6, HasBitSetters::source(this), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.CreateArtifactRequest) - return target; -} - -size_t CreateArtifactRequest::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.CreateArtifactRequest) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - 
_internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // map partitions = 4; - total_size += 1 * - ::google::protobuf::internal::FromIntSize(this->partitions_size()); - { - ::std::unique_ptr entry; - for (::google::protobuf::Map< ::std::string, ::std::string >::const_iterator - it = this->partitions().begin(); - it != this->partitions().end(); ++it) { - entry.reset(partitions_.NewEntryWrapper(it->first, it->second)); - total_size += ::google::protobuf::internal::WireFormatLite:: - MessageSizeNoVirtual(*entry); - } - } - - // string version = 3; - if (this->version().size() > 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->version()); - } - - // string tag = 5; - if (this->tag().size() > 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->tag()); - } - - // .flyteidl.core.ArtifactKey artifact_key = 1; - if (this->has_artifact_key()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *artifact_key_); - } - - // .flyteidl.artifact.ArtifactSpec spec = 2; - if (this->has_spec()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *spec_); - } - - // .flyteidl.artifact.ArtifactSource source = 6; - if (this->has_source()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *source_); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void CreateArtifactRequest::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.CreateArtifactRequest) - GOOGLE_DCHECK_NE(&from, this); - const CreateArtifactRequest* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // 
@@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.CreateArtifactRequest) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.CreateArtifactRequest) - MergeFrom(*source); - } -} - -void CreateArtifactRequest::MergeFrom(const CreateArtifactRequest& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.CreateArtifactRequest) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - partitions_.MergeFrom(from.partitions_); - if (from.version().size() > 0) { - - version_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.version_); - } - if (from.tag().size() > 0) { - - tag_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.tag_); - } - if (from.has_artifact_key()) { - mutable_artifact_key()->::flyteidl::core::ArtifactKey::MergeFrom(from.artifact_key()); - } - if (from.has_spec()) { - mutable_spec()->::flyteidl::artifact::ArtifactSpec::MergeFrom(from.spec()); - } - if (from.has_source()) { - mutable_source()->::flyteidl::artifact::ArtifactSource::MergeFrom(from.source()); - } -} - -void CreateArtifactRequest::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.CreateArtifactRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void CreateArtifactRequest::CopyFrom(const CreateArtifactRequest& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.CreateArtifactRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool CreateArtifactRequest::IsInitialized() const { - return true; -} - -void CreateArtifactRequest::Swap(CreateArtifactRequest* other) { - if (other == this) return; - 
InternalSwap(other); -} -void CreateArtifactRequest::InternalSwap(CreateArtifactRequest* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - partitions_.Swap(&other->partitions_); - version_.Swap(&other->version_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - tag_.Swap(&other->tag_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(artifact_key_, other->artifact_key_); - swap(spec_, other->spec_); - swap(source_, other->source_); -} - -::google::protobuf::Metadata CreateArtifactRequest::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void ArtifactSource::InitAsDefaultInstance() { - ::flyteidl::artifact::_ArtifactSource_default_instance_._instance.get_mutable()->workflow_execution_ = const_cast< ::flyteidl::core::WorkflowExecutionIdentifier*>( - ::flyteidl::core::WorkflowExecutionIdentifier::internal_default_instance()); - ::flyteidl::artifact::_ArtifactSource_default_instance_._instance.get_mutable()->task_id_ = const_cast< ::flyteidl::core::Identifier*>( - ::flyteidl::core::Identifier::internal_default_instance()); -} -class ArtifactSource::HasBitSetters { - public: - static const ::flyteidl::core::WorkflowExecutionIdentifier& workflow_execution(const ArtifactSource* msg); - static const ::flyteidl::core::Identifier& task_id(const ArtifactSource* msg); -}; - -const ::flyteidl::core::WorkflowExecutionIdentifier& -ArtifactSource::HasBitSetters::workflow_execution(const ArtifactSource* msg) { - return *msg->workflow_execution_; -} -const ::flyteidl::core::Identifier& -ArtifactSource::HasBitSetters::task_id(const ArtifactSource* msg) { - return *msg->task_id_; -} -void 
ArtifactSource::clear_workflow_execution() { - if (GetArenaNoVirtual() == nullptr && workflow_execution_ != nullptr) { - delete workflow_execution_; - } - workflow_execution_ = nullptr; -} -void ArtifactSource::clear_task_id() { - if (GetArenaNoVirtual() == nullptr && task_id_ != nullptr) { - delete task_id_; - } - task_id_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int ArtifactSource::kWorkflowExecutionFieldNumber; -const int ArtifactSource::kNodeIdFieldNumber; -const int ArtifactSource::kTaskIdFieldNumber; -const int ArtifactSource::kRetryAttemptFieldNumber; -const int ArtifactSource::kPrincipalFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -ArtifactSource::ArtifactSource() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.ArtifactSource) -} -ArtifactSource::ArtifactSource(const ArtifactSource& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - node_id_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.node_id().size() > 0) { - node_id_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.node_id_); - } - principal_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.principal().size() > 0) { - principal_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.principal_); - } - if (from.has_workflow_execution()) { - workflow_execution_ = new ::flyteidl::core::WorkflowExecutionIdentifier(*from.workflow_execution_); - } else { - workflow_execution_ = nullptr; - } - if (from.has_task_id()) { - task_id_ = new ::flyteidl::core::Identifier(*from.task_id_); - } else { - task_id_ = nullptr; - } - retry_attempt_ = from.retry_attempt_; - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.ArtifactSource) -} - 
-void ArtifactSource::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_ArtifactSource_flyteidl_2fartifact_2fartifacts_2eproto.base); - node_id_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - principal_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - ::memset(&workflow_execution_, 0, static_cast( - reinterpret_cast(&retry_attempt_) - - reinterpret_cast(&workflow_execution_)) + sizeof(retry_attempt_)); -} - -ArtifactSource::~ArtifactSource() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.ArtifactSource) - SharedDtor(); -} - -void ArtifactSource::SharedDtor() { - node_id_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - principal_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (this != internal_default_instance()) delete workflow_execution_; - if (this != internal_default_instance()) delete task_id_; -} - -void ArtifactSource::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const ArtifactSource& ArtifactSource::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_ArtifactSource_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void ArtifactSource::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.ArtifactSource) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - node_id_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - principal_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (GetArenaNoVirtual() == nullptr && workflow_execution_ != nullptr) { - delete workflow_execution_; - } - workflow_execution_ = nullptr; - if (GetArenaNoVirtual() == nullptr && task_id_ != nullptr) { - delete task_id_; - } - task_id_ = nullptr; - 
retry_attempt_ = 0u; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* ArtifactSource::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::WorkflowExecutionIdentifier::_InternalParse; - object = msg->mutable_workflow_execution(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // string node_id = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.ArtifactSource.node_id"); - object = msg->mutable_node_id(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - // .flyteidl.core.Identifier task_id = 3; - case 3: { - if 
(static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::Identifier::_InternalParse; - object = msg->mutable_task_id(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // uint32 retry_attempt = 4; - case 4: { - if (static_cast<::google::protobuf::uint8>(tag) != 32) goto handle_unusual; - msg->set_retry_attempt(::google::protobuf::internal::ReadVarint(&ptr)); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - break; - } - // string principal = 5; - case 5: { - if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.ArtifactSource.principal"); - object = msg->mutable_principal(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -string_till_end: - static_cast<::std::string*>(object)->clear(); - static_cast<::std::string*>(object)->reserve(size); - goto len_delim_till_end; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, 
end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool ArtifactSource::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.ArtifactSource) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_workflow_execution())); - } else { - goto handle_unusual; - } - break; - } - - // string node_id = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_node_id())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->node_id().data(), static_cast(this->node_id().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.ArtifactSource.node_id")); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.core.Identifier task_id = 3; - case 3: { - if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_task_id())); - } else { - goto handle_unusual; - } - break; - } - - // uint32 retry_attempt = 4; - case 4: { - if (static_cast< ::google::protobuf::uint8>(tag) == (32 & 0xFF)) { - - DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< - ::google::protobuf::uint32, 
::google::protobuf::internal::WireFormatLite::TYPE_UINT32>( - input, &retry_attempt_))); - } else { - goto handle_unusual; - } - break; - } - - // string principal = 5; - case 5: { - if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_principal())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->principal().data(), static_cast(this->principal().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.ArtifactSource.principal")); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.ArtifactSource) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.ArtifactSource) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void ArtifactSource::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.ArtifactSource) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - if (this->has_workflow_execution()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::workflow_execution(this), output); - } - - // string node_id = 2; - if (this->node_id().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->node_id().data(), static_cast(this->node_id().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.ArtifactSource.node_id"); - 
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 2, this->node_id(), output); - } - - // .flyteidl.core.Identifier task_id = 3; - if (this->has_task_id()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 3, HasBitSetters::task_id(this), output); - } - - // uint32 retry_attempt = 4; - if (this->retry_attempt() != 0) { - ::google::protobuf::internal::WireFormatLite::WriteUInt32(4, this->retry_attempt(), output); - } - - // string principal = 5; - if (this->principal().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->principal().data(), static_cast(this->principal().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.ArtifactSource.principal"); - ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 5, this->principal(), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.ArtifactSource) -} - -::google::protobuf::uint8* ArtifactSource::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.ArtifactSource) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - if (this->has_workflow_execution()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::workflow_execution(this), target); - } - - // string node_id = 2; - if (this->node_id().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->node_id().data(), static_cast(this->node_id().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - 
"flyteidl.artifact.ArtifactSource.node_id"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 2, this->node_id(), target); - } - - // .flyteidl.core.Identifier task_id = 3; - if (this->has_task_id()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 3, HasBitSetters::task_id(this), target); - } - - // uint32 retry_attempt = 4; - if (this->retry_attempt() != 0) { - target = ::google::protobuf::internal::WireFormatLite::WriteUInt32ToArray(4, this->retry_attempt(), target); - } - - // string principal = 5; - if (this->principal().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->principal().data(), static_cast(this->principal().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.ArtifactSource.principal"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 5, this->principal(), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.ArtifactSource) - return target; -} - -size_t ArtifactSource::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.ArtifactSource) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // string node_id = 2; - if (this->node_id().size() > 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->node_id()); - } - - // string principal = 5; - if (this->principal().size() > 0) { 
- total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->principal()); - } - - // .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - if (this->has_workflow_execution()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *workflow_execution_); - } - - // .flyteidl.core.Identifier task_id = 3; - if (this->has_task_id()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *task_id_); - } - - // uint32 retry_attempt = 4; - if (this->retry_attempt() != 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::UInt32Size( - this->retry_attempt()); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void ArtifactSource::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.ArtifactSource) - GOOGLE_DCHECK_NE(&from, this); - const ArtifactSource* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.ArtifactSource) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.ArtifactSource) - MergeFrom(*source); - } -} - -void ArtifactSource::MergeFrom(const ArtifactSource& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.ArtifactSource) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.node_id().size() > 0) { - - node_id_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.node_id_); - } - if (from.principal().size() > 0) { - - 
principal_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.principal_); - } - if (from.has_workflow_execution()) { - mutable_workflow_execution()->::flyteidl::core::WorkflowExecutionIdentifier::MergeFrom(from.workflow_execution()); - } - if (from.has_task_id()) { - mutable_task_id()->::flyteidl::core::Identifier::MergeFrom(from.task_id()); - } - if (from.retry_attempt() != 0) { - set_retry_attempt(from.retry_attempt()); - } -} - -void ArtifactSource::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.ArtifactSource) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void ArtifactSource::CopyFrom(const ArtifactSource& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.ArtifactSource) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool ArtifactSource::IsInitialized() const { - return true; -} - -void ArtifactSource::Swap(ArtifactSource* other) { - if (other == this) return; - InternalSwap(other); -} -void ArtifactSource::InternalSwap(ArtifactSource* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - node_id_.Swap(&other->node_id_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - principal_.Swap(&other->principal_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(workflow_execution_, other->workflow_execution_); - swap(task_id_, other->task_id_); - swap(retry_attempt_, other->retry_attempt_); -} - -::google::protobuf::Metadata ArtifactSource::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void 
ArtifactSpec::InitAsDefaultInstance() { - ::flyteidl::artifact::_ArtifactSpec_default_instance_._instance.get_mutable()->value_ = const_cast< ::flyteidl::core::Literal*>( - ::flyteidl::core::Literal::internal_default_instance()); - ::flyteidl::artifact::_ArtifactSpec_default_instance_._instance.get_mutable()->type_ = const_cast< ::flyteidl::core::LiteralType*>( - ::flyteidl::core::LiteralType::internal_default_instance()); - ::flyteidl::artifact::_ArtifactSpec_default_instance_._instance.get_mutable()->user_metadata_ = const_cast< ::google::protobuf::Any*>( - ::google::protobuf::Any::internal_default_instance()); -} -class ArtifactSpec::HasBitSetters { - public: - static const ::flyteidl::core::Literal& value(const ArtifactSpec* msg); - static const ::flyteidl::core::LiteralType& type(const ArtifactSpec* msg); - static const ::google::protobuf::Any& user_metadata(const ArtifactSpec* msg); -}; - -const ::flyteidl::core::Literal& -ArtifactSpec::HasBitSetters::value(const ArtifactSpec* msg) { - return *msg->value_; -} -const ::flyteidl::core::LiteralType& -ArtifactSpec::HasBitSetters::type(const ArtifactSpec* msg) { - return *msg->type_; -} -const ::google::protobuf::Any& -ArtifactSpec::HasBitSetters::user_metadata(const ArtifactSpec* msg) { - return *msg->user_metadata_; -} -void ArtifactSpec::clear_value() { - if (GetArenaNoVirtual() == nullptr && value_ != nullptr) { - delete value_; - } - value_ = nullptr; -} -void ArtifactSpec::clear_type() { - if (GetArenaNoVirtual() == nullptr && type_ != nullptr) { - delete type_; - } - type_ = nullptr; -} -void ArtifactSpec::clear_user_metadata() { - if (GetArenaNoVirtual() == nullptr && user_metadata_ != nullptr) { - delete user_metadata_; - } - user_metadata_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int ArtifactSpec::kValueFieldNumber; -const int ArtifactSpec::kTypeFieldNumber; -const int ArtifactSpec::kShortDescriptionFieldNumber; -const int ArtifactSpec::kUserMetadataFieldNumber; -const int 
ArtifactSpec::kMetadataTypeFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -ArtifactSpec::ArtifactSpec() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.ArtifactSpec) -} -ArtifactSpec::ArtifactSpec(const ArtifactSpec& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - short_description_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.short_description().size() > 0) { - short_description_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.short_description_); - } - metadata_type_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.metadata_type().size() > 0) { - metadata_type_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.metadata_type_); - } - if (from.has_value()) { - value_ = new ::flyteidl::core::Literal(*from.value_); - } else { - value_ = nullptr; - } - if (from.has_type()) { - type_ = new ::flyteidl::core::LiteralType(*from.type_); - } else { - type_ = nullptr; - } - if (from.has_user_metadata()) { - user_metadata_ = new ::google::protobuf::Any(*from.user_metadata_); - } else { - user_metadata_ = nullptr; - } - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.ArtifactSpec) -} - -void ArtifactSpec::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_ArtifactSpec_flyteidl_2fartifact_2fartifacts_2eproto.base); - short_description_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - metadata_type_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - ::memset(&value_, 0, static_cast( - reinterpret_cast(&user_metadata_) - - reinterpret_cast(&value_)) + sizeof(user_metadata_)); -} - -ArtifactSpec::~ArtifactSpec() { - // 
@@protoc_insertion_point(destructor:flyteidl.artifact.ArtifactSpec) - SharedDtor(); -} - -void ArtifactSpec::SharedDtor() { - short_description_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - metadata_type_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (this != internal_default_instance()) delete value_; - if (this != internal_default_instance()) delete type_; - if (this != internal_default_instance()) delete user_metadata_; -} - -void ArtifactSpec::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const ArtifactSpec& ArtifactSpec::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_ArtifactSpec_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void ArtifactSpec::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.ArtifactSpec) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - short_description_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - metadata_type_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (GetArenaNoVirtual() == nullptr && value_ != nullptr) { - delete value_; - } - value_ = nullptr; - if (GetArenaNoVirtual() == nullptr && type_ != nullptr) { - delete type_; - } - type_ = nullptr; - if (GetArenaNoVirtual() == nullptr && user_metadata_ != nullptr) { - delete user_metadata_; - } - user_metadata_ = nullptr; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* ArtifactSpec::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc 
parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.Literal value = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::Literal::_InternalParse; - object = msg->mutable_value(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // .flyteidl.core.LiteralType type = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::LiteralType::_InternalParse; - object = msg->mutable_type(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // string short_description = 3; - case 3: { - if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.ArtifactSpec.short_description"); - object = msg->mutable_short_description(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - // .google.protobuf.Any user_metadata = 4; - case 4: { - if 
(static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::google::protobuf::Any::_InternalParse; - object = msg->mutable_user_metadata(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // string metadata_type = 5; - case 5: { - if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.ArtifactSpec.metadata_type"); - object = msg->mutable_metadata_type(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -string_till_end: - static_cast<::std::string*>(object)->clear(); - static_cast<::std::string*>(object)->reserve(size); - goto len_delim_till_end; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool ArtifactSpec::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define 
DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.ArtifactSpec) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.core.Literal value = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_value())); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.core.LiteralType type = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_type())); - } else { - goto handle_unusual; - } - break; - } - - // string short_description = 3; - case 3: { - if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_short_description())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->short_description().data(), static_cast(this->short_description().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.ArtifactSpec.short_description")); - } else { - goto handle_unusual; - } - break; - } - - // .google.protobuf.Any user_metadata = 4; - case 4: { - if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_user_metadata())); - } else { - goto handle_unusual; - } - break; - } - - // string metadata_type = 5; - case 5: { - if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_metadata_type())); - 
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->metadata_type().data(), static_cast(this->metadata_type().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.ArtifactSpec.metadata_type")); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.ArtifactSpec) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.ArtifactSpec) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void ArtifactSpec::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.ArtifactSpec) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.Literal value = 1; - if (this->has_value()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::value(this), output); - } - - // .flyteidl.core.LiteralType type = 2; - if (this->has_type()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 2, HasBitSetters::type(this), output); - } - - // string short_description = 3; - if (this->short_description().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->short_description().data(), static_cast(this->short_description().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.ArtifactSpec.short_description"); - ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 3, this->short_description(), output); - } - - // .google.protobuf.Any user_metadata = 4; - if (this->has_user_metadata()) { - 
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 4, HasBitSetters::user_metadata(this), output); - } - - // string metadata_type = 5; - if (this->metadata_type().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->metadata_type().data(), static_cast(this->metadata_type().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.ArtifactSpec.metadata_type"); - ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 5, this->metadata_type(), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.ArtifactSpec) -} - -::google::protobuf::uint8* ArtifactSpec::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.ArtifactSpec) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.Literal value = 1; - if (this->has_value()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::value(this), target); - } - - // .flyteidl.core.LiteralType type = 2; - if (this->has_type()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 2, HasBitSetters::type(this), target); - } - - // string short_description = 3; - if (this->short_description().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->short_description().data(), static_cast(this->short_description().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.ArtifactSpec.short_description"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 3, this->short_description(), target); - } - - // 
.google.protobuf.Any user_metadata = 4; - if (this->has_user_metadata()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 4, HasBitSetters::user_metadata(this), target); - } - - // string metadata_type = 5; - if (this->metadata_type().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->metadata_type().data(), static_cast(this->metadata_type().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.ArtifactSpec.metadata_type"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 5, this->metadata_type(), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.ArtifactSpec) - return target; -} - -size_t ArtifactSpec::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.ArtifactSpec) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // string short_description = 3; - if (this->short_description().size() > 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->short_description()); - } - - // string metadata_type = 5; - if (this->metadata_type().size() > 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->metadata_type()); - } - - // .flyteidl.core.Literal value = 1; - if (this->has_value()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *value_); - } - - // 
.flyteidl.core.LiteralType type = 2; - if (this->has_type()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *type_); - } - - // .google.protobuf.Any user_metadata = 4; - if (this->has_user_metadata()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *user_metadata_); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void ArtifactSpec::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.ArtifactSpec) - GOOGLE_DCHECK_NE(&from, this); - const ArtifactSpec* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.ArtifactSpec) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.ArtifactSpec) - MergeFrom(*source); - } -} - -void ArtifactSpec::MergeFrom(const ArtifactSpec& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.ArtifactSpec) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.short_description().size() > 0) { - - short_description_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.short_description_); - } - if (from.metadata_type().size() > 0) { - - metadata_type_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.metadata_type_); - } - if (from.has_value()) { - mutable_value()->::flyteidl::core::Literal::MergeFrom(from.value()); - } - if (from.has_type()) { - mutable_type()->::flyteidl::core::LiteralType::MergeFrom(from.type()); - } - if (from.has_user_metadata()) { - 
mutable_user_metadata()->::google::protobuf::Any::MergeFrom(from.user_metadata()); - } -} - -void ArtifactSpec::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.ArtifactSpec) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void ArtifactSpec::CopyFrom(const ArtifactSpec& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.ArtifactSpec) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool ArtifactSpec::IsInitialized() const { - return true; -} - -void ArtifactSpec::Swap(ArtifactSpec* other) { - if (other == this) return; - InternalSwap(other); -} -void ArtifactSpec::InternalSwap(ArtifactSpec* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - short_description_.Swap(&other->short_description_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - metadata_type_.Swap(&other->metadata_type_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(value_, other->value_); - swap(type_, other->type_); - swap(user_metadata_, other->user_metadata_); -} - -::google::protobuf::Metadata ArtifactSpec::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void CreateArtifactResponse::InitAsDefaultInstance() { - ::flyteidl::artifact::_CreateArtifactResponse_default_instance_._instance.get_mutable()->artifact_ = const_cast< ::flyteidl::artifact::Artifact*>( - ::flyteidl::artifact::Artifact::internal_default_instance()); -} -class CreateArtifactResponse::HasBitSetters { - public: - static const ::flyteidl::artifact::Artifact& artifact(const CreateArtifactResponse* msg); -}; 
- -const ::flyteidl::artifact::Artifact& -CreateArtifactResponse::HasBitSetters::artifact(const CreateArtifactResponse* msg) { - return *msg->artifact_; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int CreateArtifactResponse::kArtifactFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -CreateArtifactResponse::CreateArtifactResponse() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.CreateArtifactResponse) -} -CreateArtifactResponse::CreateArtifactResponse(const CreateArtifactResponse& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_artifact()) { - artifact_ = new ::flyteidl::artifact::Artifact(*from.artifact_); - } else { - artifact_ = nullptr; - } - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.CreateArtifactResponse) -} - -void CreateArtifactResponse::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_CreateArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - artifact_ = nullptr; -} - -CreateArtifactResponse::~CreateArtifactResponse() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.CreateArtifactResponse) - SharedDtor(); -} - -void CreateArtifactResponse::SharedDtor() { - if (this != internal_default_instance()) delete artifact_; -} - -void CreateArtifactResponse::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const CreateArtifactResponse& CreateArtifactResponse::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_CreateArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void CreateArtifactResponse::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.CreateArtifactResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about 
cached_has_bits being unused - (void) cached_has_bits; - - if (GetArenaNoVirtual() == nullptr && artifact_ != nullptr) { - delete artifact_; - } - artifact_ = nullptr; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* CreateArtifactResponse::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.artifact.Artifact artifact = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::artifact::Artifact::_InternalParse; - object = msg->mutable_artifact(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool CreateArtifactResponse::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if 
(!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.CreateArtifactResponse) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.artifact.Artifact artifact = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_artifact())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.CreateArtifactResponse) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.CreateArtifactResponse) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void CreateArtifactResponse::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.CreateArtifactResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.artifact.Artifact artifact = 1; - if (this->has_artifact()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::artifact(this), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.CreateArtifactResponse) -} - -::google::protobuf::uint8* 
CreateArtifactResponse::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.CreateArtifactResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.artifact.Artifact artifact = 1; - if (this->has_artifact()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::artifact(this), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.CreateArtifactResponse) - return target; -} - -size_t CreateArtifactResponse::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.CreateArtifactResponse) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // .flyteidl.artifact.Artifact artifact = 1; - if (this->has_artifact()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *artifact_); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void CreateArtifactResponse::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.CreateArtifactResponse) - GOOGLE_DCHECK_NE(&from, this); - const CreateArtifactResponse* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // 
@@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.CreateArtifactResponse) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.CreateArtifactResponse) - MergeFrom(*source); - } -} - -void CreateArtifactResponse::MergeFrom(const CreateArtifactResponse& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.CreateArtifactResponse) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.has_artifact()) { - mutable_artifact()->::flyteidl::artifact::Artifact::MergeFrom(from.artifact()); - } -} - -void CreateArtifactResponse::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.CreateArtifactResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void CreateArtifactResponse::CopyFrom(const CreateArtifactResponse& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.CreateArtifactResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool CreateArtifactResponse::IsInitialized() const { - return true; -} - -void CreateArtifactResponse::Swap(CreateArtifactResponse* other) { - if (other == this) return; - InternalSwap(other); -} -void CreateArtifactResponse::InternalSwap(CreateArtifactResponse* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(artifact_, other->artifact_); -} - -::google::protobuf::Metadata CreateArtifactResponse::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// 
=================================================================== - -void GetArtifactRequest::InitAsDefaultInstance() { - ::flyteidl::artifact::_GetArtifactRequest_default_instance_._instance.get_mutable()->query_ = const_cast< ::flyteidl::core::ArtifactQuery*>( - ::flyteidl::core::ArtifactQuery::internal_default_instance()); -} -class GetArtifactRequest::HasBitSetters { - public: - static const ::flyteidl::core::ArtifactQuery& query(const GetArtifactRequest* msg); -}; - -const ::flyteidl::core::ArtifactQuery& -GetArtifactRequest::HasBitSetters::query(const GetArtifactRequest* msg) { - return *msg->query_; -} -void GetArtifactRequest::clear_query() { - if (GetArenaNoVirtual() == nullptr && query_ != nullptr) { - delete query_; - } - query_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int GetArtifactRequest::kQueryFieldNumber; -const int GetArtifactRequest::kDetailsFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -GetArtifactRequest::GetArtifactRequest() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.GetArtifactRequest) -} -GetArtifactRequest::GetArtifactRequest(const GetArtifactRequest& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_query()) { - query_ = new ::flyteidl::core::ArtifactQuery(*from.query_); - } else { - query_ = nullptr; - } - details_ = from.details_; - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.GetArtifactRequest) -} - -void GetArtifactRequest::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_GetArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::memset(&query_, 0, static_cast( - reinterpret_cast(&details_) - - reinterpret_cast(&query_)) + sizeof(details_)); -} - -GetArtifactRequest::~GetArtifactRequest() { - // 
@@protoc_insertion_point(destructor:flyteidl.artifact.GetArtifactRequest) - SharedDtor(); -} - -void GetArtifactRequest::SharedDtor() { - if (this != internal_default_instance()) delete query_; -} - -void GetArtifactRequest::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const GetArtifactRequest& GetArtifactRequest::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_GetArtifactRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void GetArtifactRequest::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.GetArtifactRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - if (GetArenaNoVirtual() == nullptr && query_ != nullptr) { - delete query_; - } - query_ = nullptr; - details_ = false; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* GetArtifactRequest::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.ArtifactQuery query = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::ArtifactQuery::_InternalParse; - object = msg->mutable_query(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr 
- size, ptr)); - break; - } - // bool details = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 16) goto handle_unusual; - msg->set_details(::google::protobuf::internal::ReadVarint(&ptr)); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool GetArtifactRequest::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.GetArtifactRequest) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.core.ArtifactQuery query = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_query())); - } else { - goto handle_unusual; - } - break; - } - - // bool details = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (16 & 0xFF)) { - - DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< - bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( - input, &details_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - 
goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.GetArtifactRequest) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.GetArtifactRequest) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void GetArtifactRequest::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.GetArtifactRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.ArtifactQuery query = 1; - if (this->has_query()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::query(this), output); - } - - // bool details = 2; - if (this->details() != 0) { - ::google::protobuf::internal::WireFormatLite::WriteBool(2, this->details(), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.GetArtifactRequest) -} - -::google::protobuf::uint8* GetArtifactRequest::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.GetArtifactRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.ArtifactQuery query = 1; - if (this->has_query()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::query(this), target); - } - - // bool details = 2; - if (this->details() != 0) { - target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(2, this->details(), target); - } - - 
if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.GetArtifactRequest) - return target; -} - -size_t GetArtifactRequest::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.GetArtifactRequest) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // .flyteidl.core.ArtifactQuery query = 1; - if (this->has_query()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *query_); - } - - // bool details = 2; - if (this->details() != 0) { - total_size += 1 + 1; - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void GetArtifactRequest::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.GetArtifactRequest) - GOOGLE_DCHECK_NE(&from, this); - const GetArtifactRequest* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.GetArtifactRequest) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.GetArtifactRequest) - MergeFrom(*source); - } -} - -void GetArtifactRequest::MergeFrom(const GetArtifactRequest& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.GetArtifactRequest) - 
GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.has_query()) { - mutable_query()->::flyteidl::core::ArtifactQuery::MergeFrom(from.query()); - } - if (from.details() != 0) { - set_details(from.details()); - } -} - -void GetArtifactRequest::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.GetArtifactRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void GetArtifactRequest::CopyFrom(const GetArtifactRequest& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.GetArtifactRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool GetArtifactRequest::IsInitialized() const { - return true; -} - -void GetArtifactRequest::Swap(GetArtifactRequest* other) { - if (other == this) return; - InternalSwap(other); -} -void GetArtifactRequest::InternalSwap(GetArtifactRequest* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(query_, other->query_); - swap(details_, other->details_); -} - -::google::protobuf::Metadata GetArtifactRequest::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void GetArtifactResponse::InitAsDefaultInstance() { - ::flyteidl::artifact::_GetArtifactResponse_default_instance_._instance.get_mutable()->artifact_ = const_cast< ::flyteidl::artifact::Artifact*>( - ::flyteidl::artifact::Artifact::internal_default_instance()); -} -class GetArtifactResponse::HasBitSetters { - public: - static const ::flyteidl::artifact::Artifact& artifact(const GetArtifactResponse* msg); -}; - 
-const ::flyteidl::artifact::Artifact& -GetArtifactResponse::HasBitSetters::artifact(const GetArtifactResponse* msg) { - return *msg->artifact_; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int GetArtifactResponse::kArtifactFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -GetArtifactResponse::GetArtifactResponse() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.GetArtifactResponse) -} -GetArtifactResponse::GetArtifactResponse(const GetArtifactResponse& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_artifact()) { - artifact_ = new ::flyteidl::artifact::Artifact(*from.artifact_); - } else { - artifact_ = nullptr; - } - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.GetArtifactResponse) -} - -void GetArtifactResponse::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_GetArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - artifact_ = nullptr; -} - -GetArtifactResponse::~GetArtifactResponse() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.GetArtifactResponse) - SharedDtor(); -} - -void GetArtifactResponse::SharedDtor() { - if (this != internal_default_instance()) delete artifact_; -} - -void GetArtifactResponse::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const GetArtifactResponse& GetArtifactResponse::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_GetArtifactResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void GetArtifactResponse::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.GetArtifactResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - if (GetArenaNoVirtual() 
== nullptr && artifact_ != nullptr) { - delete artifact_; - } - artifact_ = nullptr; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* GetArtifactResponse::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.artifact.Artifact artifact = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::artifact::Artifact::_InternalParse; - object = msg->mutable_artifact(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool GetArtifactResponse::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // 
@@protoc_insertion_point(parse_start:flyteidl.artifact.GetArtifactResponse) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.artifact.Artifact artifact = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_artifact())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.GetArtifactResponse) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.GetArtifactResponse) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void GetArtifactResponse::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.GetArtifactResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.artifact.Artifact artifact = 1; - if (this->has_artifact()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::artifact(this), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.GetArtifactResponse) -} - -::google::protobuf::uint8* GetArtifactResponse::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // 
@@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.GetArtifactResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.artifact.Artifact artifact = 1; - if (this->has_artifact()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::artifact(this), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.GetArtifactResponse) - return target; -} - -size_t GetArtifactResponse::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.GetArtifactResponse) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // .flyteidl.artifact.Artifact artifact = 1; - if (this->has_artifact()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *artifact_); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void GetArtifactResponse::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.GetArtifactResponse) - GOOGLE_DCHECK_NE(&from, this); - const GetArtifactResponse* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.GetArtifactResponse) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } 
else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.GetArtifactResponse) - MergeFrom(*source); - } -} - -void GetArtifactResponse::MergeFrom(const GetArtifactResponse& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.GetArtifactResponse) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.has_artifact()) { - mutable_artifact()->::flyteidl::artifact::Artifact::MergeFrom(from.artifact()); - } -} - -void GetArtifactResponse::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.GetArtifactResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void GetArtifactResponse::CopyFrom(const GetArtifactResponse& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.GetArtifactResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool GetArtifactResponse::IsInitialized() const { - return true; -} - -void GetArtifactResponse::Swap(GetArtifactResponse* other) { - if (other == this) return; - InternalSwap(other); -} -void GetArtifactResponse::InternalSwap(GetArtifactResponse* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(artifact_, other->artifact_); -} - -::google::protobuf::Metadata GetArtifactResponse::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void SearchOptions::InitAsDefaultInstance() { -} -class SearchOptions::HasBitSetters { - public: -}; - -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int 
SearchOptions::kStrictPartitionsFieldNumber; -const int SearchOptions::kLatestByKeyFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -SearchOptions::SearchOptions() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.SearchOptions) -} -SearchOptions::SearchOptions(const SearchOptions& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::memcpy(&strict_partitions_, &from.strict_partitions_, - static_cast(reinterpret_cast(&latest_by_key_) - - reinterpret_cast(&strict_partitions_)) + sizeof(latest_by_key_)); - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.SearchOptions) -} - -void SearchOptions::SharedCtor() { - ::memset(&strict_partitions_, 0, static_cast( - reinterpret_cast(&latest_by_key_) - - reinterpret_cast(&strict_partitions_)) + sizeof(latest_by_key_)); -} - -SearchOptions::~SearchOptions() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.SearchOptions) - SharedDtor(); -} - -void SearchOptions::SharedDtor() { -} - -void SearchOptions::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const SearchOptions& SearchOptions::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_SearchOptions_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void SearchOptions::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.SearchOptions) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - ::memset(&strict_partitions_, 0, static_cast( - reinterpret_cast(&latest_by_key_) - - reinterpret_cast(&strict_partitions_)) + sizeof(latest_by_key_)); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* SearchOptions::_InternalParse(const 
char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // bool strict_partitions = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 8) goto handle_unusual; - msg->set_strict_partitions(::google::protobuf::internal::ReadVarint(&ptr)); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - break; - } - // bool latest_by_key = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 16) goto handle_unusual; - msg->set_latest_by_key(::google::protobuf::internal::ReadVarint(&ptr)); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool SearchOptions::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.SearchOptions) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // bool strict_partitions = 1; - case 1: { - if (static_cast< 
::google::protobuf::uint8>(tag) == (8 & 0xFF)) { - - DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< - bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( - input, &strict_partitions_))); - } else { - goto handle_unusual; - } - break; - } - - // bool latest_by_key = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (16 & 0xFF)) { - - DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< - bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( - input, &latest_by_key_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.SearchOptions) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.SearchOptions) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void SearchOptions::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.SearchOptions) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // bool strict_partitions = 1; - if (this->strict_partitions() != 0) { - ::google::protobuf::internal::WireFormatLite::WriteBool(1, this->strict_partitions(), output); - } - - // bool latest_by_key = 2; - if (this->latest_by_key() != 0) { - ::google::protobuf::internal::WireFormatLite::WriteBool(2, this->latest_by_key(), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.SearchOptions) -} - -::google::protobuf::uint8* 
SearchOptions::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.SearchOptions) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // bool strict_partitions = 1; - if (this->strict_partitions() != 0) { - target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(1, this->strict_partitions(), target); - } - - // bool latest_by_key = 2; - if (this->latest_by_key() != 0) { - target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(2, this->latest_by_key(), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.SearchOptions) - return target; -} - -size_t SearchOptions::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.SearchOptions) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // bool strict_partitions = 1; - if (this->strict_partitions() != 0) { - total_size += 1 + 1; - } - - // bool latest_by_key = 2; - if (this->latest_by_key() != 0) { - total_size += 1 + 1; - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void SearchOptions::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.SearchOptions) - GOOGLE_DCHECK_NE(&from, this); - const SearchOptions* source = - 
::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.SearchOptions) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.SearchOptions) - MergeFrom(*source); - } -} - -void SearchOptions::MergeFrom(const SearchOptions& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.SearchOptions) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.strict_partitions() != 0) { - set_strict_partitions(from.strict_partitions()); - } - if (from.latest_by_key() != 0) { - set_latest_by_key(from.latest_by_key()); - } -} - -void SearchOptions::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.SearchOptions) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void SearchOptions::CopyFrom(const SearchOptions& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.SearchOptions) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool SearchOptions::IsInitialized() const { - return true; -} - -void SearchOptions::Swap(SearchOptions* other) { - if (other == this) return; - InternalSwap(other); -} -void SearchOptions::InternalSwap(SearchOptions* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(strict_partitions_, other->strict_partitions_); - swap(latest_by_key_, other->latest_by_key_); -} - -::google::protobuf::Metadata SearchOptions::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return 
::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void SearchArtifactsRequest::InitAsDefaultInstance() { - ::flyteidl::artifact::_SearchArtifactsRequest_default_instance_._instance.get_mutable()->artifact_key_ = const_cast< ::flyteidl::core::ArtifactKey*>( - ::flyteidl::core::ArtifactKey::internal_default_instance()); - ::flyteidl::artifact::_SearchArtifactsRequest_default_instance_._instance.get_mutable()->partitions_ = const_cast< ::flyteidl::core::Partitions*>( - ::flyteidl::core::Partitions::internal_default_instance()); - ::flyteidl::artifact::_SearchArtifactsRequest_default_instance_._instance.get_mutable()->options_ = const_cast< ::flyteidl::artifact::SearchOptions*>( - ::flyteidl::artifact::SearchOptions::internal_default_instance()); -} -class SearchArtifactsRequest::HasBitSetters { - public: - static const ::flyteidl::core::ArtifactKey& artifact_key(const SearchArtifactsRequest* msg); - static const ::flyteidl::core::Partitions& partitions(const SearchArtifactsRequest* msg); - static const ::flyteidl::artifact::SearchOptions& options(const SearchArtifactsRequest* msg); -}; - -const ::flyteidl::core::ArtifactKey& -SearchArtifactsRequest::HasBitSetters::artifact_key(const SearchArtifactsRequest* msg) { - return *msg->artifact_key_; -} -const ::flyteidl::core::Partitions& -SearchArtifactsRequest::HasBitSetters::partitions(const SearchArtifactsRequest* msg) { - return *msg->partitions_; -} -const ::flyteidl::artifact::SearchOptions& -SearchArtifactsRequest::HasBitSetters::options(const SearchArtifactsRequest* msg) { - return *msg->options_; -} -void SearchArtifactsRequest::clear_artifact_key() { - if (GetArenaNoVirtual() == nullptr && artifact_key_ != nullptr) { - delete artifact_key_; - } - artifact_key_ = nullptr; -} -void SearchArtifactsRequest::clear_partitions() { - if (GetArenaNoVirtual() == nullptr && partitions_ != nullptr) { - delete 
partitions_; - } - partitions_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int SearchArtifactsRequest::kArtifactKeyFieldNumber; -const int SearchArtifactsRequest::kPartitionsFieldNumber; -const int SearchArtifactsRequest::kPrincipalFieldNumber; -const int SearchArtifactsRequest::kVersionFieldNumber; -const int SearchArtifactsRequest::kOptionsFieldNumber; -const int SearchArtifactsRequest::kTokenFieldNumber; -const int SearchArtifactsRequest::kLimitFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -SearchArtifactsRequest::SearchArtifactsRequest() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.SearchArtifactsRequest) -} -SearchArtifactsRequest::SearchArtifactsRequest(const SearchArtifactsRequest& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - principal_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.principal().size() > 0) { - principal_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.principal_); - } - version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.version().size() > 0) { - version_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.version_); - } - token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.token().size() > 0) { - token_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.token_); - } - if (from.has_artifact_key()) { - artifact_key_ = new ::flyteidl::core::ArtifactKey(*from.artifact_key_); - } else { - artifact_key_ = nullptr; - } - if (from.has_partitions()) { - partitions_ = new ::flyteidl::core::Partitions(*from.partitions_); - } else { - partitions_ = nullptr; - } - if (from.has_options()) { - 
options_ = new ::flyteidl::artifact::SearchOptions(*from.options_); - } else { - options_ = nullptr; - } - limit_ = from.limit_; - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.SearchArtifactsRequest) -} - -void SearchArtifactsRequest::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_SearchArtifactsRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - principal_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - ::memset(&artifact_key_, 0, static_cast( - reinterpret_cast(&limit_) - - reinterpret_cast(&artifact_key_)) + sizeof(limit_)); -} - -SearchArtifactsRequest::~SearchArtifactsRequest() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.SearchArtifactsRequest) - SharedDtor(); -} - -void SearchArtifactsRequest::SharedDtor() { - principal_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - version_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - token_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (this != internal_default_instance()) delete artifact_key_; - if (this != internal_default_instance()) delete partitions_; - if (this != internal_default_instance()) delete options_; -} - -void SearchArtifactsRequest::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const SearchArtifactsRequest& SearchArtifactsRequest::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_SearchArtifactsRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void SearchArtifactsRequest::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.SearchArtifactsRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler 
warnings about cached_has_bits being unused - (void) cached_has_bits; - - principal_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - version_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (GetArenaNoVirtual() == nullptr && artifact_key_ != nullptr) { - delete artifact_key_; - } - artifact_key_ = nullptr; - if (GetArenaNoVirtual() == nullptr && partitions_ != nullptr) { - delete partitions_; - } - partitions_ = nullptr; - if (GetArenaNoVirtual() == nullptr && options_ != nullptr) { - delete options_; - } - options_ = nullptr; - limit_ = 0; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* SearchArtifactsRequest::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.ArtifactKey artifact_key = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::ArtifactKey::_InternalParse; - object = msg->mutable_artifact_key(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // .flyteidl.core.Partitions partitions = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - ptr = 
::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::Partitions::_InternalParse; - object = msg->mutable_partitions(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // string principal = 3; - case 3: { - if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.SearchArtifactsRequest.principal"); - object = msg->mutable_principal(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - // string version = 4; - case 4: { - if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.SearchArtifactsRequest.version"); - object = msg->mutable_version(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - // .flyteidl.artifact.SearchOptions options = 5; - case 5: { - if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; - ptr = 
::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::artifact::SearchOptions::_InternalParse; - object = msg->mutable_options(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // string token = 6; - case 6: { - if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.SearchArtifactsRequest.token"); - object = msg->mutable_token(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - // int32 limit = 7; - case 7: { - if (static_cast<::google::protobuf::uint8>(tag) != 56) goto handle_unusual; - msg->set_limit(::google::protobuf::internal::ReadVarint(&ptr)); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -string_till_end: - static_cast<::std::string*>(object)->clear(); - static_cast<::std::string*>(object)->reserve(size); - goto len_delim_till_end; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // 
GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool SearchArtifactsRequest::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.SearchArtifactsRequest) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.core.ArtifactKey artifact_key = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_artifact_key())); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.core.Partitions partitions = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_partitions())); - } else { - goto handle_unusual; - } - break; - } - - // string principal = 3; - case 3: { - if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_principal())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->principal().data(), static_cast(this->principal().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.SearchArtifactsRequest.principal")); - } else { - goto handle_unusual; - } - break; - } - - // string version = 4; - case 4: { - if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_version())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->version().data(), 
static_cast(this->version().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.SearchArtifactsRequest.version")); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.artifact.SearchOptions options = 5; - case 5: { - if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_options())); - } else { - goto handle_unusual; - } - break; - } - - // string token = 6; - case 6: { - if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_token())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->token().data(), static_cast(this->token().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.SearchArtifactsRequest.token")); - } else { - goto handle_unusual; - } - break; - } - - // int32 limit = 7; - case 7: { - if (static_cast< ::google::protobuf::uint8>(tag) == (56 & 0xFF)) { - - DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< - ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>( - input, &limit_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.SearchArtifactsRequest) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.SearchArtifactsRequest) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void SearchArtifactsRequest::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // 
@@protoc_insertion_point(serialize_start:flyteidl.artifact.SearchArtifactsRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.ArtifactKey artifact_key = 1; - if (this->has_artifact_key()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::artifact_key(this), output); - } - - // .flyteidl.core.Partitions partitions = 2; - if (this->has_partitions()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 2, HasBitSetters::partitions(this), output); - } - - // string principal = 3; - if (this->principal().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->principal().data(), static_cast(this->principal().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.SearchArtifactsRequest.principal"); - ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 3, this->principal(), output); - } - - // string version = 4; - if (this->version().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->version().data(), static_cast(this->version().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.SearchArtifactsRequest.version"); - ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 4, this->version(), output); - } - - // .flyteidl.artifact.SearchOptions options = 5; - if (this->has_options()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 5, HasBitSetters::options(this), output); - } - - // string token = 6; - if (this->token().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->token().data(), static_cast(this->token().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.SearchArtifactsRequest.token"); - ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 
6, this->token(), output); - } - - // int32 limit = 7; - if (this->limit() != 0) { - ::google::protobuf::internal::WireFormatLite::WriteInt32(7, this->limit(), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.SearchArtifactsRequest) -} - -::google::protobuf::uint8* SearchArtifactsRequest::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.SearchArtifactsRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.ArtifactKey artifact_key = 1; - if (this->has_artifact_key()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::artifact_key(this), target); - } - - // .flyteidl.core.Partitions partitions = 2; - if (this->has_partitions()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 2, HasBitSetters::partitions(this), target); - } - - // string principal = 3; - if (this->principal().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->principal().data(), static_cast(this->principal().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.SearchArtifactsRequest.principal"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 3, this->principal(), target); - } - - // string version = 4; - if (this->version().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->version().data(), static_cast(this->version().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.SearchArtifactsRequest.version"); - target = - 
::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 4, this->version(), target); - } - - // .flyteidl.artifact.SearchOptions options = 5; - if (this->has_options()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 5, HasBitSetters::options(this), target); - } - - // string token = 6; - if (this->token().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->token().data(), static_cast(this->token().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.SearchArtifactsRequest.token"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 6, this->token(), target); - } - - // int32 limit = 7; - if (this->limit() != 0) { - target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(7, this->limit(), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.SearchArtifactsRequest) - return target; -} - -size_t SearchArtifactsRequest::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.SearchArtifactsRequest) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // string principal = 3; - if (this->principal().size() > 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->principal()); - } - - // string version = 4; - if (this->version().size() > 0) { - total_size += 1 + - 
::google::protobuf::internal::WireFormatLite::StringSize( - this->version()); - } - - // string token = 6; - if (this->token().size() > 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->token()); - } - - // .flyteidl.core.ArtifactKey artifact_key = 1; - if (this->has_artifact_key()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *artifact_key_); - } - - // .flyteidl.core.Partitions partitions = 2; - if (this->has_partitions()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *partitions_); - } - - // .flyteidl.artifact.SearchOptions options = 5; - if (this->has_options()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *options_); - } - - // int32 limit = 7; - if (this->limit() != 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::Int32Size( - this->limit()); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void SearchArtifactsRequest::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.SearchArtifactsRequest) - GOOGLE_DCHECK_NE(&from, this); - const SearchArtifactsRequest* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.SearchArtifactsRequest) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.SearchArtifactsRequest) - MergeFrom(*source); - } -} - -void SearchArtifactsRequest::MergeFrom(const SearchArtifactsRequest& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.SearchArtifactsRequest) - GOOGLE_DCHECK_NE(&from, this); - 
_internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.principal().size() > 0) { - - principal_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.principal_); - } - if (from.version().size() > 0) { - - version_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.version_); - } - if (from.token().size() > 0) { - - token_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.token_); - } - if (from.has_artifact_key()) { - mutable_artifact_key()->::flyteidl::core::ArtifactKey::MergeFrom(from.artifact_key()); - } - if (from.has_partitions()) { - mutable_partitions()->::flyteidl::core::Partitions::MergeFrom(from.partitions()); - } - if (from.has_options()) { - mutable_options()->::flyteidl::artifact::SearchOptions::MergeFrom(from.options()); - } - if (from.limit() != 0) { - set_limit(from.limit()); - } -} - -void SearchArtifactsRequest::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.SearchArtifactsRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void SearchArtifactsRequest::CopyFrom(const SearchArtifactsRequest& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.SearchArtifactsRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool SearchArtifactsRequest::IsInitialized() const { - return true; -} - -void SearchArtifactsRequest::Swap(SearchArtifactsRequest* other) { - if (other == this) return; - InternalSwap(other); -} -void SearchArtifactsRequest::InternalSwap(SearchArtifactsRequest* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - principal_.Swap(&other->principal_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - version_.Swap(&other->version_, 
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - token_.Swap(&other->token_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(artifact_key_, other->artifact_key_); - swap(partitions_, other->partitions_); - swap(options_, other->options_); - swap(limit_, other->limit_); -} - -::google::protobuf::Metadata SearchArtifactsRequest::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void SearchArtifactsResponse::InitAsDefaultInstance() { -} -class SearchArtifactsResponse::HasBitSetters { - public: -}; - -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int SearchArtifactsResponse::kArtifactsFieldNumber; -const int SearchArtifactsResponse::kTokenFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -SearchArtifactsResponse::SearchArtifactsResponse() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.SearchArtifactsResponse) -} -SearchArtifactsResponse::SearchArtifactsResponse(const SearchArtifactsResponse& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr), - artifacts_(from.artifacts_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.token().size() > 0) { - token_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.token_); - } - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.SearchArtifactsResponse) -} - -void SearchArtifactsResponse::SharedCtor() { - ::google::protobuf::internal::InitSCC( - 
&scc_info_SearchArtifactsResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - token_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} - -SearchArtifactsResponse::~SearchArtifactsResponse() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.SearchArtifactsResponse) - SharedDtor(); -} - -void SearchArtifactsResponse::SharedDtor() { - token_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} - -void SearchArtifactsResponse::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const SearchArtifactsResponse& SearchArtifactsResponse::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_SearchArtifactsResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void SearchArtifactsResponse::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.SearchArtifactsResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - artifacts_.Clear(); - token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* SearchArtifactsResponse::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // repeated .flyteidl.artifact.Artifact artifacts = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - do { - ptr = 
::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::artifact::Artifact::_InternalParse; - object = msg->add_artifacts(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - if (ptr >= end) break; - } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 10 && (ptr += 1)); - break; - } - // string token = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.SearchArtifactsResponse.token"); - object = msg->mutable_token(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -string_till_end: - static_cast<::std::string*>(object)->clear(); - static_cast<::std::string*>(object)->reserve(size); - goto len_delim_till_end; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool SearchArtifactsResponse::MergePartialFromCodedStream( - 
::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.SearchArtifactsResponse) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // repeated .flyteidl.artifact.Artifact artifacts = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, add_artifacts())); - } else { - goto handle_unusual; - } - break; - } - - // string token = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_token())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->token().data(), static_cast(this->token().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.SearchArtifactsResponse.token")); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.SearchArtifactsResponse) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.SearchArtifactsResponse) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void SearchArtifactsResponse::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.SearchArtifactsResponse) - 
::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // repeated .flyteidl.artifact.Artifact artifacts = 1; - for (unsigned int i = 0, - n = static_cast(this->artifacts_size()); i < n; i++) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, - this->artifacts(static_cast(i)), - output); - } - - // string token = 2; - if (this->token().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->token().data(), static_cast(this->token().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.SearchArtifactsResponse.token"); - ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 2, this->token(), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.SearchArtifactsResponse) -} - -::google::protobuf::uint8* SearchArtifactsResponse::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.SearchArtifactsResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // repeated .flyteidl.artifact.Artifact artifacts = 1; - for (unsigned int i = 0, - n = static_cast(this->artifacts_size()); i < n; i++) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, this->artifacts(static_cast(i)), target); - } - - // string token = 2; - if (this->token().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->token().data(), static_cast(this->token().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.SearchArtifactsResponse.token"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 2, 
this->token(), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.SearchArtifactsResponse) - return target; -} - -size_t SearchArtifactsResponse::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.SearchArtifactsResponse) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated .flyteidl.artifact.Artifact artifacts = 1; - { - unsigned int count = static_cast(this->artifacts_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::google::protobuf::internal::WireFormatLite::MessageSize( - this->artifacts(static_cast(i))); - } - } - - // string token = 2; - if (this->token().size() > 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->token()); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void SearchArtifactsResponse::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.SearchArtifactsResponse) - GOOGLE_DCHECK_NE(&from, this); - const SearchArtifactsResponse* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.SearchArtifactsResponse) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // 
@@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.SearchArtifactsResponse) - MergeFrom(*source); - } -} - -void SearchArtifactsResponse::MergeFrom(const SearchArtifactsResponse& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.SearchArtifactsResponse) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - artifacts_.MergeFrom(from.artifacts_); - if (from.token().size() > 0) { - - token_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.token_); - } -} - -void SearchArtifactsResponse::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.SearchArtifactsResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void SearchArtifactsResponse::CopyFrom(const SearchArtifactsResponse& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.SearchArtifactsResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool SearchArtifactsResponse::IsInitialized() const { - return true; -} - -void SearchArtifactsResponse::Swap(SearchArtifactsResponse* other) { - if (other == this) return; - InternalSwap(other); -} -void SearchArtifactsResponse::InternalSwap(SearchArtifactsResponse* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - CastToBase(&artifacts_)->InternalSwap(CastToBase(&other->artifacts_)); - token_.Swap(&other->token_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); -} - -::google::protobuf::Metadata SearchArtifactsResponse::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return 
::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void FindByWorkflowExecRequest::InitAsDefaultInstance() { - ::flyteidl::artifact::_FindByWorkflowExecRequest_default_instance_._instance.get_mutable()->exec_id_ = const_cast< ::flyteidl::core::WorkflowExecutionIdentifier*>( - ::flyteidl::core::WorkflowExecutionIdentifier::internal_default_instance()); -} -class FindByWorkflowExecRequest::HasBitSetters { - public: - static const ::flyteidl::core::WorkflowExecutionIdentifier& exec_id(const FindByWorkflowExecRequest* msg); -}; - -const ::flyteidl::core::WorkflowExecutionIdentifier& -FindByWorkflowExecRequest::HasBitSetters::exec_id(const FindByWorkflowExecRequest* msg) { - return *msg->exec_id_; -} -void FindByWorkflowExecRequest::clear_exec_id() { - if (GetArenaNoVirtual() == nullptr && exec_id_ != nullptr) { - delete exec_id_; - } - exec_id_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int FindByWorkflowExecRequest::kExecIdFieldNumber; -const int FindByWorkflowExecRequest::kDirectionFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -FindByWorkflowExecRequest::FindByWorkflowExecRequest() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.FindByWorkflowExecRequest) -} -FindByWorkflowExecRequest::FindByWorkflowExecRequest(const FindByWorkflowExecRequest& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_exec_id()) { - exec_id_ = new ::flyteidl::core::WorkflowExecutionIdentifier(*from.exec_id_); - } else { - exec_id_ = nullptr; - } - direction_ = from.direction_; - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.FindByWorkflowExecRequest) -} - -void FindByWorkflowExecRequest::SharedCtor() { - 
::google::protobuf::internal::InitSCC( - &scc_info_FindByWorkflowExecRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::memset(&exec_id_, 0, static_cast( - reinterpret_cast(&direction_) - - reinterpret_cast(&exec_id_)) + sizeof(direction_)); -} - -FindByWorkflowExecRequest::~FindByWorkflowExecRequest() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.FindByWorkflowExecRequest) - SharedDtor(); -} - -void FindByWorkflowExecRequest::SharedDtor() { - if (this != internal_default_instance()) delete exec_id_; -} - -void FindByWorkflowExecRequest::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const FindByWorkflowExecRequest& FindByWorkflowExecRequest::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_FindByWorkflowExecRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void FindByWorkflowExecRequest::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.FindByWorkflowExecRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - if (GetArenaNoVirtual() == nullptr && exec_id_ != nullptr) { - delete exec_id_; - } - exec_id_ = nullptr; - direction_ = 0; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* FindByWorkflowExecRequest::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - case 1: { - if 
(static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::WorkflowExecutionIdentifier::_InternalParse; - object = msg->mutable_exec_id(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 16) goto handle_unusual; - ::google::protobuf::uint64 val = ::google::protobuf::internal::ReadVarint(&ptr); - msg->set_direction(static_cast<::flyteidl::artifact::FindByWorkflowExecRequest_Direction>(val)); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool FindByWorkflowExecRequest::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.FindByWorkflowExecRequest) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // 
.flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_exec_id())); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (16 & 0xFF)) { - int value = 0; - DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< - int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>( - input, &value))); - set_direction(static_cast< ::flyteidl::artifact::FindByWorkflowExecRequest_Direction >(value)); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.FindByWorkflowExecRequest) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.FindByWorkflowExecRequest) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void FindByWorkflowExecRequest::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.FindByWorkflowExecRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - if (this->has_exec_id()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::exec_id(this), output); - } - - // .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - if (this->direction() != 0) { - ::google::protobuf::internal::WireFormatLite::WriteEnum( - 2, this->direction(), output); - } - - if 
(_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.FindByWorkflowExecRequest) -} - -::google::protobuf::uint8* FindByWorkflowExecRequest::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.FindByWorkflowExecRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - if (this->has_exec_id()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::exec_id(this), target); - } - - // .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - if (this->direction() != 0) { - target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray( - 2, this->direction(), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.FindByWorkflowExecRequest) - return target; -} - -size_t FindByWorkflowExecRequest::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.FindByWorkflowExecRequest) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - if (this->has_exec_id()) { - total_size += 1 + - 
::google::protobuf::internal::WireFormatLite::MessageSize( - *exec_id_); - } - - // .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - if (this->direction() != 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::EnumSize(this->direction()); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void FindByWorkflowExecRequest::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.FindByWorkflowExecRequest) - GOOGLE_DCHECK_NE(&from, this); - const FindByWorkflowExecRequest* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.FindByWorkflowExecRequest) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.FindByWorkflowExecRequest) - MergeFrom(*source); - } -} - -void FindByWorkflowExecRequest::MergeFrom(const FindByWorkflowExecRequest& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.FindByWorkflowExecRequest) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.has_exec_id()) { - mutable_exec_id()->::flyteidl::core::WorkflowExecutionIdentifier::MergeFrom(from.exec_id()); - } - if (from.direction() != 0) { - set_direction(from.direction()); - } -} - -void FindByWorkflowExecRequest::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.FindByWorkflowExecRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void FindByWorkflowExecRequest::CopyFrom(const FindByWorkflowExecRequest& from) { -// 
@@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.FindByWorkflowExecRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool FindByWorkflowExecRequest::IsInitialized() const { - return true; -} - -void FindByWorkflowExecRequest::Swap(FindByWorkflowExecRequest* other) { - if (other == this) return; - InternalSwap(other); -} -void FindByWorkflowExecRequest::InternalSwap(FindByWorkflowExecRequest* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(exec_id_, other->exec_id_); - swap(direction_, other->direction_); -} - -::google::protobuf::Metadata FindByWorkflowExecRequest::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void AddTagRequest::InitAsDefaultInstance() { - ::flyteidl::artifact::_AddTagRequest_default_instance_._instance.get_mutable()->artifact_id_ = const_cast< ::flyteidl::core::ArtifactID*>( - ::flyteidl::core::ArtifactID::internal_default_instance()); -} -class AddTagRequest::HasBitSetters { - public: - static const ::flyteidl::core::ArtifactID& artifact_id(const AddTagRequest* msg); -}; - -const ::flyteidl::core::ArtifactID& -AddTagRequest::HasBitSetters::artifact_id(const AddTagRequest* msg) { - return *msg->artifact_id_; -} -void AddTagRequest::clear_artifact_id() { - if (GetArenaNoVirtual() == nullptr && artifact_id_ != nullptr) { - delete artifact_id_; - } - artifact_id_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int AddTagRequest::kArtifactIdFieldNumber; -const int AddTagRequest::kValueFieldNumber; -const int AddTagRequest::kOverwriteFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -AddTagRequest::AddTagRequest() - : ::google::protobuf::Message(), 
_internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.AddTagRequest) -} -AddTagRequest::AddTagRequest(const AddTagRequest& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - value_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.value().size() > 0) { - value_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.value_); - } - if (from.has_artifact_id()) { - artifact_id_ = new ::flyteidl::core::ArtifactID(*from.artifact_id_); - } else { - artifact_id_ = nullptr; - } - overwrite_ = from.overwrite_; - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.AddTagRequest) -} - -void AddTagRequest::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_AddTagRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - value_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - ::memset(&artifact_id_, 0, static_cast( - reinterpret_cast(&overwrite_) - - reinterpret_cast(&artifact_id_)) + sizeof(overwrite_)); -} - -AddTagRequest::~AddTagRequest() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.AddTagRequest) - SharedDtor(); -} - -void AddTagRequest::SharedDtor() { - value_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (this != internal_default_instance()) delete artifact_id_; -} - -void AddTagRequest::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AddTagRequest& AddTagRequest::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_AddTagRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void AddTagRequest::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.AddTagRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about 
cached_has_bits being unused - (void) cached_has_bits; - - value_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (GetArenaNoVirtual() == nullptr && artifact_id_ != nullptr) { - delete artifact_id_; - } - artifact_id_ = nullptr; - overwrite_ = false; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AddTagRequest::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.ArtifactID artifact_id = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::ArtifactID::_InternalParse; - object = msg->mutable_artifact_id(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // string value = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.artifact.AddTagRequest.value"); - object = msg->mutable_value(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; - } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, 
size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; - } - // bool overwrite = 3; - case 3: { - if (static_cast<::google::protobuf::uint8>(tag) != 24) goto handle_unusual; - msg->set_overwrite(::google::protobuf::internal::ReadVarint(&ptr)); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -string_till_end: - static_cast<::std::string*>(object)->clear(); - static_cast<::std::string*>(object)->reserve(size); - goto len_delim_till_end; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AddTagRequest::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.AddTagRequest) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.core.ArtifactID artifact_id = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_artifact_id())); - } else { - goto handle_unusual; - } - break; - } - - // string value = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { - 
DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->mutable_value())); - DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->value().data(), static_cast(this->value().length()), - ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.artifact.AddTagRequest.value")); - } else { - goto handle_unusual; - } - break; - } - - // bool overwrite = 3; - case 3: { - if (static_cast< ::google::protobuf::uint8>(tag) == (24 & 0xFF)) { - - DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< - bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( - input, &overwrite_))); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.AddTagRequest) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.AddTagRequest) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AddTagRequest::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.AddTagRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.ArtifactID artifact_id = 1; - if (this->has_artifact_id()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::artifact_id(this), output); - } - - // string value = 2; - if (this->value().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->value().data(), static_cast(this->value().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.AddTagRequest.value"); - 
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 2, this->value(), output); - } - - // bool overwrite = 3; - if (this->overwrite() != 0) { - ::google::protobuf::internal::WireFormatLite::WriteBool(3, this->overwrite(), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.AddTagRequest) -} - -::google::protobuf::uint8* AddTagRequest::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.AddTagRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.ArtifactID artifact_id = 1; - if (this->has_artifact_id()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::artifact_id(this), target); - } - - // string value = 2; - if (this->value().size() > 0) { - ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->value().data(), static_cast(this->value().length()), - ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.artifact.AddTagRequest.value"); - target = - ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 2, this->value(), target); - } - - // bool overwrite = 3; - if (this->overwrite() != 0) { - target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(3, this->overwrite(), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.AddTagRequest) - return target; -} - -size_t AddTagRequest::ByteSizeLong() const { -// 
@@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.AddTagRequest) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // string value = 2; - if (this->value().size() > 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->value()); - } - - // .flyteidl.core.ArtifactID artifact_id = 1; - if (this->has_artifact_id()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *artifact_id_); - } - - // bool overwrite = 3; - if (this->overwrite() != 0) { - total_size += 1 + 1; - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AddTagRequest::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.AddTagRequest) - GOOGLE_DCHECK_NE(&from, this); - const AddTagRequest* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.AddTagRequest) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.AddTagRequest) - MergeFrom(*source); - } -} - -void AddTagRequest::MergeFrom(const AddTagRequest& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.AddTagRequest) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.value().size() > 0) { - - 
value_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.value_); - } - if (from.has_artifact_id()) { - mutable_artifact_id()->::flyteidl::core::ArtifactID::MergeFrom(from.artifact_id()); - } - if (from.overwrite() != 0) { - set_overwrite(from.overwrite()); - } -} - -void AddTagRequest::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.AddTagRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void AddTagRequest::CopyFrom(const AddTagRequest& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.AddTagRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AddTagRequest::IsInitialized() const { - return true; -} - -void AddTagRequest::Swap(AddTagRequest* other) { - if (other == this) return; - InternalSwap(other); -} -void AddTagRequest::InternalSwap(AddTagRequest* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - value_.Swap(&other->value_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); - swap(artifact_id_, other->artifact_id_); - swap(overwrite_, other->overwrite_); -} - -::google::protobuf::Metadata AddTagRequest::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void AddTagResponse::InitAsDefaultInstance() { -} -class AddTagResponse::HasBitSetters { - public: -}; - -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -AddTagResponse::AddTagResponse() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // 
@@protoc_insertion_point(constructor:flyteidl.artifact.AddTagResponse) -} -AddTagResponse::AddTagResponse(const AddTagResponse& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.AddTagResponse) -} - -void AddTagResponse::SharedCtor() { -} - -AddTagResponse::~AddTagResponse() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.AddTagResponse) - SharedDtor(); -} - -void AddTagResponse::SharedDtor() { -} - -void AddTagResponse::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const AddTagResponse& AddTagResponse::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_AddTagResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void AddTagResponse::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.AddTagResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* AddTagResponse::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - default: { - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - 
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool AddTagResponse::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.AddTagResponse) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.AddTagResponse) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.AddTagResponse) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void AddTagResponse::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.AddTagResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.AddTagResponse) -} - -::google::protobuf::uint8* AddTagResponse::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.AddTagResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (_internal_metadata_.have_unknown_fields()) { - target = 
::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.AddTagResponse) - return target; -} - -size_t AddTagResponse::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.AddTagResponse) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void AddTagResponse::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.AddTagResponse) - GOOGLE_DCHECK_NE(&from, this); - const AddTagResponse* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.AddTagResponse) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.AddTagResponse) - MergeFrom(*source); - } -} - -void AddTagResponse::MergeFrom(const AddTagResponse& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.AddTagResponse) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - -} - -void AddTagResponse::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.AddTagResponse) - if (&from == this) 
return; - Clear(); - MergeFrom(from); -} - -void AddTagResponse::CopyFrom(const AddTagResponse& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.AddTagResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool AddTagResponse::IsInitialized() const { - return true; -} - -void AddTagResponse::Swap(AddTagResponse* other) { - if (other == this) return; - InternalSwap(other); -} -void AddTagResponse::InternalSwap(AddTagResponse* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); -} - -::google::protobuf::Metadata AddTagResponse::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void CreateTriggerRequest::InitAsDefaultInstance() { - ::flyteidl::artifact::_CreateTriggerRequest_default_instance_._instance.get_mutable()->trigger_launch_plan_ = const_cast< ::flyteidl::admin::LaunchPlan*>( - ::flyteidl::admin::LaunchPlan::internal_default_instance()); -} -class CreateTriggerRequest::HasBitSetters { - public: - static const ::flyteidl::admin::LaunchPlan& trigger_launch_plan(const CreateTriggerRequest* msg); -}; - -const ::flyteidl::admin::LaunchPlan& -CreateTriggerRequest::HasBitSetters::trigger_launch_plan(const CreateTriggerRequest* msg) { - return *msg->trigger_launch_plan_; -} -void CreateTriggerRequest::clear_trigger_launch_plan() { - if (GetArenaNoVirtual() == nullptr && trigger_launch_plan_ != nullptr) { - delete trigger_launch_plan_; - } - trigger_launch_plan_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int CreateTriggerRequest::kTriggerLaunchPlanFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -CreateTriggerRequest::CreateTriggerRequest() - : ::google::protobuf::Message(), 
_internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.CreateTriggerRequest) -} -CreateTriggerRequest::CreateTriggerRequest(const CreateTriggerRequest& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_trigger_launch_plan()) { - trigger_launch_plan_ = new ::flyteidl::admin::LaunchPlan(*from.trigger_launch_plan_); - } else { - trigger_launch_plan_ = nullptr; - } - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.CreateTriggerRequest) -} - -void CreateTriggerRequest::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_CreateTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - trigger_launch_plan_ = nullptr; -} - -CreateTriggerRequest::~CreateTriggerRequest() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.CreateTriggerRequest) - SharedDtor(); -} - -void CreateTriggerRequest::SharedDtor() { - if (this != internal_default_instance()) delete trigger_launch_plan_; -} - -void CreateTriggerRequest::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const CreateTriggerRequest& CreateTriggerRequest::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_CreateTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void CreateTriggerRequest::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.CreateTriggerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - if (GetArenaNoVirtual() == nullptr && trigger_launch_plan_ != nullptr) { - delete trigger_launch_plan_; - } - trigger_launch_plan_ = nullptr; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* CreateTriggerRequest::_InternalParse(const char* begin, const char* end, void* 
object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::admin::LaunchPlan::_InternalParse; - object = msg->mutable_trigger_launch_plan(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool CreateTriggerRequest::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.CreateTriggerRequest) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch 
(::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_trigger_launch_plan())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.CreateTriggerRequest) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.CreateTriggerRequest) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void CreateTriggerRequest::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.CreateTriggerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - if (this->has_trigger_launch_plan()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::trigger_launch_plan(this), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.CreateTriggerRequest) -} - -::google::protobuf::uint8* CreateTriggerRequest::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.CreateTriggerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.admin.LaunchPlan 
trigger_launch_plan = 1; - if (this->has_trigger_launch_plan()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::trigger_launch_plan(this), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.CreateTriggerRequest) - return target; -} - -size_t CreateTriggerRequest::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.CreateTriggerRequest) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - if (this->has_trigger_launch_plan()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *trigger_launch_plan_); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void CreateTriggerRequest::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.CreateTriggerRequest) - GOOGLE_DCHECK_NE(&from, this); - const CreateTriggerRequest* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.CreateTriggerRequest) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.CreateTriggerRequest) - 
MergeFrom(*source); - } -} - -void CreateTriggerRequest::MergeFrom(const CreateTriggerRequest& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.CreateTriggerRequest) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.has_trigger_launch_plan()) { - mutable_trigger_launch_plan()->::flyteidl::admin::LaunchPlan::MergeFrom(from.trigger_launch_plan()); - } -} - -void CreateTriggerRequest::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.CreateTriggerRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void CreateTriggerRequest::CopyFrom(const CreateTriggerRequest& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.CreateTriggerRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool CreateTriggerRequest::IsInitialized() const { - return true; -} - -void CreateTriggerRequest::Swap(CreateTriggerRequest* other) { - if (other == this) return; - InternalSwap(other); -} -void CreateTriggerRequest::InternalSwap(CreateTriggerRequest* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(trigger_launch_plan_, other->trigger_launch_plan_); -} - -::google::protobuf::Metadata CreateTriggerRequest::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void CreateTriggerResponse::InitAsDefaultInstance() { -} -class CreateTriggerResponse::HasBitSetters { - public: -}; - -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - 
-CreateTriggerResponse::CreateTriggerResponse() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.CreateTriggerResponse) -} -CreateTriggerResponse::CreateTriggerResponse(const CreateTriggerResponse& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.CreateTriggerResponse) -} - -void CreateTriggerResponse::SharedCtor() { -} - -CreateTriggerResponse::~CreateTriggerResponse() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.CreateTriggerResponse) - SharedDtor(); -} - -void CreateTriggerResponse::SharedDtor() { -} - -void CreateTriggerResponse::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const CreateTriggerResponse& CreateTriggerResponse::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_CreateTriggerResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void CreateTriggerResponse::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.CreateTriggerResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* CreateTriggerResponse::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - default: 
{ - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool CreateTriggerResponse::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.CreateTriggerResponse) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.CreateTriggerResponse) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.CreateTriggerResponse) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void CreateTriggerResponse::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.CreateTriggerResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.CreateTriggerResponse) -} - -::google::protobuf::uint8* CreateTriggerResponse::InternalSerializeWithCachedSizesToArray( - 
::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.CreateTriggerResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.CreateTriggerResponse) - return target; -} - -size_t CreateTriggerResponse::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.CreateTriggerResponse) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void CreateTriggerResponse::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.CreateTriggerResponse) - GOOGLE_DCHECK_NE(&from, this); - const CreateTriggerResponse* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.CreateTriggerResponse) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.CreateTriggerResponse) - MergeFrom(*source); - } -} - -void CreateTriggerResponse::MergeFrom(const CreateTriggerResponse& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.CreateTriggerResponse) - 
GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - -} - -void CreateTriggerResponse::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.CreateTriggerResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void CreateTriggerResponse::CopyFrom(const CreateTriggerResponse& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.CreateTriggerResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool CreateTriggerResponse::IsInitialized() const { - return true; -} - -void CreateTriggerResponse::Swap(CreateTriggerResponse* other) { - if (other == this) return; - InternalSwap(other); -} -void CreateTriggerResponse::InternalSwap(CreateTriggerResponse* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); -} - -::google::protobuf::Metadata CreateTriggerResponse::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void DeleteTriggerRequest::InitAsDefaultInstance() { - ::flyteidl::artifact::_DeleteTriggerRequest_default_instance_._instance.get_mutable()->trigger_id_ = const_cast< ::flyteidl::core::Identifier*>( - ::flyteidl::core::Identifier::internal_default_instance()); -} -class DeleteTriggerRequest::HasBitSetters { - public: - static const ::flyteidl::core::Identifier& trigger_id(const DeleteTriggerRequest* msg); -}; - -const ::flyteidl::core::Identifier& -DeleteTriggerRequest::HasBitSetters::trigger_id(const DeleteTriggerRequest* msg) { - return *msg->trigger_id_; -} -void DeleteTriggerRequest::clear_trigger_id() { - if 
(GetArenaNoVirtual() == nullptr && trigger_id_ != nullptr) { - delete trigger_id_; - } - trigger_id_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int DeleteTriggerRequest::kTriggerIdFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -DeleteTriggerRequest::DeleteTriggerRequest() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.DeleteTriggerRequest) -} -DeleteTriggerRequest::DeleteTriggerRequest(const DeleteTriggerRequest& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_trigger_id()) { - trigger_id_ = new ::flyteidl::core::Identifier(*from.trigger_id_); - } else { - trigger_id_ = nullptr; - } - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.DeleteTriggerRequest) -} - -void DeleteTriggerRequest::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_DeleteTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - trigger_id_ = nullptr; -} - -DeleteTriggerRequest::~DeleteTriggerRequest() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.DeleteTriggerRequest) - SharedDtor(); -} - -void DeleteTriggerRequest::SharedDtor() { - if (this != internal_default_instance()) delete trigger_id_; -} - -void DeleteTriggerRequest::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const DeleteTriggerRequest& DeleteTriggerRequest::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_DeleteTriggerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void DeleteTriggerRequest::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.DeleteTriggerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - if (GetArenaNoVirtual() 
== nullptr && trigger_id_ != nullptr) { - delete trigger_id_; - } - trigger_id_ = nullptr; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* DeleteTriggerRequest::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.Identifier trigger_id = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::Identifier::_InternalParse; - object = msg->mutable_trigger_id(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool DeleteTriggerRequest::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // 
@@protoc_insertion_point(parse_start:flyteidl.artifact.DeleteTriggerRequest) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.core.Identifier trigger_id = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_trigger_id())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.DeleteTriggerRequest) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.DeleteTriggerRequest) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void DeleteTriggerRequest::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.DeleteTriggerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.Identifier trigger_id = 1; - if (this->has_trigger_id()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::trigger_id(this), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.DeleteTriggerRequest) -} - -::google::protobuf::uint8* DeleteTriggerRequest::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // 
@@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.DeleteTriggerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.Identifier trigger_id = 1; - if (this->has_trigger_id()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::trigger_id(this), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.DeleteTriggerRequest) - return target; -} - -size_t DeleteTriggerRequest::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.DeleteTriggerRequest) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // .flyteidl.core.Identifier trigger_id = 1; - if (this->has_trigger_id()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *trigger_id_); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void DeleteTriggerRequest::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.DeleteTriggerRequest) - GOOGLE_DCHECK_NE(&from, this); - const DeleteTriggerRequest* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.DeleteTriggerRequest) - 
::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.DeleteTriggerRequest) - MergeFrom(*source); - } -} - -void DeleteTriggerRequest::MergeFrom(const DeleteTriggerRequest& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.DeleteTriggerRequest) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.has_trigger_id()) { - mutable_trigger_id()->::flyteidl::core::Identifier::MergeFrom(from.trigger_id()); - } -} - -void DeleteTriggerRequest::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.DeleteTriggerRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void DeleteTriggerRequest::CopyFrom(const DeleteTriggerRequest& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.DeleteTriggerRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool DeleteTriggerRequest::IsInitialized() const { - return true; -} - -void DeleteTriggerRequest::Swap(DeleteTriggerRequest* other) { - if (other == this) return; - InternalSwap(other); -} -void DeleteTriggerRequest::InternalSwap(DeleteTriggerRequest* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(trigger_id_, other->trigger_id_); -} - -::google::protobuf::Metadata DeleteTriggerRequest::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void DeleteTriggerResponse::InitAsDefaultInstance() { -} -class 
DeleteTriggerResponse::HasBitSetters { - public: -}; - -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -DeleteTriggerResponse::DeleteTriggerResponse() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.DeleteTriggerResponse) -} -DeleteTriggerResponse::DeleteTriggerResponse(const DeleteTriggerResponse& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.DeleteTriggerResponse) -} - -void DeleteTriggerResponse::SharedCtor() { -} - -DeleteTriggerResponse::~DeleteTriggerResponse() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.DeleteTriggerResponse) - SharedDtor(); -} - -void DeleteTriggerResponse::SharedDtor() { -} - -void DeleteTriggerResponse::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const DeleteTriggerResponse& DeleteTriggerResponse::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_DeleteTriggerResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void DeleteTriggerResponse::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.DeleteTriggerResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* DeleteTriggerResponse::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = 
begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - default: { - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool DeleteTriggerResponse::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.DeleteTriggerResponse) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.DeleteTriggerResponse) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.DeleteTriggerResponse) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void DeleteTriggerResponse::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.DeleteTriggerResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // 
@@protoc_insertion_point(serialize_end:flyteidl.artifact.DeleteTriggerResponse) -} - -::google::protobuf::uint8* DeleteTriggerResponse::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.DeleteTriggerResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.DeleteTriggerResponse) - return target; -} - -size_t DeleteTriggerResponse::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.DeleteTriggerResponse) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void DeleteTriggerResponse::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.DeleteTriggerResponse) - GOOGLE_DCHECK_NE(&from, this); - const DeleteTriggerResponse* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.DeleteTriggerResponse) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.DeleteTriggerResponse) - MergeFrom(*source); - } -} - -void 
DeleteTriggerResponse::MergeFrom(const DeleteTriggerResponse& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.DeleteTriggerResponse) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - -} - -void DeleteTriggerResponse::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.DeleteTriggerResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void DeleteTriggerResponse::CopyFrom(const DeleteTriggerResponse& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.DeleteTriggerResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool DeleteTriggerResponse::IsInitialized() const { - return true; -} - -void DeleteTriggerResponse::Swap(DeleteTriggerResponse* other) { - if (other == this) return; - InternalSwap(other); -} -void DeleteTriggerResponse::InternalSwap(DeleteTriggerResponse* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); -} - -::google::protobuf::Metadata DeleteTriggerResponse::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void ArtifactProducer::InitAsDefaultInstance() { - ::flyteidl::artifact::_ArtifactProducer_default_instance_._instance.get_mutable()->entity_id_ = const_cast< ::flyteidl::core::Identifier*>( - ::flyteidl::core::Identifier::internal_default_instance()); - ::flyteidl::artifact::_ArtifactProducer_default_instance_._instance.get_mutable()->outputs_ = const_cast< ::flyteidl::core::VariableMap*>( - 
::flyteidl::core::VariableMap::internal_default_instance()); -} -class ArtifactProducer::HasBitSetters { - public: - static const ::flyteidl::core::Identifier& entity_id(const ArtifactProducer* msg); - static const ::flyteidl::core::VariableMap& outputs(const ArtifactProducer* msg); -}; - -const ::flyteidl::core::Identifier& -ArtifactProducer::HasBitSetters::entity_id(const ArtifactProducer* msg) { - return *msg->entity_id_; -} -const ::flyteidl::core::VariableMap& -ArtifactProducer::HasBitSetters::outputs(const ArtifactProducer* msg) { - return *msg->outputs_; -} -void ArtifactProducer::clear_entity_id() { - if (GetArenaNoVirtual() == nullptr && entity_id_ != nullptr) { - delete entity_id_; - } - entity_id_ = nullptr; -} -void ArtifactProducer::clear_outputs() { - if (GetArenaNoVirtual() == nullptr && outputs_ != nullptr) { - delete outputs_; - } - outputs_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int ArtifactProducer::kEntityIdFieldNumber; -const int ArtifactProducer::kOutputsFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -ArtifactProducer::ArtifactProducer() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.ArtifactProducer) -} -ArtifactProducer::ArtifactProducer(const ArtifactProducer& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_entity_id()) { - entity_id_ = new ::flyteidl::core::Identifier(*from.entity_id_); - } else { - entity_id_ = nullptr; - } - if (from.has_outputs()) { - outputs_ = new ::flyteidl::core::VariableMap(*from.outputs_); - } else { - outputs_ = nullptr; - } - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.ArtifactProducer) -} - -void ArtifactProducer::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_ArtifactProducer_flyteidl_2fartifact_2fartifacts_2eproto.base); - 
::memset(&entity_id_, 0, static_cast( - reinterpret_cast(&outputs_) - - reinterpret_cast(&entity_id_)) + sizeof(outputs_)); -} - -ArtifactProducer::~ArtifactProducer() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.ArtifactProducer) - SharedDtor(); -} - -void ArtifactProducer::SharedDtor() { - if (this != internal_default_instance()) delete entity_id_; - if (this != internal_default_instance()) delete outputs_; -} - -void ArtifactProducer::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const ArtifactProducer& ArtifactProducer::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_ArtifactProducer_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void ArtifactProducer::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.ArtifactProducer) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - if (GetArenaNoVirtual() == nullptr && entity_id_ != nullptr) { - delete entity_id_; - } - entity_id_ = nullptr; - if (GetArenaNoVirtual() == nullptr && outputs_ != nullptr) { - delete outputs_; - } - outputs_ = nullptr; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* ArtifactProducer::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.Identifier entity_id = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = 
::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::Identifier::_InternalParse; - object = msg->mutable_entity_id(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // .flyteidl.core.VariableMap outputs = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::VariableMap::_InternalParse; - object = msg->mutable_outputs(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool ArtifactProducer::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.ArtifactProducer) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.core.Identifier 
entity_id = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_entity_id())); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.core.VariableMap outputs = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_outputs())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.ArtifactProducer) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.ArtifactProducer) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void ArtifactProducer::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.ArtifactProducer) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.Identifier entity_id = 1; - if (this->has_entity_id()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::entity_id(this), output); - } - - // .flyteidl.core.VariableMap outputs = 2; - if (this->has_outputs()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 2, HasBitSetters::outputs(this), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.ArtifactProducer) -} - -::google::protobuf::uint8* 
ArtifactProducer::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.ArtifactProducer) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.Identifier entity_id = 1; - if (this->has_entity_id()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::entity_id(this), target); - } - - // .flyteidl.core.VariableMap outputs = 2; - if (this->has_outputs()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 2, HasBitSetters::outputs(this), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.ArtifactProducer) - return target; -} - -size_t ArtifactProducer::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.ArtifactProducer) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // .flyteidl.core.Identifier entity_id = 1; - if (this->has_entity_id()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *entity_id_); - } - - // .flyteidl.core.VariableMap outputs = 2; - if (this->has_outputs()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *outputs_); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void 
ArtifactProducer::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.ArtifactProducer) - GOOGLE_DCHECK_NE(&from, this); - const ArtifactProducer* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.ArtifactProducer) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.ArtifactProducer) - MergeFrom(*source); - } -} - -void ArtifactProducer::MergeFrom(const ArtifactProducer& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.ArtifactProducer) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.has_entity_id()) { - mutable_entity_id()->::flyteidl::core::Identifier::MergeFrom(from.entity_id()); - } - if (from.has_outputs()) { - mutable_outputs()->::flyteidl::core::VariableMap::MergeFrom(from.outputs()); - } -} - -void ArtifactProducer::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.ArtifactProducer) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void ArtifactProducer::CopyFrom(const ArtifactProducer& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.ArtifactProducer) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool ArtifactProducer::IsInitialized() const { - return true; -} - -void ArtifactProducer::Swap(ArtifactProducer* other) { - if (other == this) return; - InternalSwap(other); -} -void ArtifactProducer::InternalSwap(ArtifactProducer* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(entity_id_, 
other->entity_id_); - swap(outputs_, other->outputs_); -} - -::google::protobuf::Metadata ArtifactProducer::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void RegisterProducerRequest::InitAsDefaultInstance() { -} -class RegisterProducerRequest::HasBitSetters { - public: -}; - -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int RegisterProducerRequest::kProducersFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -RegisterProducerRequest::RegisterProducerRequest() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.RegisterProducerRequest) -} -RegisterProducerRequest::RegisterProducerRequest(const RegisterProducerRequest& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr), - producers_(from.producers_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.RegisterProducerRequest) -} - -void RegisterProducerRequest::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_RegisterProducerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); -} - -RegisterProducerRequest::~RegisterProducerRequest() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.RegisterProducerRequest) - SharedDtor(); -} - -void RegisterProducerRequest::SharedDtor() { -} - -void RegisterProducerRequest::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const RegisterProducerRequest& RegisterProducerRequest::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_RegisterProducerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void 
RegisterProducerRequest::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.RegisterProducerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - producers_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* RegisterProducerRequest::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // repeated .flyteidl.artifact.ArtifactProducer producers = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - do { - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::artifact::ArtifactProducer::_InternalParse; - object = msg->add_producers(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - if (ptr >= end) break; - } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 10 && (ptr += 1)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return 
ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool RegisterProducerRequest::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.RegisterProducerRequest) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // repeated .flyteidl.artifact.ArtifactProducer producers = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, add_producers())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.RegisterProducerRequest) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.RegisterProducerRequest) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void RegisterProducerRequest::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.RegisterProducerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // repeated .flyteidl.artifact.ArtifactProducer producers = 1; - for (unsigned int i = 0, - n = static_cast(this->producers_size()); i < n; i++) { - 
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, - this->producers(static_cast(i)), - output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.RegisterProducerRequest) -} - -::google::protobuf::uint8* RegisterProducerRequest::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.RegisterProducerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // repeated .flyteidl.artifact.ArtifactProducer producers = 1; - for (unsigned int i = 0, - n = static_cast(this->producers_size()); i < n; i++) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, this->producers(static_cast(i)), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.RegisterProducerRequest) - return target; -} - -size_t RegisterProducerRequest::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.RegisterProducerRequest) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated .flyteidl.artifact.ArtifactProducer producers = 1; - { - unsigned int count = static_cast(this->producers_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < 
count; i++) { - total_size += - ::google::protobuf::internal::WireFormatLite::MessageSize( - this->producers(static_cast(i))); - } - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void RegisterProducerRequest::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.RegisterProducerRequest) - GOOGLE_DCHECK_NE(&from, this); - const RegisterProducerRequest* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.RegisterProducerRequest) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.RegisterProducerRequest) - MergeFrom(*source); - } -} - -void RegisterProducerRequest::MergeFrom(const RegisterProducerRequest& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.RegisterProducerRequest) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - producers_.MergeFrom(from.producers_); -} - -void RegisterProducerRequest::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.RegisterProducerRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void RegisterProducerRequest::CopyFrom(const RegisterProducerRequest& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.RegisterProducerRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool RegisterProducerRequest::IsInitialized() const { - return true; -} - -void RegisterProducerRequest::Swap(RegisterProducerRequest* other) { - if (other == this) 
return; - InternalSwap(other); -} -void RegisterProducerRequest::InternalSwap(RegisterProducerRequest* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - CastToBase(&producers_)->InternalSwap(CastToBase(&other->producers_)); -} - -::google::protobuf::Metadata RegisterProducerRequest::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void ArtifactConsumer::InitAsDefaultInstance() { - ::flyteidl::artifact::_ArtifactConsumer_default_instance_._instance.get_mutable()->entity_id_ = const_cast< ::flyteidl::core::Identifier*>( - ::flyteidl::core::Identifier::internal_default_instance()); - ::flyteidl::artifact::_ArtifactConsumer_default_instance_._instance.get_mutable()->inputs_ = const_cast< ::flyteidl::core::ParameterMap*>( - ::flyteidl::core::ParameterMap::internal_default_instance()); -} -class ArtifactConsumer::HasBitSetters { - public: - static const ::flyteidl::core::Identifier& entity_id(const ArtifactConsumer* msg); - static const ::flyteidl::core::ParameterMap& inputs(const ArtifactConsumer* msg); -}; - -const ::flyteidl::core::Identifier& -ArtifactConsumer::HasBitSetters::entity_id(const ArtifactConsumer* msg) { - return *msg->entity_id_; -} -const ::flyteidl::core::ParameterMap& -ArtifactConsumer::HasBitSetters::inputs(const ArtifactConsumer* msg) { - return *msg->inputs_; -} -void ArtifactConsumer::clear_entity_id() { - if (GetArenaNoVirtual() == nullptr && entity_id_ != nullptr) { - delete entity_id_; - } - entity_id_ = nullptr; -} -void ArtifactConsumer::clear_inputs() { - if (GetArenaNoVirtual() == nullptr && inputs_ != nullptr) { - delete inputs_; - } - inputs_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int 
ArtifactConsumer::kEntityIdFieldNumber; -const int ArtifactConsumer::kInputsFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -ArtifactConsumer::ArtifactConsumer() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.ArtifactConsumer) -} -ArtifactConsumer::ArtifactConsumer(const ArtifactConsumer& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_entity_id()) { - entity_id_ = new ::flyteidl::core::Identifier(*from.entity_id_); - } else { - entity_id_ = nullptr; - } - if (from.has_inputs()) { - inputs_ = new ::flyteidl::core::ParameterMap(*from.inputs_); - } else { - inputs_ = nullptr; - } - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.ArtifactConsumer) -} - -void ArtifactConsumer::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_ArtifactConsumer_flyteidl_2fartifact_2fartifacts_2eproto.base); - ::memset(&entity_id_, 0, static_cast( - reinterpret_cast(&inputs_) - - reinterpret_cast(&entity_id_)) + sizeof(inputs_)); -} - -ArtifactConsumer::~ArtifactConsumer() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.ArtifactConsumer) - SharedDtor(); -} - -void ArtifactConsumer::SharedDtor() { - if (this != internal_default_instance()) delete entity_id_; - if (this != internal_default_instance()) delete inputs_; -} - -void ArtifactConsumer::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const ArtifactConsumer& ArtifactConsumer::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_ArtifactConsumer_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void ArtifactConsumer::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.ArtifactConsumer) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about 
cached_has_bits being unused - (void) cached_has_bits; - - if (GetArenaNoVirtual() == nullptr && entity_id_ != nullptr) { - delete entity_id_; - } - entity_id_ = nullptr; - if (GetArenaNoVirtual() == nullptr && inputs_ != nullptr) { - delete inputs_; - } - inputs_ = nullptr; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* ArtifactConsumer::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.Identifier entity_id = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::Identifier::_InternalParse; - object = msg->mutable_entity_id(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // .flyteidl.core.ParameterMap inputs = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::ParameterMap::_InternalParse; - object = msg->mutable_inputs(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - 
return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool ArtifactConsumer::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.ArtifactConsumer) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.core.Identifier entity_id = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_entity_id())); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.core.ParameterMap inputs = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_inputs())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.ArtifactConsumer) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.ArtifactConsumer) - return false; -#undef DO_ -} -#endif 
// GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void ArtifactConsumer::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.ArtifactConsumer) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.Identifier entity_id = 1; - if (this->has_entity_id()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::entity_id(this), output); - } - - // .flyteidl.core.ParameterMap inputs = 2; - if (this->has_inputs()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 2, HasBitSetters::inputs(this), output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.ArtifactConsumer) -} - -::google::protobuf::uint8* ArtifactConsumer::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.ArtifactConsumer) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.Identifier entity_id = 1; - if (this->has_entity_id()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::entity_id(this), target); - } - - // .flyteidl.core.ParameterMap inputs = 2; - if (this->has_inputs()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 2, HasBitSetters::inputs(this), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.ArtifactConsumer) - return target; 
-} - -size_t ArtifactConsumer::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.ArtifactConsumer) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // .flyteidl.core.Identifier entity_id = 1; - if (this->has_entity_id()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *entity_id_); - } - - // .flyteidl.core.ParameterMap inputs = 2; - if (this->has_inputs()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *inputs_); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void ArtifactConsumer::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.ArtifactConsumer) - GOOGLE_DCHECK_NE(&from, this); - const ArtifactConsumer* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.ArtifactConsumer) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.ArtifactConsumer) - MergeFrom(*source); - } -} - -void ArtifactConsumer::MergeFrom(const ArtifactConsumer& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.ArtifactConsumer) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (from.has_entity_id()) { - 
mutable_entity_id()->::flyteidl::core::Identifier::MergeFrom(from.entity_id()); - } - if (from.has_inputs()) { - mutable_inputs()->::flyteidl::core::ParameterMap::MergeFrom(from.inputs()); - } -} - -void ArtifactConsumer::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.ArtifactConsumer) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void ArtifactConsumer::CopyFrom(const ArtifactConsumer& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.ArtifactConsumer) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool ArtifactConsumer::IsInitialized() const { - return true; -} - -void ArtifactConsumer::Swap(ArtifactConsumer* other) { - if (other == this) return; - InternalSwap(other); -} -void ArtifactConsumer::InternalSwap(ArtifactConsumer* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - swap(entity_id_, other->entity_id_); - swap(inputs_, other->inputs_); -} - -::google::protobuf::Metadata ArtifactConsumer::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void RegisterConsumerRequest::InitAsDefaultInstance() { -} -class RegisterConsumerRequest::HasBitSetters { - public: -}; - -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int RegisterConsumerRequest::kConsumersFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -RegisterConsumerRequest::RegisterConsumerRequest() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.RegisterConsumerRequest) -} -RegisterConsumerRequest::RegisterConsumerRequest(const RegisterConsumerRequest& 
from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr), - consumers_(from.consumers_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.RegisterConsumerRequest) -} - -void RegisterConsumerRequest::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_RegisterConsumerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); -} - -RegisterConsumerRequest::~RegisterConsumerRequest() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.RegisterConsumerRequest) - SharedDtor(); -} - -void RegisterConsumerRequest::SharedDtor() { -} - -void RegisterConsumerRequest::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const RegisterConsumerRequest& RegisterConsumerRequest::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_RegisterConsumerRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void RegisterConsumerRequest::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.RegisterConsumerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - consumers_.Clear(); - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* RegisterConsumerRequest::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - case 1: { - if 
(static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - do { - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::artifact::ArtifactConsumer::_InternalParse; - object = msg->add_consumers(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - if (ptr >= end) break; - } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 10 && (ptr += 1)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool RegisterConsumerRequest::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.RegisterConsumerRequest) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, add_consumers())); - } else { - goto handle_unusual; - } - break; - } - - 
default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.RegisterConsumerRequest) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.RegisterConsumerRequest) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void RegisterConsumerRequest::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.RegisterConsumerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - for (unsigned int i = 0, - n = static_cast(this->consumers_size()); i < n; i++) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, - this->consumers(static_cast(i)), - output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.RegisterConsumerRequest) -} - -::google::protobuf::uint8* RegisterConsumerRequest::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.RegisterConsumerRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - for (unsigned int i = 0, - n = static_cast(this->consumers_size()); i < n; i++) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, this->consumers(static_cast(i)), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target 
= ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.RegisterConsumerRequest) - return target; -} - -size_t RegisterConsumerRequest::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.RegisterConsumerRequest) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - { - unsigned int count = static_cast(this->consumers_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::google::protobuf::internal::WireFormatLite::MessageSize( - this->consumers(static_cast(i))); - } - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void RegisterConsumerRequest::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.RegisterConsumerRequest) - GOOGLE_DCHECK_NE(&from, this); - const RegisterConsumerRequest* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.RegisterConsumerRequest) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.RegisterConsumerRequest) - MergeFrom(*source); - } -} - -void RegisterConsumerRequest::MergeFrom(const RegisterConsumerRequest& from) { -// 
@@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.RegisterConsumerRequest) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - consumers_.MergeFrom(from.consumers_); -} - -void RegisterConsumerRequest::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.RegisterConsumerRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void RegisterConsumerRequest::CopyFrom(const RegisterConsumerRequest& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.RegisterConsumerRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool RegisterConsumerRequest::IsInitialized() const { - return true; -} - -void RegisterConsumerRequest::Swap(RegisterConsumerRequest* other) { - if (other == this) return; - InternalSwap(other); -} -void RegisterConsumerRequest::InternalSwap(RegisterConsumerRequest* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - CastToBase(&consumers_)->InternalSwap(CastToBase(&other->consumers_)); -} - -::google::protobuf::Metadata RegisterConsumerRequest::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void RegisterResponse::InitAsDefaultInstance() { -} -class RegisterResponse::HasBitSetters { - public: -}; - -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -RegisterResponse::RegisterResponse() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // 
@@protoc_insertion_point(constructor:flyteidl.artifact.RegisterResponse) -} -RegisterResponse::RegisterResponse(const RegisterResponse& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.RegisterResponse) -} - -void RegisterResponse::SharedCtor() { -} - -RegisterResponse::~RegisterResponse() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.RegisterResponse) - SharedDtor(); -} - -void RegisterResponse::SharedDtor() { -} - -void RegisterResponse::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const RegisterResponse& RegisterResponse::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_RegisterResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void RegisterResponse::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.RegisterResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* RegisterResponse::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - default: { - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = 
res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool RegisterResponse::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.RegisterResponse) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.RegisterResponse) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.RegisterResponse) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void RegisterResponse::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.RegisterResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.RegisterResponse) -} - -::google::protobuf::uint8* RegisterResponse::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.RegisterResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (_internal_metadata_.have_unknown_fields()) { - target 
= ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.RegisterResponse) - return target; -} - -size_t RegisterResponse::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.RegisterResponse) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void RegisterResponse::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.RegisterResponse) - GOOGLE_DCHECK_NE(&from, this); - const RegisterResponse* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.RegisterResponse) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.RegisterResponse) - MergeFrom(*source); - } -} - -void RegisterResponse::MergeFrom(const RegisterResponse& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.RegisterResponse) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - -} - -void RegisterResponse::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.RegisterResponse) 
- if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void RegisterResponse::CopyFrom(const RegisterResponse& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.RegisterResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool RegisterResponse::IsInitialized() const { - return true; -} - -void RegisterResponse::Swap(RegisterResponse* other) { - if (other == this) return; - InternalSwap(other); -} -void RegisterResponse::InternalSwap(RegisterResponse* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); -} - -::google::protobuf::Metadata RegisterResponse::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void ExecutionInputsRequest::InitAsDefaultInstance() { - ::flyteidl::artifact::_ExecutionInputsRequest_default_instance_._instance.get_mutable()->execution_id_ = const_cast< ::flyteidl::core::WorkflowExecutionIdentifier*>( - ::flyteidl::core::WorkflowExecutionIdentifier::internal_default_instance()); -} -class ExecutionInputsRequest::HasBitSetters { - public: - static const ::flyteidl::core::WorkflowExecutionIdentifier& execution_id(const ExecutionInputsRequest* msg); -}; - -const ::flyteidl::core::WorkflowExecutionIdentifier& -ExecutionInputsRequest::HasBitSetters::execution_id(const ExecutionInputsRequest* msg) { - return *msg->execution_id_; -} -void ExecutionInputsRequest::clear_execution_id() { - if (GetArenaNoVirtual() == nullptr && execution_id_ != nullptr) { - delete execution_id_; - } - execution_id_ = nullptr; -} -void ExecutionInputsRequest::clear_inputs() { - inputs_.Clear(); -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int ExecutionInputsRequest::kExecutionIdFieldNumber; -const int 
ExecutionInputsRequest::kInputsFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -ExecutionInputsRequest::ExecutionInputsRequest() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.ExecutionInputsRequest) -} -ExecutionInputsRequest::ExecutionInputsRequest(const ExecutionInputsRequest& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr), - inputs_(from.inputs_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_execution_id()) { - execution_id_ = new ::flyteidl::core::WorkflowExecutionIdentifier(*from.execution_id_); - } else { - execution_id_ = nullptr; - } - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.ExecutionInputsRequest) -} - -void ExecutionInputsRequest::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_ExecutionInputsRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - execution_id_ = nullptr; -} - -ExecutionInputsRequest::~ExecutionInputsRequest() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.ExecutionInputsRequest) - SharedDtor(); -} - -void ExecutionInputsRequest::SharedDtor() { - if (this != internal_default_instance()) delete execution_id_; -} - -void ExecutionInputsRequest::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const ExecutionInputsRequest& ExecutionInputsRequest::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_ExecutionInputsRequest_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void ExecutionInputsRequest::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.ExecutionInputsRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - inputs_.Clear(); - if (GetArenaNoVirtual() == nullptr && execution_id_ != nullptr) { - delete 
execution_id_; - } - execution_id_ = nullptr; - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* ExecutionInputsRequest::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::WorkflowExecutionIdentifier::_InternalParse; - object = msg->mutable_execution_id(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // repeated .flyteidl.core.ArtifactID inputs = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - do { - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::ArtifactID::_InternalParse; - object = msg->add_inputs(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - if (ptr >= end) break; - } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 18 && (ptr += 1)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = 
UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool ExecutionInputsRequest::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.ExecutionInputsRequest) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_execution_id())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .flyteidl.core.ArtifactID inputs = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, add_inputs())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.ExecutionInputsRequest) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.ExecutionInputsRequest) - return false; 
-#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void ExecutionInputsRequest::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.ExecutionInputsRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - if (this->has_execution_id()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::execution_id(this), output); - } - - // repeated .flyteidl.core.ArtifactID inputs = 2; - for (unsigned int i = 0, - n = static_cast(this->inputs_size()); i < n; i++) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 2, - this->inputs(static_cast(i)), - output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.ExecutionInputsRequest) -} - -::google::protobuf::uint8* ExecutionInputsRequest::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.ExecutionInputsRequest) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - if (this->has_execution_id()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::execution_id(this), target); - } - - // repeated .flyteidl.core.ArtifactID inputs = 2; - for (unsigned int i = 0, - n = static_cast(this->inputs_size()); i < n; i++) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 2, this->inputs(static_cast(i)), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = 
::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.ExecutionInputsRequest) - return target; -} - -size_t ExecutionInputsRequest::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.ExecutionInputsRequest) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated .flyteidl.core.ArtifactID inputs = 2; - { - unsigned int count = static_cast(this->inputs_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::google::protobuf::internal::WireFormatLite::MessageSize( - this->inputs(static_cast(i))); - } - } - - // .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - if (this->has_execution_id()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *execution_id_); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void ExecutionInputsRequest::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.ExecutionInputsRequest) - GOOGLE_DCHECK_NE(&from, this); - const ExecutionInputsRequest* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.ExecutionInputsRequest) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // 
@@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.ExecutionInputsRequest) - MergeFrom(*source); - } -} - -void ExecutionInputsRequest::MergeFrom(const ExecutionInputsRequest& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.ExecutionInputsRequest) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - inputs_.MergeFrom(from.inputs_); - if (from.has_execution_id()) { - mutable_execution_id()->::flyteidl::core::WorkflowExecutionIdentifier::MergeFrom(from.execution_id()); - } -} - -void ExecutionInputsRequest::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.ExecutionInputsRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void ExecutionInputsRequest::CopyFrom(const ExecutionInputsRequest& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.ExecutionInputsRequest) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool ExecutionInputsRequest::IsInitialized() const { - return true; -} - -void ExecutionInputsRequest::Swap(ExecutionInputsRequest* other) { - if (other == this) return; - InternalSwap(other); -} -void ExecutionInputsRequest::InternalSwap(ExecutionInputsRequest* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - CastToBase(&inputs_)->InternalSwap(CastToBase(&other->inputs_)); - swap(execution_id_, other->execution_id_); -} - -::google::protobuf::Metadata ExecutionInputsRequest::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// =================================================================== - -void 
ExecutionInputsResponse::InitAsDefaultInstance() { -} -class ExecutionInputsResponse::HasBitSetters { - public: -}; - -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -ExecutionInputsResponse::ExecutionInputsResponse() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.artifact.ExecutionInputsResponse) -} -ExecutionInputsResponse::ExecutionInputsResponse(const ExecutionInputsResponse& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - // @@protoc_insertion_point(copy_constructor:flyteidl.artifact.ExecutionInputsResponse) -} - -void ExecutionInputsResponse::SharedCtor() { -} - -ExecutionInputsResponse::~ExecutionInputsResponse() { - // @@protoc_insertion_point(destructor:flyteidl.artifact.ExecutionInputsResponse) - SharedDtor(); -} - -void ExecutionInputsResponse::SharedDtor() { -} - -void ExecutionInputsResponse::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const ExecutionInputsResponse& ExecutionInputsResponse::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_ExecutionInputsResponse_flyteidl_2fartifact_2fartifacts_2eproto.base); - return *internal_default_instance(); -} - - -void ExecutionInputsResponse::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.artifact.ExecutionInputsResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - _internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* ExecutionInputsResponse::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 
tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - default: { - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool ExecutionInputsResponse::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.artifact.ExecutionInputsResponse) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.artifact.ExecutionInputsResponse) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.artifact.ExecutionInputsResponse) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void ExecutionInputsResponse::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // @@protoc_insertion_point(serialize_start:flyteidl.artifact.ExecutionInputsResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (_internal_metadata_.have_unknown_fields()) { - 
::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.artifact.ExecutionInputsResponse) -} - -::google::protobuf::uint8* ExecutionInputsResponse::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.artifact.ExecutionInputsResponse) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.artifact.ExecutionInputsResponse) - return target; -} - -size_t ExecutionInputsResponse::ByteSizeLong() const { -// @@protoc_insertion_point(message_byte_size_start:flyteidl.artifact.ExecutionInputsResponse) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void ExecutionInputsResponse::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.artifact.ExecutionInputsResponse) - GOOGLE_DCHECK_NE(&from, this); - const ExecutionInputsResponse* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.artifact.ExecutionInputsResponse) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // 
@@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.artifact.ExecutionInputsResponse) - MergeFrom(*source); - } -} - -void ExecutionInputsResponse::MergeFrom(const ExecutionInputsResponse& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.artifact.ExecutionInputsResponse) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - -} - -void ExecutionInputsResponse::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.artifact.ExecutionInputsResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void ExecutionInputsResponse::CopyFrom(const ExecutionInputsResponse& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.artifact.ExecutionInputsResponse) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool ExecutionInputsResponse::IsInitialized() const { - return true; -} - -void ExecutionInputsResponse::Swap(ExecutionInputsResponse* other) { - if (other == this) return; - InternalSwap(other); -} -void ExecutionInputsResponse::InternalSwap(ExecutionInputsResponse* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); -} - -::google::protobuf::Metadata ExecutionInputsResponse::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fartifact_2fartifacts_2eproto); - return ::file_level_metadata_flyteidl_2fartifact_2fartifacts_2eproto[kIndexInFileMessages]; -} - - -// @@protoc_insertion_point(namespace_scope) -} // namespace artifact -} // namespace flyteidl -namespace google { -namespace protobuf { -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::Artifact* Arena::CreateMaybeMessage< ::flyteidl::artifact::Artifact >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::Artifact >(arena); -} 
-template<> PROTOBUF_NOINLINE ::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse* Arena::CreateMaybeMessage< ::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::CreateArtifactRequest* Arena::CreateMaybeMessage< ::flyteidl::artifact::CreateArtifactRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::CreateArtifactRequest >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::ArtifactSource* Arena::CreateMaybeMessage< ::flyteidl::artifact::ArtifactSource >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::ArtifactSource >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::ArtifactSpec* Arena::CreateMaybeMessage< ::flyteidl::artifact::ArtifactSpec >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::ArtifactSpec >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::CreateArtifactResponse* Arena::CreateMaybeMessage< ::flyteidl::artifact::CreateArtifactResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::CreateArtifactResponse >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::GetArtifactRequest* Arena::CreateMaybeMessage< ::flyteidl::artifact::GetArtifactRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::GetArtifactRequest >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::GetArtifactResponse* Arena::CreateMaybeMessage< ::flyteidl::artifact::GetArtifactResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::GetArtifactResponse >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::SearchOptions* Arena::CreateMaybeMessage< ::flyteidl::artifact::SearchOptions >(Arena* arena) { - return Arena::CreateInternal< 
::flyteidl::artifact::SearchOptions >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::SearchArtifactsRequest* Arena::CreateMaybeMessage< ::flyteidl::artifact::SearchArtifactsRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::SearchArtifactsRequest >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::SearchArtifactsResponse* Arena::CreateMaybeMessage< ::flyteidl::artifact::SearchArtifactsResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::SearchArtifactsResponse >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::FindByWorkflowExecRequest* Arena::CreateMaybeMessage< ::flyteidl::artifact::FindByWorkflowExecRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::FindByWorkflowExecRequest >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::AddTagRequest* Arena::CreateMaybeMessage< ::flyteidl::artifact::AddTagRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::AddTagRequest >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::AddTagResponse* Arena::CreateMaybeMessage< ::flyteidl::artifact::AddTagResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::AddTagResponse >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::CreateTriggerRequest* Arena::CreateMaybeMessage< ::flyteidl::artifact::CreateTriggerRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::CreateTriggerRequest >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::CreateTriggerResponse* Arena::CreateMaybeMessage< ::flyteidl::artifact::CreateTriggerResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::CreateTriggerResponse >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::DeleteTriggerRequest* Arena::CreateMaybeMessage< ::flyteidl::artifact::DeleteTriggerRequest >(Arena* arena) { - return Arena::CreateInternal< 
::flyteidl::artifact::DeleteTriggerRequest >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::DeleteTriggerResponse* Arena::CreateMaybeMessage< ::flyteidl::artifact::DeleteTriggerResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::DeleteTriggerResponse >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::ArtifactProducer* Arena::CreateMaybeMessage< ::flyteidl::artifact::ArtifactProducer >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::ArtifactProducer >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::RegisterProducerRequest* Arena::CreateMaybeMessage< ::flyteidl::artifact::RegisterProducerRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::RegisterProducerRequest >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::ArtifactConsumer* Arena::CreateMaybeMessage< ::flyteidl::artifact::ArtifactConsumer >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::ArtifactConsumer >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::RegisterConsumerRequest* Arena::CreateMaybeMessage< ::flyteidl::artifact::RegisterConsumerRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::RegisterConsumerRequest >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::RegisterResponse* Arena::CreateMaybeMessage< ::flyteidl::artifact::RegisterResponse >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::RegisterResponse >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::ExecutionInputsRequest* Arena::CreateMaybeMessage< ::flyteidl::artifact::ExecutionInputsRequest >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::artifact::ExecutionInputsRequest >(arena); -} -template<> PROTOBUF_NOINLINE ::flyteidl::artifact::ExecutionInputsResponse* Arena::CreateMaybeMessage< ::flyteidl::artifact::ExecutionInputsResponse >(Arena* arena) { - return Arena::CreateInternal< 
::flyteidl::artifact::ExecutionInputsResponse >(arena); -} -} // namespace protobuf -} // namespace google - -// @@protoc_insertion_point(global_scope) -#include diff --git a/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.pb.h b/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.pb.h deleted file mode 100644 index bbdb47cfa8..0000000000 --- a/flyteidl/gen/pb-cpp/flyteidl/artifact/artifacts.pb.h +++ /dev/null @@ -1,5605 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: flyteidl/artifact/artifacts.proto - -#ifndef PROTOBUF_INCLUDED_flyteidl_2fartifact_2fartifacts_2eproto -#define PROTOBUF_INCLUDED_flyteidl_2fartifact_2fartifacts_2eproto - -#include -#include - -#include -#if PROTOBUF_VERSION < 3007000 -#error This file was generated by a newer version of protoc which is -#error incompatible with your Protocol Buffer headers. Please update -#error your headers. -#endif -#if 3007000 < PROTOBUF_MIN_PROTOC_VERSION -#error This file was generated by an older version of protoc which is -#error incompatible with your Protocol Buffer headers. Please -#error regenerate this file with a newer version of protoc. -#endif - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include // IWYU pragma: export -#include // IWYU pragma: export -#include // IWYU pragma: export -#include -#include -#include -#include -#include -#include "google/api/annotations.pb.h" -#include "flyteidl/admin/launch_plan.pb.h" -#include "flyteidl/core/literals.pb.h" -#include "flyteidl/core/types.pb.h" -#include "flyteidl/core/identifier.pb.h" -#include "flyteidl/core/artifact_id.pb.h" -#include "flyteidl/core/interface.pb.h" -#include "flyteidl/event/cloudevents.pb.h" -// @@protoc_insertion_point(includes) -#include -#define PROTOBUF_INTERNAL_EXPORT_flyteidl_2fartifact_2fartifacts_2eproto - -// Internal implementation detail -- do not use these members. 
-struct TableStruct_flyteidl_2fartifact_2fartifacts_2eproto { - static const ::google::protobuf::internal::ParseTableField entries[] - PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::google::protobuf::internal::AuxillaryParseTableField aux[] - PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::google::protobuf::internal::ParseTable schema[25] - PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::google::protobuf::internal::FieldMetadata field_metadata[]; - static const ::google::protobuf::internal::SerializationTable serialization_table[]; - static const ::google::protobuf::uint32 offsets[]; -}; -void AddDescriptors_flyteidl_2fartifact_2fartifacts_2eproto(); -namespace flyteidl { -namespace artifact { -class AddTagRequest; -class AddTagRequestDefaultTypeInternal; -extern AddTagRequestDefaultTypeInternal _AddTagRequest_default_instance_; -class AddTagResponse; -class AddTagResponseDefaultTypeInternal; -extern AddTagResponseDefaultTypeInternal _AddTagResponse_default_instance_; -class Artifact; -class ArtifactDefaultTypeInternal; -extern ArtifactDefaultTypeInternal _Artifact_default_instance_; -class ArtifactConsumer; -class ArtifactConsumerDefaultTypeInternal; -extern ArtifactConsumerDefaultTypeInternal _ArtifactConsumer_default_instance_; -class ArtifactProducer; -class ArtifactProducerDefaultTypeInternal; -extern ArtifactProducerDefaultTypeInternal _ArtifactProducer_default_instance_; -class ArtifactSource; -class ArtifactSourceDefaultTypeInternal; -extern ArtifactSourceDefaultTypeInternal _ArtifactSource_default_instance_; -class ArtifactSpec; -class ArtifactSpecDefaultTypeInternal; -extern ArtifactSpecDefaultTypeInternal _ArtifactSpec_default_instance_; -class CreateArtifactRequest; -class CreateArtifactRequestDefaultTypeInternal; -extern CreateArtifactRequestDefaultTypeInternal _CreateArtifactRequest_default_instance_; -class CreateArtifactRequest_PartitionsEntry_DoNotUse; -class 
CreateArtifactRequest_PartitionsEntry_DoNotUseDefaultTypeInternal; -extern CreateArtifactRequest_PartitionsEntry_DoNotUseDefaultTypeInternal _CreateArtifactRequest_PartitionsEntry_DoNotUse_default_instance_; -class CreateArtifactResponse; -class CreateArtifactResponseDefaultTypeInternal; -extern CreateArtifactResponseDefaultTypeInternal _CreateArtifactResponse_default_instance_; -class CreateTriggerRequest; -class CreateTriggerRequestDefaultTypeInternal; -extern CreateTriggerRequestDefaultTypeInternal _CreateTriggerRequest_default_instance_; -class CreateTriggerResponse; -class CreateTriggerResponseDefaultTypeInternal; -extern CreateTriggerResponseDefaultTypeInternal _CreateTriggerResponse_default_instance_; -class DeleteTriggerRequest; -class DeleteTriggerRequestDefaultTypeInternal; -extern DeleteTriggerRequestDefaultTypeInternal _DeleteTriggerRequest_default_instance_; -class DeleteTriggerResponse; -class DeleteTriggerResponseDefaultTypeInternal; -extern DeleteTriggerResponseDefaultTypeInternal _DeleteTriggerResponse_default_instance_; -class ExecutionInputsRequest; -class ExecutionInputsRequestDefaultTypeInternal; -extern ExecutionInputsRequestDefaultTypeInternal _ExecutionInputsRequest_default_instance_; -class ExecutionInputsResponse; -class ExecutionInputsResponseDefaultTypeInternal; -extern ExecutionInputsResponseDefaultTypeInternal _ExecutionInputsResponse_default_instance_; -class FindByWorkflowExecRequest; -class FindByWorkflowExecRequestDefaultTypeInternal; -extern FindByWorkflowExecRequestDefaultTypeInternal _FindByWorkflowExecRequest_default_instance_; -class GetArtifactRequest; -class GetArtifactRequestDefaultTypeInternal; -extern GetArtifactRequestDefaultTypeInternal _GetArtifactRequest_default_instance_; -class GetArtifactResponse; -class GetArtifactResponseDefaultTypeInternal; -extern GetArtifactResponseDefaultTypeInternal _GetArtifactResponse_default_instance_; -class RegisterConsumerRequest; -class RegisterConsumerRequestDefaultTypeInternal; 
-extern RegisterConsumerRequestDefaultTypeInternal _RegisterConsumerRequest_default_instance_; -class RegisterProducerRequest; -class RegisterProducerRequestDefaultTypeInternal; -extern RegisterProducerRequestDefaultTypeInternal _RegisterProducerRequest_default_instance_; -class RegisterResponse; -class RegisterResponseDefaultTypeInternal; -extern RegisterResponseDefaultTypeInternal _RegisterResponse_default_instance_; -class SearchArtifactsRequest; -class SearchArtifactsRequestDefaultTypeInternal; -extern SearchArtifactsRequestDefaultTypeInternal _SearchArtifactsRequest_default_instance_; -class SearchArtifactsResponse; -class SearchArtifactsResponseDefaultTypeInternal; -extern SearchArtifactsResponseDefaultTypeInternal _SearchArtifactsResponse_default_instance_; -class SearchOptions; -class SearchOptionsDefaultTypeInternal; -extern SearchOptionsDefaultTypeInternal _SearchOptions_default_instance_; -} // namespace artifact -} // namespace flyteidl -namespace google { -namespace protobuf { -template<> ::flyteidl::artifact::AddTagRequest* Arena::CreateMaybeMessage<::flyteidl::artifact::AddTagRequest>(Arena*); -template<> ::flyteidl::artifact::AddTagResponse* Arena::CreateMaybeMessage<::flyteidl::artifact::AddTagResponse>(Arena*); -template<> ::flyteidl::artifact::Artifact* Arena::CreateMaybeMessage<::flyteidl::artifact::Artifact>(Arena*); -template<> ::flyteidl::artifact::ArtifactConsumer* Arena::CreateMaybeMessage<::flyteidl::artifact::ArtifactConsumer>(Arena*); -template<> ::flyteidl::artifact::ArtifactProducer* Arena::CreateMaybeMessage<::flyteidl::artifact::ArtifactProducer>(Arena*); -template<> ::flyteidl::artifact::ArtifactSource* Arena::CreateMaybeMessage<::flyteidl::artifact::ArtifactSource>(Arena*); -template<> ::flyteidl::artifact::ArtifactSpec* Arena::CreateMaybeMessage<::flyteidl::artifact::ArtifactSpec>(Arena*); -template<> ::flyteidl::artifact::CreateArtifactRequest* Arena::CreateMaybeMessage<::flyteidl::artifact::CreateArtifactRequest>(Arena*); 
-template<> ::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse* Arena::CreateMaybeMessage<::flyteidl::artifact::CreateArtifactRequest_PartitionsEntry_DoNotUse>(Arena*); -template<> ::flyteidl::artifact::CreateArtifactResponse* Arena::CreateMaybeMessage<::flyteidl::artifact::CreateArtifactResponse>(Arena*); -template<> ::flyteidl::artifact::CreateTriggerRequest* Arena::CreateMaybeMessage<::flyteidl::artifact::CreateTriggerRequest>(Arena*); -template<> ::flyteidl::artifact::CreateTriggerResponse* Arena::CreateMaybeMessage<::flyteidl::artifact::CreateTriggerResponse>(Arena*); -template<> ::flyteidl::artifact::DeleteTriggerRequest* Arena::CreateMaybeMessage<::flyteidl::artifact::DeleteTriggerRequest>(Arena*); -template<> ::flyteidl::artifact::DeleteTriggerResponse* Arena::CreateMaybeMessage<::flyteidl::artifact::DeleteTriggerResponse>(Arena*); -template<> ::flyteidl::artifact::ExecutionInputsRequest* Arena::CreateMaybeMessage<::flyteidl::artifact::ExecutionInputsRequest>(Arena*); -template<> ::flyteidl::artifact::ExecutionInputsResponse* Arena::CreateMaybeMessage<::flyteidl::artifact::ExecutionInputsResponse>(Arena*); -template<> ::flyteidl::artifact::FindByWorkflowExecRequest* Arena::CreateMaybeMessage<::flyteidl::artifact::FindByWorkflowExecRequest>(Arena*); -template<> ::flyteidl::artifact::GetArtifactRequest* Arena::CreateMaybeMessage<::flyteidl::artifact::GetArtifactRequest>(Arena*); -template<> ::flyteidl::artifact::GetArtifactResponse* Arena::CreateMaybeMessage<::flyteidl::artifact::GetArtifactResponse>(Arena*); -template<> ::flyteidl::artifact::RegisterConsumerRequest* Arena::CreateMaybeMessage<::flyteidl::artifact::RegisterConsumerRequest>(Arena*); -template<> ::flyteidl::artifact::RegisterProducerRequest* Arena::CreateMaybeMessage<::flyteidl::artifact::RegisterProducerRequest>(Arena*); -template<> ::flyteidl::artifact::RegisterResponse* Arena::CreateMaybeMessage<::flyteidl::artifact::RegisterResponse>(Arena*); -template<> 
::flyteidl::artifact::SearchArtifactsRequest* Arena::CreateMaybeMessage<::flyteidl::artifact::SearchArtifactsRequest>(Arena*); -template<> ::flyteidl::artifact::SearchArtifactsResponse* Arena::CreateMaybeMessage<::flyteidl::artifact::SearchArtifactsResponse>(Arena*); -template<> ::flyteidl::artifact::SearchOptions* Arena::CreateMaybeMessage<::flyteidl::artifact::SearchOptions>(Arena*); -} // namespace protobuf -} // namespace google -namespace flyteidl { -namespace artifact { - -enum FindByWorkflowExecRequest_Direction { - FindByWorkflowExecRequest_Direction_INPUTS = 0, - FindByWorkflowExecRequest_Direction_OUTPUTS = 1, - FindByWorkflowExecRequest_Direction_FindByWorkflowExecRequest_Direction_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::google::protobuf::int32>::min(), - FindByWorkflowExecRequest_Direction_FindByWorkflowExecRequest_Direction_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::google::protobuf::int32>::max() -}; -bool FindByWorkflowExecRequest_Direction_IsValid(int value); -const FindByWorkflowExecRequest_Direction FindByWorkflowExecRequest_Direction_Direction_MIN = FindByWorkflowExecRequest_Direction_INPUTS; -const FindByWorkflowExecRequest_Direction FindByWorkflowExecRequest_Direction_Direction_MAX = FindByWorkflowExecRequest_Direction_OUTPUTS; -const int FindByWorkflowExecRequest_Direction_Direction_ARRAYSIZE = FindByWorkflowExecRequest_Direction_Direction_MAX + 1; - -const ::google::protobuf::EnumDescriptor* FindByWorkflowExecRequest_Direction_descriptor(); -inline const ::std::string& FindByWorkflowExecRequest_Direction_Name(FindByWorkflowExecRequest_Direction value) { - return ::google::protobuf::internal::NameOfEnum( - FindByWorkflowExecRequest_Direction_descriptor(), value); -} -inline bool FindByWorkflowExecRequest_Direction_Parse( - const ::std::string& name, FindByWorkflowExecRequest_Direction* value) { - return ::google::protobuf::internal::ParseNamedEnum( - FindByWorkflowExecRequest_Direction_descriptor(), name, value); -} -// 
=================================================================== - -class Artifact final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.Artifact) */ { - public: - Artifact(); - virtual ~Artifact(); - - Artifact(const Artifact& from); - - inline Artifact& operator=(const Artifact& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - Artifact(Artifact&& from) noexcept - : Artifact() { - *this = ::std::move(from); - } - - inline Artifact& operator=(Artifact&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const Artifact& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const Artifact* internal_default_instance() { - return reinterpret_cast( - &_Artifact_default_instance_); - } - static constexpr int kIndexInFileMessages = - 0; - - void Swap(Artifact* other); - friend void swap(Artifact& a, Artifact& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline Artifact* New() const final { - return CreateMaybeMessage(nullptr); - } - - Artifact* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const Artifact& from); - void MergeFrom(const Artifact& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - 
::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(Artifact* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // repeated string tags = 3; - int tags_size() const; - void clear_tags(); - static const int kTagsFieldNumber = 3; - const ::std::string& tags(int index) const; - ::std::string* mutable_tags(int index); - void set_tags(int index, const ::std::string& value); - #if LANG_CXX11 - void set_tags(int index, ::std::string&& value); - #endif - void set_tags(int index, const char* value); - void set_tags(int index, const char* value, size_t size); - ::std::string* add_tags(); - void add_tags(const ::std::string& value); - #if LANG_CXX11 - void add_tags(::std::string&& value); - #endif - void add_tags(const char* value); - void add_tags(const char* value, size_t size); - const ::google::protobuf::RepeatedPtrField<::std::string>& tags() const; - ::google::protobuf::RepeatedPtrField<::std::string>* mutable_tags(); - - // .flyteidl.core.ArtifactID artifact_id = 1; - bool has_artifact_id() const; - void clear_artifact_id(); - static const int 
kArtifactIdFieldNumber = 1; - const ::flyteidl::core::ArtifactID& artifact_id() const; - ::flyteidl::core::ArtifactID* release_artifact_id(); - ::flyteidl::core::ArtifactID* mutable_artifact_id(); - void set_allocated_artifact_id(::flyteidl::core::ArtifactID* artifact_id); - - // .flyteidl.artifact.ArtifactSpec spec = 2; - bool has_spec() const; - void clear_spec(); - static const int kSpecFieldNumber = 2; - const ::flyteidl::artifact::ArtifactSpec& spec() const; - ::flyteidl::artifact::ArtifactSpec* release_spec(); - ::flyteidl::artifact::ArtifactSpec* mutable_spec(); - void set_allocated_spec(::flyteidl::artifact::ArtifactSpec* spec); - - // .flyteidl.artifact.ArtifactSource source = 4; - bool has_source() const; - void clear_source(); - static const int kSourceFieldNumber = 4; - const ::flyteidl::artifact::ArtifactSource& source() const; - ::flyteidl::artifact::ArtifactSource* release_source(); - ::flyteidl::artifact::ArtifactSource* mutable_source(); - void set_allocated_source(::flyteidl::artifact::ArtifactSource* source); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.Artifact) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::RepeatedPtrField<::std::string> tags_; - ::flyteidl::core::ArtifactID* artifact_id_; - ::flyteidl::artifact::ArtifactSpec* spec_; - ::flyteidl::artifact::ArtifactSource* source_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class CreateArtifactRequest_PartitionsEntry_DoNotUse : public ::google::protobuf::internal::MapEntry { -public: -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -static bool _ParseMap(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - typedef 
::google::protobuf::internal::MapEntry SuperType; - CreateArtifactRequest_PartitionsEntry_DoNotUse(); - CreateArtifactRequest_PartitionsEntry_DoNotUse(::google::protobuf::Arena* arena); - void MergeFrom(const CreateArtifactRequest_PartitionsEntry_DoNotUse& other); - static const CreateArtifactRequest_PartitionsEntry_DoNotUse* internal_default_instance() { return reinterpret_cast(&_CreateArtifactRequest_PartitionsEntry_DoNotUse_default_instance_); } - void MergeFrom(const ::google::protobuf::Message& other) final; - ::google::protobuf::Metadata GetMetadata() const; -}; - -// ------------------------------------------------------------------- - -class CreateArtifactRequest final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.CreateArtifactRequest) */ { - public: - CreateArtifactRequest(); - virtual ~CreateArtifactRequest(); - - CreateArtifactRequest(const CreateArtifactRequest& from); - - inline CreateArtifactRequest& operator=(const CreateArtifactRequest& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - CreateArtifactRequest(CreateArtifactRequest&& from) noexcept - : CreateArtifactRequest() { - *this = ::std::move(from); - } - - inline CreateArtifactRequest& operator=(CreateArtifactRequest&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const CreateArtifactRequest& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const CreateArtifactRequest* internal_default_instance() { - return reinterpret_cast( - &_CreateArtifactRequest_default_instance_); - } - static constexpr int kIndexInFileMessages = - 2; - - void Swap(CreateArtifactRequest* other); - friend void swap(CreateArtifactRequest& a, 
CreateArtifactRequest& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline CreateArtifactRequest* New() const final { - return CreateMaybeMessage(nullptr); - } - - CreateArtifactRequest* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const CreateArtifactRequest& from); - void MergeFrom(const CreateArtifactRequest& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(CreateArtifactRequest* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - - // accessors ------------------------------------------------------- - - // map partitions = 4; - int partitions_size() 
const; - void clear_partitions(); - static const int kPartitionsFieldNumber = 4; - const ::google::protobuf::Map< ::std::string, ::std::string >& - partitions() const; - ::google::protobuf::Map< ::std::string, ::std::string >* - mutable_partitions(); - - // string version = 3; - void clear_version(); - static const int kVersionFieldNumber = 3; - const ::std::string& version() const; - void set_version(const ::std::string& value); - #if LANG_CXX11 - void set_version(::std::string&& value); - #endif - void set_version(const char* value); - void set_version(const char* value, size_t size); - ::std::string* mutable_version(); - ::std::string* release_version(); - void set_allocated_version(::std::string* version); - - // string tag = 5; - void clear_tag(); - static const int kTagFieldNumber = 5; - const ::std::string& tag() const; - void set_tag(const ::std::string& value); - #if LANG_CXX11 - void set_tag(::std::string&& value); - #endif - void set_tag(const char* value); - void set_tag(const char* value, size_t size); - ::std::string* mutable_tag(); - ::std::string* release_tag(); - void set_allocated_tag(::std::string* tag); - - // .flyteidl.core.ArtifactKey artifact_key = 1; - bool has_artifact_key() const; - void clear_artifact_key(); - static const int kArtifactKeyFieldNumber = 1; - const ::flyteidl::core::ArtifactKey& artifact_key() const; - ::flyteidl::core::ArtifactKey* release_artifact_key(); - ::flyteidl::core::ArtifactKey* mutable_artifact_key(); - void set_allocated_artifact_key(::flyteidl::core::ArtifactKey* artifact_key); - - // .flyteidl.artifact.ArtifactSpec spec = 2; - bool has_spec() const; - void clear_spec(); - static const int kSpecFieldNumber = 2; - const ::flyteidl::artifact::ArtifactSpec& spec() const; - ::flyteidl::artifact::ArtifactSpec* release_spec(); - ::flyteidl::artifact::ArtifactSpec* mutable_spec(); - void set_allocated_spec(::flyteidl::artifact::ArtifactSpec* spec); - - // .flyteidl.artifact.ArtifactSource source = 6; - bool 
has_source() const; - void clear_source(); - static const int kSourceFieldNumber = 6; - const ::flyteidl::artifact::ArtifactSource& source() const; - ::flyteidl::artifact::ArtifactSource* release_source(); - ::flyteidl::artifact::ArtifactSource* mutable_source(); - void set_allocated_source(::flyteidl::artifact::ArtifactSource* source); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.CreateArtifactRequest) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::internal::MapField< - CreateArtifactRequest_PartitionsEntry_DoNotUse, - ::std::string, ::std::string, - ::google::protobuf::internal::WireFormatLite::TYPE_STRING, - ::google::protobuf::internal::WireFormatLite::TYPE_STRING, - 0 > partitions_; - ::google::protobuf::internal::ArenaStringPtr version_; - ::google::protobuf::internal::ArenaStringPtr tag_; - ::flyteidl::core::ArtifactKey* artifact_key_; - ::flyteidl::artifact::ArtifactSpec* spec_; - ::flyteidl::artifact::ArtifactSource* source_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class ArtifactSource final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.ArtifactSource) */ { - public: - ArtifactSource(); - virtual ~ArtifactSource(); - - ArtifactSource(const ArtifactSource& from); - - inline ArtifactSource& operator=(const ArtifactSource& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - ArtifactSource(ArtifactSource&& from) noexcept - : ArtifactSource() { - *this = ::std::move(from); - } - - inline ArtifactSource& operator=(ArtifactSource&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static 
const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const ArtifactSource& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const ArtifactSource* internal_default_instance() { - return reinterpret_cast( - &_ArtifactSource_default_instance_); - } - static constexpr int kIndexInFileMessages = - 3; - - void Swap(ArtifactSource* other); - friend void swap(ArtifactSource& a, ArtifactSource& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline ArtifactSource* New() const final { - return CreateMaybeMessage(nullptr); - } - - ArtifactSource* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const ArtifactSource& from); - void MergeFrom(const ArtifactSource& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void 
InternalSwap(ArtifactSource* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // string node_id = 2; - void clear_node_id(); - static const int kNodeIdFieldNumber = 2; - const ::std::string& node_id() const; - void set_node_id(const ::std::string& value); - #if LANG_CXX11 - void set_node_id(::std::string&& value); - #endif - void set_node_id(const char* value); - void set_node_id(const char* value, size_t size); - ::std::string* mutable_node_id(); - ::std::string* release_node_id(); - void set_allocated_node_id(::std::string* node_id); - - // string principal = 5; - void clear_principal(); - static const int kPrincipalFieldNumber = 5; - const ::std::string& principal() const; - void set_principal(const ::std::string& value); - #if LANG_CXX11 - void set_principal(::std::string&& value); - #endif - void set_principal(const char* value); - void set_principal(const char* value, size_t size); - ::std::string* mutable_principal(); - ::std::string* release_principal(); - void set_allocated_principal(::std::string* principal); - - // .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - bool has_workflow_execution() const; - void clear_workflow_execution(); - static const int kWorkflowExecutionFieldNumber = 1; - const ::flyteidl::core::WorkflowExecutionIdentifier& workflow_execution() const; - ::flyteidl::core::WorkflowExecutionIdentifier* release_workflow_execution(); - ::flyteidl::core::WorkflowExecutionIdentifier* mutable_workflow_execution(); - void set_allocated_workflow_execution(::flyteidl::core::WorkflowExecutionIdentifier* workflow_execution); - - // .flyteidl.core.Identifier task_id = 3; - bool has_task_id() const; - 
void clear_task_id(); - static const int kTaskIdFieldNumber = 3; - const ::flyteidl::core::Identifier& task_id() const; - ::flyteidl::core::Identifier* release_task_id(); - ::flyteidl::core::Identifier* mutable_task_id(); - void set_allocated_task_id(::flyteidl::core::Identifier* task_id); - - // uint32 retry_attempt = 4; - void clear_retry_attempt(); - static const int kRetryAttemptFieldNumber = 4; - ::google::protobuf::uint32 retry_attempt() const; - void set_retry_attempt(::google::protobuf::uint32 value); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ArtifactSource) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::internal::ArenaStringPtr node_id_; - ::google::protobuf::internal::ArenaStringPtr principal_; - ::flyteidl::core::WorkflowExecutionIdentifier* workflow_execution_; - ::flyteidl::core::Identifier* task_id_; - ::google::protobuf::uint32 retry_attempt_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class ArtifactSpec final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.ArtifactSpec) */ { - public: - ArtifactSpec(); - virtual ~ArtifactSpec(); - - ArtifactSpec(const ArtifactSpec& from); - - inline ArtifactSpec& operator=(const ArtifactSpec& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - ArtifactSpec(ArtifactSpec&& from) noexcept - : ArtifactSpec() { - *this = ::std::move(from); - } - - inline ArtifactSpec& operator=(ArtifactSpec&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - 
static const ArtifactSpec& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const ArtifactSpec* internal_default_instance() { - return reinterpret_cast( - &_ArtifactSpec_default_instance_); - } - static constexpr int kIndexInFileMessages = - 4; - - void Swap(ArtifactSpec* other); - friend void swap(ArtifactSpec& a, ArtifactSpec& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline ArtifactSpec* New() const final { - return CreateMaybeMessage(nullptr); - } - - ArtifactSpec* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const ArtifactSpec& from); - void MergeFrom(const ArtifactSpec& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(ArtifactSpec* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* 
MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // string short_description = 3; - void clear_short_description(); - static const int kShortDescriptionFieldNumber = 3; - const ::std::string& short_description() const; - void set_short_description(const ::std::string& value); - #if LANG_CXX11 - void set_short_description(::std::string&& value); - #endif - void set_short_description(const char* value); - void set_short_description(const char* value, size_t size); - ::std::string* mutable_short_description(); - ::std::string* release_short_description(); - void set_allocated_short_description(::std::string* short_description); - - // string metadata_type = 5; - void clear_metadata_type(); - static const int kMetadataTypeFieldNumber = 5; - const ::std::string& metadata_type() const; - void set_metadata_type(const ::std::string& value); - #if LANG_CXX11 - void set_metadata_type(::std::string&& value); - #endif - void set_metadata_type(const char* value); - void set_metadata_type(const char* value, size_t size); - ::std::string* mutable_metadata_type(); - ::std::string* release_metadata_type(); - void set_allocated_metadata_type(::std::string* metadata_type); - - // .flyteidl.core.Literal value = 1; - bool has_value() const; - void clear_value(); - static const int kValueFieldNumber = 1; - const ::flyteidl::core::Literal& value() const; - ::flyteidl::core::Literal* release_value(); - ::flyteidl::core::Literal* mutable_value(); - void set_allocated_value(::flyteidl::core::Literal* value); - - // .flyteidl.core.LiteralType type = 2; - bool has_type() const; - void clear_type(); - static const int kTypeFieldNumber = 2; - const ::flyteidl::core::LiteralType& type() const; - ::flyteidl::core::LiteralType* release_type(); - ::flyteidl::core::LiteralType* 
mutable_type(); - void set_allocated_type(::flyteidl::core::LiteralType* type); - - // .google.protobuf.Any user_metadata = 4; - bool has_user_metadata() const; - void clear_user_metadata(); - static const int kUserMetadataFieldNumber = 4; - const ::google::protobuf::Any& user_metadata() const; - ::google::protobuf::Any* release_user_metadata(); - ::google::protobuf::Any* mutable_user_metadata(); - void set_allocated_user_metadata(::google::protobuf::Any* user_metadata); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ArtifactSpec) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::internal::ArenaStringPtr short_description_; - ::google::protobuf::internal::ArenaStringPtr metadata_type_; - ::flyteidl::core::Literal* value_; - ::flyteidl::core::LiteralType* type_; - ::google::protobuf::Any* user_metadata_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class CreateArtifactResponse final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.CreateArtifactResponse) */ { - public: - CreateArtifactResponse(); - virtual ~CreateArtifactResponse(); - - CreateArtifactResponse(const CreateArtifactResponse& from); - - inline CreateArtifactResponse& operator=(const CreateArtifactResponse& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - CreateArtifactResponse(CreateArtifactResponse&& from) noexcept - : CreateArtifactResponse() { - *this = ::std::move(from); - } - - inline CreateArtifactResponse& operator=(CreateArtifactResponse&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* 
descriptor() { - return default_instance().GetDescriptor(); - } - static const CreateArtifactResponse& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const CreateArtifactResponse* internal_default_instance() { - return reinterpret_cast( - &_CreateArtifactResponse_default_instance_); - } - static constexpr int kIndexInFileMessages = - 5; - - void Swap(CreateArtifactResponse* other); - friend void swap(CreateArtifactResponse& a, CreateArtifactResponse& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline CreateArtifactResponse* New() const final { - return CreateMaybeMessage(nullptr); - } - - CreateArtifactResponse* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const CreateArtifactResponse& from); - void MergeFrom(const CreateArtifactResponse& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) 
const final; - void InternalSwap(CreateArtifactResponse* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // .flyteidl.artifact.Artifact artifact = 1; - bool has_artifact() const; - void clear_artifact(); - static const int kArtifactFieldNumber = 1; - const ::flyteidl::artifact::Artifact& artifact() const; - ::flyteidl::artifact::Artifact* release_artifact(); - ::flyteidl::artifact::Artifact* mutable_artifact(); - void set_allocated_artifact(::flyteidl::artifact::Artifact* artifact); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.CreateArtifactResponse) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::flyteidl::artifact::Artifact* artifact_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class GetArtifactRequest final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.GetArtifactRequest) */ { - public: - GetArtifactRequest(); - virtual ~GetArtifactRequest(); - - GetArtifactRequest(const GetArtifactRequest& from); - - inline GetArtifactRequest& operator=(const GetArtifactRequest& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - GetArtifactRequest(GetArtifactRequest&& from) noexcept - : GetArtifactRequest() { - *this = ::std::move(from); - } - - inline GetArtifactRequest& operator=(GetArtifactRequest&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } 
else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const GetArtifactRequest& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const GetArtifactRequest* internal_default_instance() { - return reinterpret_cast( - &_GetArtifactRequest_default_instance_); - } - static constexpr int kIndexInFileMessages = - 6; - - void Swap(GetArtifactRequest* other); - friend void swap(GetArtifactRequest& a, GetArtifactRequest& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline GetArtifactRequest* New() const final { - return CreateMaybeMessage(nullptr); - } - - GetArtifactRequest* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const GetArtifactRequest& from); - void MergeFrom(const GetArtifactRequest& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void 
SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(GetArtifactRequest* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // .flyteidl.core.ArtifactQuery query = 1; - bool has_query() const; - void clear_query(); - static const int kQueryFieldNumber = 1; - const ::flyteidl::core::ArtifactQuery& query() const; - ::flyteidl::core::ArtifactQuery* release_query(); - ::flyteidl::core::ArtifactQuery* mutable_query(); - void set_allocated_query(::flyteidl::core::ArtifactQuery* query); - - // bool details = 2; - void clear_details(); - static const int kDetailsFieldNumber = 2; - bool details() const; - void set_details(bool value); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.GetArtifactRequest) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::flyteidl::core::ArtifactQuery* query_; - bool details_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class GetArtifactResponse final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.GetArtifactResponse) */ { - public: - GetArtifactResponse(); - virtual ~GetArtifactResponse(); - - GetArtifactResponse(const GetArtifactResponse& from); - - inline GetArtifactResponse& operator=(const GetArtifactResponse& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - GetArtifactResponse(GetArtifactResponse&& from) noexcept - : GetArtifactResponse() { - 
*this = ::std::move(from); - } - - inline GetArtifactResponse& operator=(GetArtifactResponse&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const GetArtifactResponse& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const GetArtifactResponse* internal_default_instance() { - return reinterpret_cast( - &_GetArtifactResponse_default_instance_); - } - static constexpr int kIndexInFileMessages = - 7; - - void Swap(GetArtifactResponse* other); - friend void swap(GetArtifactResponse& a, GetArtifactResponse& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline GetArtifactResponse* New() const final { - return CreateMaybeMessage(nullptr); - } - - GetArtifactResponse* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const GetArtifactResponse& from); - void MergeFrom(const GetArtifactResponse& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* 
output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(GetArtifactResponse* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // .flyteidl.artifact.Artifact artifact = 1; - bool has_artifact() const; - void clear_artifact(); - static const int kArtifactFieldNumber = 1; - const ::flyteidl::artifact::Artifact& artifact() const; - ::flyteidl::artifact::Artifact* release_artifact(); - ::flyteidl::artifact::Artifact* mutable_artifact(); - void set_allocated_artifact(::flyteidl::artifact::Artifact* artifact); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.GetArtifactResponse) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::flyteidl::artifact::Artifact* artifact_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class SearchOptions final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.SearchOptions) */ { - public: - SearchOptions(); - virtual ~SearchOptions(); - - SearchOptions(const SearchOptions& from); - - inline SearchOptions& operator=(const SearchOptions& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - SearchOptions(SearchOptions&& from) noexcept - : 
SearchOptions() { - *this = ::std::move(from); - } - - inline SearchOptions& operator=(SearchOptions&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const SearchOptions& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const SearchOptions* internal_default_instance() { - return reinterpret_cast( - &_SearchOptions_default_instance_); - } - static constexpr int kIndexInFileMessages = - 8; - - void Swap(SearchOptions* other); - friend void swap(SearchOptions& a, SearchOptions& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline SearchOptions* New() const final { - return CreateMaybeMessage(nullptr); - } - - SearchOptions* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const SearchOptions& from); - void MergeFrom(const SearchOptions& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* 
InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(SearchOptions* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // bool strict_partitions = 1; - void clear_strict_partitions(); - static const int kStrictPartitionsFieldNumber = 1; - bool strict_partitions() const; - void set_strict_partitions(bool value); - - // bool latest_by_key = 2; - void clear_latest_by_key(); - static const int kLatestByKeyFieldNumber = 2; - bool latest_by_key() const; - void set_latest_by_key(bool value); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.SearchOptions) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - bool strict_partitions_; - bool latest_by_key_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class SearchArtifactsRequest final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.SearchArtifactsRequest) */ { - public: - SearchArtifactsRequest(); - virtual ~SearchArtifactsRequest(); - - SearchArtifactsRequest(const SearchArtifactsRequest& from); - - inline SearchArtifactsRequest& operator=(const SearchArtifactsRequest& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - SearchArtifactsRequest(SearchArtifactsRequest&& 
from) noexcept - : SearchArtifactsRequest() { - *this = ::std::move(from); - } - - inline SearchArtifactsRequest& operator=(SearchArtifactsRequest&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const SearchArtifactsRequest& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const SearchArtifactsRequest* internal_default_instance() { - return reinterpret_cast( - &_SearchArtifactsRequest_default_instance_); - } - static constexpr int kIndexInFileMessages = - 9; - - void Swap(SearchArtifactsRequest* other); - friend void swap(SearchArtifactsRequest& a, SearchArtifactsRequest& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline SearchArtifactsRequest* New() const final { - return CreateMaybeMessage(nullptr); - } - - SearchArtifactsRequest* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const SearchArtifactsRequest& from); - void MergeFrom(const SearchArtifactsRequest& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER 
- void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(SearchArtifactsRequest* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // string principal = 3; - void clear_principal(); - static const int kPrincipalFieldNumber = 3; - const ::std::string& principal() const; - void set_principal(const ::std::string& value); - #if LANG_CXX11 - void set_principal(::std::string&& value); - #endif - void set_principal(const char* value); - void set_principal(const char* value, size_t size); - ::std::string* mutable_principal(); - ::std::string* release_principal(); - void set_allocated_principal(::std::string* principal); - - // string version = 4; - void clear_version(); - static const int kVersionFieldNumber = 4; - const ::std::string& version() const; - void set_version(const ::std::string& value); - #if LANG_CXX11 - void set_version(::std::string&& value); - #endif - void set_version(const char* value); - void set_version(const char* value, size_t size); - ::std::string* mutable_version(); - ::std::string* release_version(); - void set_allocated_version(::std::string* version); - - // string token = 6; - void clear_token(); - static const int kTokenFieldNumber = 6; - const ::std::string& token() const; - void set_token(const ::std::string& value); - #if LANG_CXX11 - void set_token(::std::string&& 
value); - #endif - void set_token(const char* value); - void set_token(const char* value, size_t size); - ::std::string* mutable_token(); - ::std::string* release_token(); - void set_allocated_token(::std::string* token); - - // .flyteidl.core.ArtifactKey artifact_key = 1; - bool has_artifact_key() const; - void clear_artifact_key(); - static const int kArtifactKeyFieldNumber = 1; - const ::flyteidl::core::ArtifactKey& artifact_key() const; - ::flyteidl::core::ArtifactKey* release_artifact_key(); - ::flyteidl::core::ArtifactKey* mutable_artifact_key(); - void set_allocated_artifact_key(::flyteidl::core::ArtifactKey* artifact_key); - - // .flyteidl.core.Partitions partitions = 2; - bool has_partitions() const; - void clear_partitions(); - static const int kPartitionsFieldNumber = 2; - const ::flyteidl::core::Partitions& partitions() const; - ::flyteidl::core::Partitions* release_partitions(); - ::flyteidl::core::Partitions* mutable_partitions(); - void set_allocated_partitions(::flyteidl::core::Partitions* partitions); - - // .flyteidl.artifact.SearchOptions options = 5; - bool has_options() const; - void clear_options(); - static const int kOptionsFieldNumber = 5; - const ::flyteidl::artifact::SearchOptions& options() const; - ::flyteidl::artifact::SearchOptions* release_options(); - ::flyteidl::artifact::SearchOptions* mutable_options(); - void set_allocated_options(::flyteidl::artifact::SearchOptions* options); - - // int32 limit = 7; - void clear_limit(); - static const int kLimitFieldNumber = 7; - ::google::protobuf::int32 limit() const; - void set_limit(::google::protobuf::int32 value); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.SearchArtifactsRequest) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::internal::ArenaStringPtr principal_; - ::google::protobuf::internal::ArenaStringPtr version_; - ::google::protobuf::internal::ArenaStringPtr token_; - 
::flyteidl::core::ArtifactKey* artifact_key_; - ::flyteidl::core::Partitions* partitions_; - ::flyteidl::artifact::SearchOptions* options_; - ::google::protobuf::int32 limit_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class SearchArtifactsResponse final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.SearchArtifactsResponse) */ { - public: - SearchArtifactsResponse(); - virtual ~SearchArtifactsResponse(); - - SearchArtifactsResponse(const SearchArtifactsResponse& from); - - inline SearchArtifactsResponse& operator=(const SearchArtifactsResponse& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - SearchArtifactsResponse(SearchArtifactsResponse&& from) noexcept - : SearchArtifactsResponse() { - *this = ::std::move(from); - } - - inline SearchArtifactsResponse& operator=(SearchArtifactsResponse&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const SearchArtifactsResponse& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const SearchArtifactsResponse* internal_default_instance() { - return reinterpret_cast( - &_SearchArtifactsResponse_default_instance_); - } - static constexpr int kIndexInFileMessages = - 10; - - void Swap(SearchArtifactsResponse* other); - friend void swap(SearchArtifactsResponse& a, SearchArtifactsResponse& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline SearchArtifactsResponse* New() const final { - return CreateMaybeMessage(nullptr); - } - - 
SearchArtifactsResponse* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const SearchArtifactsResponse& from); - void MergeFrom(const SearchArtifactsResponse& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(SearchArtifactsResponse* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // repeated .flyteidl.artifact.Artifact artifacts = 1; - int artifacts_size() const; - void clear_artifacts(); - static const int kArtifactsFieldNumber = 1; - ::flyteidl::artifact::Artifact* mutable_artifacts(int index); - ::google::protobuf::RepeatedPtrField< 
::flyteidl::artifact::Artifact >* - mutable_artifacts(); - const ::flyteidl::artifact::Artifact& artifacts(int index) const; - ::flyteidl::artifact::Artifact* add_artifacts(); - const ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::Artifact >& - artifacts() const; - - // string token = 2; - void clear_token(); - static const int kTokenFieldNumber = 2; - const ::std::string& token() const; - void set_token(const ::std::string& value); - #if LANG_CXX11 - void set_token(::std::string&& value); - #endif - void set_token(const char* value); - void set_token(const char* value, size_t size); - ::std::string* mutable_token(); - ::std::string* release_token(); - void set_allocated_token(::std::string* token); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.SearchArtifactsResponse) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::Artifact > artifacts_; - ::google::protobuf::internal::ArenaStringPtr token_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class FindByWorkflowExecRequest final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.FindByWorkflowExecRequest) */ { - public: - FindByWorkflowExecRequest(); - virtual ~FindByWorkflowExecRequest(); - - FindByWorkflowExecRequest(const FindByWorkflowExecRequest& from); - - inline FindByWorkflowExecRequest& operator=(const FindByWorkflowExecRequest& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - FindByWorkflowExecRequest(FindByWorkflowExecRequest&& from) noexcept - : FindByWorkflowExecRequest() { - *this = ::std::move(from); - } - - inline FindByWorkflowExecRequest& operator=(FindByWorkflowExecRequest&& from) noexcept { - if 
(GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const FindByWorkflowExecRequest& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const FindByWorkflowExecRequest* internal_default_instance() { - return reinterpret_cast( - &_FindByWorkflowExecRequest_default_instance_); - } - static constexpr int kIndexInFileMessages = - 11; - - void Swap(FindByWorkflowExecRequest* other); - friend void swap(FindByWorkflowExecRequest& a, FindByWorkflowExecRequest& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline FindByWorkflowExecRequest* New() const final { - return CreateMaybeMessage(nullptr); - } - - FindByWorkflowExecRequest* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const FindByWorkflowExecRequest& from); - void MergeFrom(const FindByWorkflowExecRequest& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* 
InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(FindByWorkflowExecRequest* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - typedef FindByWorkflowExecRequest_Direction Direction; - static const Direction INPUTS = - FindByWorkflowExecRequest_Direction_INPUTS; - static const Direction OUTPUTS = - FindByWorkflowExecRequest_Direction_OUTPUTS; - static inline bool Direction_IsValid(int value) { - return FindByWorkflowExecRequest_Direction_IsValid(value); - } - static const Direction Direction_MIN = - FindByWorkflowExecRequest_Direction_Direction_MIN; - static const Direction Direction_MAX = - FindByWorkflowExecRequest_Direction_Direction_MAX; - static const int Direction_ARRAYSIZE = - FindByWorkflowExecRequest_Direction_Direction_ARRAYSIZE; - static inline const ::google::protobuf::EnumDescriptor* - Direction_descriptor() { - return FindByWorkflowExecRequest_Direction_descriptor(); - } - static inline const ::std::string& Direction_Name(Direction value) { - return FindByWorkflowExecRequest_Direction_Name(value); - } - static inline bool Direction_Parse(const ::std::string& name, - Direction* value) { - return FindByWorkflowExecRequest_Direction_Parse(name, value); - } - - // accessors ------------------------------------------------------- - - // .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - bool has_exec_id() const; - void clear_exec_id(); - static const int kExecIdFieldNumber = 1; - const ::flyteidl::core::WorkflowExecutionIdentifier& exec_id() const; - 
::flyteidl::core::WorkflowExecutionIdentifier* release_exec_id(); - ::flyteidl::core::WorkflowExecutionIdentifier* mutable_exec_id(); - void set_allocated_exec_id(::flyteidl::core::WorkflowExecutionIdentifier* exec_id); - - // .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - void clear_direction(); - static const int kDirectionFieldNumber = 2; - ::flyteidl::artifact::FindByWorkflowExecRequest_Direction direction() const; - void set_direction(::flyteidl::artifact::FindByWorkflowExecRequest_Direction value); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.FindByWorkflowExecRequest) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::flyteidl::core::WorkflowExecutionIdentifier* exec_id_; - int direction_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class AddTagRequest final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.AddTagRequest) */ { - public: - AddTagRequest(); - virtual ~AddTagRequest(); - - AddTagRequest(const AddTagRequest& from); - - inline AddTagRequest& operator=(const AddTagRequest& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - AddTagRequest(AddTagRequest&& from) noexcept - : AddTagRequest() { - *this = ::std::move(from); - } - - inline AddTagRequest& operator=(AddTagRequest&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const AddTagRequest& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AddTagRequest* 
internal_default_instance() { - return reinterpret_cast( - &_AddTagRequest_default_instance_); - } - static constexpr int kIndexInFileMessages = - 12; - - void Swap(AddTagRequest* other); - friend void swap(AddTagRequest& a, AddTagRequest& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline AddTagRequest* New() const final { - return CreateMaybeMessage(nullptr); - } - - AddTagRequest* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const AddTagRequest& from); - void MergeFrom(const AddTagRequest& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(AddTagRequest* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types 
---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // string value = 2; - void clear_value(); - static const int kValueFieldNumber = 2; - const ::std::string& value() const; - void set_value(const ::std::string& value); - #if LANG_CXX11 - void set_value(::std::string&& value); - #endif - void set_value(const char* value); - void set_value(const char* value, size_t size); - ::std::string* mutable_value(); - ::std::string* release_value(); - void set_allocated_value(::std::string* value); - - // .flyteidl.core.ArtifactID artifact_id = 1; - bool has_artifact_id() const; - void clear_artifact_id(); - static const int kArtifactIdFieldNumber = 1; - const ::flyteidl::core::ArtifactID& artifact_id() const; - ::flyteidl::core::ArtifactID* release_artifact_id(); - ::flyteidl::core::ArtifactID* mutable_artifact_id(); - void set_allocated_artifact_id(::flyteidl::core::ArtifactID* artifact_id); - - // bool overwrite = 3; - void clear_overwrite(); - static const int kOverwriteFieldNumber = 3; - bool overwrite() const; - void set_overwrite(bool value); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.AddTagRequest) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::internal::ArenaStringPtr value_; - ::flyteidl::core::ArtifactID* artifact_id_; - bool overwrite_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class AddTagResponse final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.AddTagResponse) */ { - public: - AddTagResponse(); - virtual ~AddTagResponse(); - - AddTagResponse(const AddTagResponse& from); - - inline AddTagResponse& operator=(const AddTagResponse& from) { - 
CopyFrom(from); - return *this; - } - #if LANG_CXX11 - AddTagResponse(AddTagResponse&& from) noexcept - : AddTagResponse() { - *this = ::std::move(from); - } - - inline AddTagResponse& operator=(AddTagResponse&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const AddTagResponse& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const AddTagResponse* internal_default_instance() { - return reinterpret_cast( - &_AddTagResponse_default_instance_); - } - static constexpr int kIndexInFileMessages = - 13; - - void Swap(AddTagResponse* other); - friend void swap(AddTagResponse& a, AddTagResponse& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline AddTagResponse* New() const final { - return CreateMaybeMessage(nullptr); - } - - AddTagResponse* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const AddTagResponse& from); - void MergeFrom(const AddTagResponse& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void 
SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(AddTagResponse* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.AddTagResponse) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class CreateTriggerRequest final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.CreateTriggerRequest) */ { - public: - CreateTriggerRequest(); - virtual ~CreateTriggerRequest(); - - CreateTriggerRequest(const CreateTriggerRequest& from); - - inline CreateTriggerRequest& operator=(const CreateTriggerRequest& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - CreateTriggerRequest(CreateTriggerRequest&& from) noexcept - : CreateTriggerRequest() { - *this = ::std::move(from); - } - - inline CreateTriggerRequest& operator=(CreateTriggerRequest&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - 
static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const CreateTriggerRequest& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const CreateTriggerRequest* internal_default_instance() { - return reinterpret_cast( - &_CreateTriggerRequest_default_instance_); - } - static constexpr int kIndexInFileMessages = - 14; - - void Swap(CreateTriggerRequest* other); - friend void swap(CreateTriggerRequest& a, CreateTriggerRequest& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline CreateTriggerRequest* New() const final { - return CreateMaybeMessage(nullptr); - } - - CreateTriggerRequest* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const CreateTriggerRequest& from); - void MergeFrom(const CreateTriggerRequest& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void 
SetCachedSize(int size) const final; - void InternalSwap(CreateTriggerRequest* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - bool has_trigger_launch_plan() const; - void clear_trigger_launch_plan(); - static const int kTriggerLaunchPlanFieldNumber = 1; - const ::flyteidl::admin::LaunchPlan& trigger_launch_plan() const; - ::flyteidl::admin::LaunchPlan* release_trigger_launch_plan(); - ::flyteidl::admin::LaunchPlan* mutable_trigger_launch_plan(); - void set_allocated_trigger_launch_plan(::flyteidl::admin::LaunchPlan* trigger_launch_plan); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.CreateTriggerRequest) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::flyteidl::admin::LaunchPlan* trigger_launch_plan_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class CreateTriggerResponse final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.CreateTriggerResponse) */ { - public: - CreateTriggerResponse(); - virtual ~CreateTriggerResponse(); - - CreateTriggerResponse(const CreateTriggerResponse& from); - - inline CreateTriggerResponse& operator=(const CreateTriggerResponse& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - CreateTriggerResponse(CreateTriggerResponse&& from) noexcept - : CreateTriggerResponse() { - *this = ::std::move(from); - } - - inline 
CreateTriggerResponse& operator=(CreateTriggerResponse&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const CreateTriggerResponse& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const CreateTriggerResponse* internal_default_instance() { - return reinterpret_cast( - &_CreateTriggerResponse_default_instance_); - } - static constexpr int kIndexInFileMessages = - 15; - - void Swap(CreateTriggerResponse* other); - friend void swap(CreateTriggerResponse& a, CreateTriggerResponse& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline CreateTriggerResponse* New() const final { - return CreateMaybeMessage(nullptr); - } - - CreateTriggerResponse* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const CreateTriggerResponse& from); - void MergeFrom(const CreateTriggerResponse& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - 
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(CreateTriggerResponse* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.CreateTriggerResponse) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class DeleteTriggerRequest final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.DeleteTriggerRequest) */ { - public: - DeleteTriggerRequest(); - virtual ~DeleteTriggerRequest(); - - DeleteTriggerRequest(const DeleteTriggerRequest& from); - - inline DeleteTriggerRequest& operator=(const DeleteTriggerRequest& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - DeleteTriggerRequest(DeleteTriggerRequest&& from) noexcept - : DeleteTriggerRequest() { - *this = ::std::move(from); - } - - inline DeleteTriggerRequest& operator=(DeleteTriggerRequest&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return 
default_instance().GetDescriptor(); - } - static const DeleteTriggerRequest& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const DeleteTriggerRequest* internal_default_instance() { - return reinterpret_cast( - &_DeleteTriggerRequest_default_instance_); - } - static constexpr int kIndexInFileMessages = - 16; - - void Swap(DeleteTriggerRequest* other); - friend void swap(DeleteTriggerRequest& a, DeleteTriggerRequest& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline DeleteTriggerRequest* New() const final { - return CreateMaybeMessage(nullptr); - } - - DeleteTriggerRequest* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const DeleteTriggerRequest& from); - void MergeFrom(const DeleteTriggerRequest& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void 
InternalSwap(DeleteTriggerRequest* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // .flyteidl.core.Identifier trigger_id = 1; - bool has_trigger_id() const; - void clear_trigger_id(); - static const int kTriggerIdFieldNumber = 1; - const ::flyteidl::core::Identifier& trigger_id() const; - ::flyteidl::core::Identifier* release_trigger_id(); - ::flyteidl::core::Identifier* mutable_trigger_id(); - void set_allocated_trigger_id(::flyteidl::core::Identifier* trigger_id); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.DeleteTriggerRequest) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::flyteidl::core::Identifier* trigger_id_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class DeleteTriggerResponse final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.DeleteTriggerResponse) */ { - public: - DeleteTriggerResponse(); - virtual ~DeleteTriggerResponse(); - - DeleteTriggerResponse(const DeleteTriggerResponse& from); - - inline DeleteTriggerResponse& operator=(const DeleteTriggerResponse& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - DeleteTriggerResponse(DeleteTriggerResponse&& from) noexcept - : DeleteTriggerResponse() { - *this = ::std::move(from); - } - - inline DeleteTriggerResponse& operator=(DeleteTriggerResponse&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) 
InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const DeleteTriggerResponse& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const DeleteTriggerResponse* internal_default_instance() { - return reinterpret_cast( - &_DeleteTriggerResponse_default_instance_); - } - static constexpr int kIndexInFileMessages = - 17; - - void Swap(DeleteTriggerResponse* other); - friend void swap(DeleteTriggerResponse& a, DeleteTriggerResponse& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline DeleteTriggerResponse* New() const final { - return CreateMaybeMessage(nullptr); - } - - DeleteTriggerResponse* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const DeleteTriggerResponse& from); - void MergeFrom(const DeleteTriggerResponse& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final 
{ return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(DeleteTriggerResponse* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.DeleteTriggerResponse) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class ArtifactProducer final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.ArtifactProducer) */ { - public: - ArtifactProducer(); - virtual ~ArtifactProducer(); - - ArtifactProducer(const ArtifactProducer& from); - - inline ArtifactProducer& operator=(const ArtifactProducer& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - ArtifactProducer(ArtifactProducer&& from) noexcept - : ArtifactProducer() { - *this = ::std::move(from); - } - - inline ArtifactProducer& operator=(ArtifactProducer&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const ArtifactProducer& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const ArtifactProducer* 
internal_default_instance() { - return reinterpret_cast( - &_ArtifactProducer_default_instance_); - } - static constexpr int kIndexInFileMessages = - 18; - - void Swap(ArtifactProducer* other); - friend void swap(ArtifactProducer& a, ArtifactProducer& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline ArtifactProducer* New() const final { - return CreateMaybeMessage(nullptr); - } - - ArtifactProducer* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const ArtifactProducer& from); - void MergeFrom(const ArtifactProducer& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(ArtifactProducer* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const 
final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // .flyteidl.core.Identifier entity_id = 1; - bool has_entity_id() const; - void clear_entity_id(); - static const int kEntityIdFieldNumber = 1; - const ::flyteidl::core::Identifier& entity_id() const; - ::flyteidl::core::Identifier* release_entity_id(); - ::flyteidl::core::Identifier* mutable_entity_id(); - void set_allocated_entity_id(::flyteidl::core::Identifier* entity_id); - - // .flyteidl.core.VariableMap outputs = 2; - bool has_outputs() const; - void clear_outputs(); - static const int kOutputsFieldNumber = 2; - const ::flyteidl::core::VariableMap& outputs() const; - ::flyteidl::core::VariableMap* release_outputs(); - ::flyteidl::core::VariableMap* mutable_outputs(); - void set_allocated_outputs(::flyteidl::core::VariableMap* outputs); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ArtifactProducer) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::flyteidl::core::Identifier* entity_id_; - ::flyteidl::core::VariableMap* outputs_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class RegisterProducerRequest final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.RegisterProducerRequest) */ { - public: - RegisterProducerRequest(); - virtual ~RegisterProducerRequest(); - - RegisterProducerRequest(const RegisterProducerRequest& from); - - inline RegisterProducerRequest& operator=(const RegisterProducerRequest& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - RegisterProducerRequest(RegisterProducerRequest&& from) noexcept - : RegisterProducerRequest() { - *this = ::std::move(from); - } - - 
inline RegisterProducerRequest& operator=(RegisterProducerRequest&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const RegisterProducerRequest& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const RegisterProducerRequest* internal_default_instance() { - return reinterpret_cast( - &_RegisterProducerRequest_default_instance_); - } - static constexpr int kIndexInFileMessages = - 19; - - void Swap(RegisterProducerRequest* other); - friend void swap(RegisterProducerRequest& a, RegisterProducerRequest& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline RegisterProducerRequest* New() const final { - return CreateMaybeMessage(nullptr); - } - - RegisterProducerRequest* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const RegisterProducerRequest& from); - void MergeFrom(const RegisterProducerRequest& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - 
::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(RegisterProducerRequest* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // repeated .flyteidl.artifact.ArtifactProducer producers = 1; - int producers_size() const; - void clear_producers(); - static const int kProducersFieldNumber = 1; - ::flyteidl::artifact::ArtifactProducer* mutable_producers(int index); - ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::ArtifactProducer >* - mutable_producers(); - const ::flyteidl::artifact::ArtifactProducer& producers(int index) const; - ::flyteidl::artifact::ArtifactProducer* add_producers(); - const ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::ArtifactProducer >& - producers() const; - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.RegisterProducerRequest) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::ArtifactProducer > producers_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class ArtifactConsumer final : - public ::google::protobuf::Message /* 
@@protoc_insertion_point(class_definition:flyteidl.artifact.ArtifactConsumer) */ { - public: - ArtifactConsumer(); - virtual ~ArtifactConsumer(); - - ArtifactConsumer(const ArtifactConsumer& from); - - inline ArtifactConsumer& operator=(const ArtifactConsumer& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - ArtifactConsumer(ArtifactConsumer&& from) noexcept - : ArtifactConsumer() { - *this = ::std::move(from); - } - - inline ArtifactConsumer& operator=(ArtifactConsumer&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const ArtifactConsumer& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const ArtifactConsumer* internal_default_instance() { - return reinterpret_cast( - &_ArtifactConsumer_default_instance_); - } - static constexpr int kIndexInFileMessages = - 20; - - void Swap(ArtifactConsumer* other); - friend void swap(ArtifactConsumer& a, ArtifactConsumer& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline ArtifactConsumer* New() const final { - return CreateMaybeMessage(nullptr); - } - - ArtifactConsumer* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const ArtifactConsumer& from); - void MergeFrom(const ArtifactConsumer& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, 
::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(ArtifactConsumer* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // .flyteidl.core.Identifier entity_id = 1; - bool has_entity_id() const; - void clear_entity_id(); - static const int kEntityIdFieldNumber = 1; - const ::flyteidl::core::Identifier& entity_id() const; - ::flyteidl::core::Identifier* release_entity_id(); - ::flyteidl::core::Identifier* mutable_entity_id(); - void set_allocated_entity_id(::flyteidl::core::Identifier* entity_id); - - // .flyteidl.core.ParameterMap inputs = 2; - bool has_inputs() const; - void clear_inputs(); - static const int kInputsFieldNumber = 2; - const ::flyteidl::core::ParameterMap& inputs() const; - ::flyteidl::core::ParameterMap* release_inputs(); - ::flyteidl::core::ParameterMap* mutable_inputs(); - void set_allocated_inputs(::flyteidl::core::ParameterMap* inputs); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ArtifactConsumer) - private: - class HasBitSetters; - - 
::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::flyteidl::core::Identifier* entity_id_; - ::flyteidl::core::ParameterMap* inputs_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class RegisterConsumerRequest final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.RegisterConsumerRequest) */ { - public: - RegisterConsumerRequest(); - virtual ~RegisterConsumerRequest(); - - RegisterConsumerRequest(const RegisterConsumerRequest& from); - - inline RegisterConsumerRequest& operator=(const RegisterConsumerRequest& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - RegisterConsumerRequest(RegisterConsumerRequest&& from) noexcept - : RegisterConsumerRequest() { - *this = ::std::move(from); - } - - inline RegisterConsumerRequest& operator=(RegisterConsumerRequest&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const RegisterConsumerRequest& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const RegisterConsumerRequest* internal_default_instance() { - return reinterpret_cast( - &_RegisterConsumerRequest_default_instance_); - } - static constexpr int kIndexInFileMessages = - 21; - - void Swap(RegisterConsumerRequest* other); - friend void swap(RegisterConsumerRequest& a, RegisterConsumerRequest& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline RegisterConsumerRequest* New() const final { - return CreateMaybeMessage(nullptr); - } - - RegisterConsumerRequest* 
New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const RegisterConsumerRequest& from); - void MergeFrom(const RegisterConsumerRequest& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(RegisterConsumerRequest* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - int consumers_size() const; - void clear_consumers(); - static const int kConsumersFieldNumber = 1; - ::flyteidl::artifact::ArtifactConsumer* mutable_consumers(int index); - ::google::protobuf::RepeatedPtrField< 
::flyteidl::artifact::ArtifactConsumer >* - mutable_consumers(); - const ::flyteidl::artifact::ArtifactConsumer& consumers(int index) const; - ::flyteidl::artifact::ArtifactConsumer* add_consumers(); - const ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::ArtifactConsumer >& - consumers() const; - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.RegisterConsumerRequest) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::ArtifactConsumer > consumers_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class RegisterResponse final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.RegisterResponse) */ { - public: - RegisterResponse(); - virtual ~RegisterResponse(); - - RegisterResponse(const RegisterResponse& from); - - inline RegisterResponse& operator=(const RegisterResponse& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - RegisterResponse(RegisterResponse&& from) noexcept - : RegisterResponse() { - *this = ::std::move(from); - } - - inline RegisterResponse& operator=(RegisterResponse&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const RegisterResponse& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const RegisterResponse* internal_default_instance() { - return reinterpret_cast( - &_RegisterResponse_default_instance_); - } - static constexpr int kIndexInFileMessages = - 22; - - void 
Swap(RegisterResponse* other); - friend void swap(RegisterResponse& a, RegisterResponse& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline RegisterResponse* New() const final { - return CreateMaybeMessage(nullptr); - } - - RegisterResponse* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const RegisterResponse& from); - void MergeFrom(const RegisterResponse& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(RegisterResponse* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // 
@@protoc_insertion_point(class_scope:flyteidl.artifact.RegisterResponse) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class ExecutionInputsRequest final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.ExecutionInputsRequest) */ { - public: - ExecutionInputsRequest(); - virtual ~ExecutionInputsRequest(); - - ExecutionInputsRequest(const ExecutionInputsRequest& from); - - inline ExecutionInputsRequest& operator=(const ExecutionInputsRequest& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - ExecutionInputsRequest(ExecutionInputsRequest&& from) noexcept - : ExecutionInputsRequest() { - *this = ::std::move(from); - } - - inline ExecutionInputsRequest& operator=(ExecutionInputsRequest&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const ExecutionInputsRequest& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const ExecutionInputsRequest* internal_default_instance() { - return reinterpret_cast( - &_ExecutionInputsRequest_default_instance_); - } - static constexpr int kIndexInFileMessages = - 23; - - void Swap(ExecutionInputsRequest* other); - friend void swap(ExecutionInputsRequest& a, ExecutionInputsRequest& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline ExecutionInputsRequest* New() const final { - return CreateMaybeMessage(nullptr); - } - - 
ExecutionInputsRequest* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const ExecutionInputsRequest& from); - void MergeFrom(const ExecutionInputsRequest& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(ExecutionInputsRequest* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // repeated .flyteidl.core.ArtifactID inputs = 2; - int inputs_size() const; - void clear_inputs(); - static const int kInputsFieldNumber = 2; - ::flyteidl::core::ArtifactID* mutable_inputs(int index); - ::google::protobuf::RepeatedPtrField< 
::flyteidl::core::ArtifactID >* - mutable_inputs(); - const ::flyteidl::core::ArtifactID& inputs(int index) const; - ::flyteidl::core::ArtifactID* add_inputs(); - const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >& - inputs() const; - - // .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - bool has_execution_id() const; - void clear_execution_id(); - static const int kExecutionIdFieldNumber = 1; - const ::flyteidl::core::WorkflowExecutionIdentifier& execution_id() const; - ::flyteidl::core::WorkflowExecutionIdentifier* release_execution_id(); - ::flyteidl::core::WorkflowExecutionIdentifier* mutable_execution_id(); - void set_allocated_execution_id(::flyteidl::core::WorkflowExecutionIdentifier* execution_id); - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ExecutionInputsRequest) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID > inputs_; - ::flyteidl::core::WorkflowExecutionIdentifier* execution_id_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// ------------------------------------------------------------------- - -class ExecutionInputsResponse final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.artifact.ExecutionInputsResponse) */ { - public: - ExecutionInputsResponse(); - virtual ~ExecutionInputsResponse(); - - ExecutionInputsResponse(const ExecutionInputsResponse& from); - - inline ExecutionInputsResponse& operator=(const ExecutionInputsResponse& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - ExecutionInputsResponse(ExecutionInputsResponse&& from) noexcept - : ExecutionInputsResponse() { - *this = ::std::move(from); - } - - inline ExecutionInputsResponse& operator=(ExecutionInputsResponse&& from) noexcept { - if 
(GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const ExecutionInputsResponse& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const ExecutionInputsResponse* internal_default_instance() { - return reinterpret_cast( - &_ExecutionInputsResponse_default_instance_); - } - static constexpr int kIndexInFileMessages = - 24; - - void Swap(ExecutionInputsResponse* other); - friend void swap(ExecutionInputsResponse& a, ExecutionInputsResponse& b) { - a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline ExecutionInputsResponse* New() const final { - return CreateMaybeMessage(nullptr); - } - - ExecutionInputsResponse* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const ExecutionInputsResponse& from); - void MergeFrom(const ExecutionInputsResponse& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* 
InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(ExecutionInputsResponse* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ExecutionInputsResponse) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fartifact_2fartifacts_2eproto; -}; -// =================================================================== - - -// =================================================================== - -#ifdef __GNUC__ - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wstrict-aliasing" -#endif // __GNUC__ -// Artifact - -// .flyteidl.core.ArtifactID artifact_id = 1; -inline bool Artifact::has_artifact_id() const { - return this != internal_default_instance() && artifact_id_ != nullptr; -} -inline const ::flyteidl::core::ArtifactID& Artifact::artifact_id() const { - const ::flyteidl::core::ArtifactID* p = artifact_id_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.Artifact.artifact_id) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_ArtifactID_default_instance_); -} -inline ::flyteidl::core::ArtifactID* Artifact::release_artifact_id() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.Artifact.artifact_id) - - ::flyteidl::core::ArtifactID* temp = artifact_id_; - artifact_id_ = nullptr; - return temp; -} -inline ::flyteidl::core::ArtifactID* Artifact::mutable_artifact_id() { - - if (artifact_id_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::ArtifactID>(GetArenaNoVirtual()); - artifact_id_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.Artifact.artifact_id) - return artifact_id_; -} -inline void Artifact::set_allocated_artifact_id(::flyteidl::core::ArtifactID* artifact_id) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(artifact_id_); - } - if (artifact_id) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - artifact_id = ::google::protobuf::internal::GetOwnedMessage( - message_arena, artifact_id, submessage_arena); - } - - } else { - - } - artifact_id_ = artifact_id; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.Artifact.artifact_id) -} - -// .flyteidl.artifact.ArtifactSpec spec = 2; -inline bool Artifact::has_spec() const { - return this != internal_default_instance() && spec_ != nullptr; -} -inline void Artifact::clear_spec() { - if (GetArenaNoVirtual() == nullptr && spec_ != nullptr) { - delete spec_; - } - spec_ = nullptr; -} -inline const ::flyteidl::artifact::ArtifactSpec& Artifact::spec() const { - const ::flyteidl::artifact::ArtifactSpec* p = spec_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.Artifact.spec) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::artifact::_ArtifactSpec_default_instance_); -} -inline ::flyteidl::artifact::ArtifactSpec* Artifact::release_spec() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.Artifact.spec) - - ::flyteidl::artifact::ArtifactSpec* temp = spec_; - spec_ = nullptr; - return temp; -} -inline ::flyteidl::artifact::ArtifactSpec* Artifact::mutable_spec() { - - if (spec_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::artifact::ArtifactSpec>(GetArenaNoVirtual()); - spec_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.Artifact.spec) - return spec_; -} -inline void Artifact::set_allocated_spec(::flyteidl::artifact::ArtifactSpec* spec) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete spec_; - } - if (spec) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - spec = ::google::protobuf::internal::GetOwnedMessage( - message_arena, spec, submessage_arena); - } - - } else { - - } - spec_ = spec; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.Artifact.spec) -} - -// repeated string tags = 3; -inline int Artifact::tags_size() const { - return tags_.size(); -} -inline void Artifact::clear_tags() { - tags_.Clear(); -} -inline const ::std::string& Artifact::tags(int index) const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.Artifact.tags) - return tags_.Get(index); -} -inline ::std::string* Artifact::mutable_tags(int index) { - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.Artifact.tags) - return tags_.Mutable(index); -} -inline void Artifact::set_tags(int index, const ::std::string& value) { - // @@protoc_insertion_point(field_set:flyteidl.artifact.Artifact.tags) - tags_.Mutable(index)->assign(value); -} -#if LANG_CXX11 -inline void Artifact::set_tags(int index, ::std::string&& value) { - // 
@@protoc_insertion_point(field_set:flyteidl.artifact.Artifact.tags) - tags_.Mutable(index)->assign(std::move(value)); -} -#endif -inline void Artifact::set_tags(int index, const char* value) { - GOOGLE_DCHECK(value != nullptr); - tags_.Mutable(index)->assign(value); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.Artifact.tags) -} -inline void Artifact::set_tags(int index, const char* value, size_t size) { - tags_.Mutable(index)->assign( - reinterpret_cast(value), size); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.Artifact.tags) -} -inline ::std::string* Artifact::add_tags() { - // @@protoc_insertion_point(field_add_mutable:flyteidl.artifact.Artifact.tags) - return tags_.Add(); -} -inline void Artifact::add_tags(const ::std::string& value) { - tags_.Add()->assign(value); - // @@protoc_insertion_point(field_add:flyteidl.artifact.Artifact.tags) -} -#if LANG_CXX11 -inline void Artifact::add_tags(::std::string&& value) { - tags_.Add(std::move(value)); - // @@protoc_insertion_point(field_add:flyteidl.artifact.Artifact.tags) -} -#endif -inline void Artifact::add_tags(const char* value) { - GOOGLE_DCHECK(value != nullptr); - tags_.Add()->assign(value); - // @@protoc_insertion_point(field_add_char:flyteidl.artifact.Artifact.tags) -} -inline void Artifact::add_tags(const char* value, size_t size) { - tags_.Add()->assign(reinterpret_cast(value), size); - // @@protoc_insertion_point(field_add_pointer:flyteidl.artifact.Artifact.tags) -} -inline const ::google::protobuf::RepeatedPtrField<::std::string>& -Artifact::tags() const { - // @@protoc_insertion_point(field_list:flyteidl.artifact.Artifact.tags) - return tags_; -} -inline ::google::protobuf::RepeatedPtrField<::std::string>* -Artifact::mutable_tags() { - // @@protoc_insertion_point(field_mutable_list:flyteidl.artifact.Artifact.tags) - return &tags_; -} - -// .flyteidl.artifact.ArtifactSource source = 4; -inline bool Artifact::has_source() const { - return this != 
internal_default_instance() && source_ != nullptr; -} -inline void Artifact::clear_source() { - if (GetArenaNoVirtual() == nullptr && source_ != nullptr) { - delete source_; - } - source_ = nullptr; -} -inline const ::flyteidl::artifact::ArtifactSource& Artifact::source() const { - const ::flyteidl::artifact::ArtifactSource* p = source_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.Artifact.source) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::artifact::_ArtifactSource_default_instance_); -} -inline ::flyteidl::artifact::ArtifactSource* Artifact::release_source() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.Artifact.source) - - ::flyteidl::artifact::ArtifactSource* temp = source_; - source_ = nullptr; - return temp; -} -inline ::flyteidl::artifact::ArtifactSource* Artifact::mutable_source() { - - if (source_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::artifact::ArtifactSource>(GetArenaNoVirtual()); - source_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.Artifact.source) - return source_; -} -inline void Artifact::set_allocated_source(::flyteidl::artifact::ArtifactSource* source) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete source_; - } - if (source) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - source = ::google::protobuf::internal::GetOwnedMessage( - message_arena, source, submessage_arena); - } - - } else { - - } - source_ = source; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.Artifact.source) -} - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// CreateArtifactRequest - -// .flyteidl.core.ArtifactKey artifact_key = 1; -inline bool CreateArtifactRequest::has_artifact_key() const { - return this != internal_default_instance() && 
artifact_key_ != nullptr; -} -inline const ::flyteidl::core::ArtifactKey& CreateArtifactRequest::artifact_key() const { - const ::flyteidl::core::ArtifactKey* p = artifact_key_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.CreateArtifactRequest.artifact_key) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_ArtifactKey_default_instance_); -} -inline ::flyteidl::core::ArtifactKey* CreateArtifactRequest::release_artifact_key() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.CreateArtifactRequest.artifact_key) - - ::flyteidl::core::ArtifactKey* temp = artifact_key_; - artifact_key_ = nullptr; - return temp; -} -inline ::flyteidl::core::ArtifactKey* CreateArtifactRequest::mutable_artifact_key() { - - if (artifact_key_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::ArtifactKey>(GetArenaNoVirtual()); - artifact_key_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.CreateArtifactRequest.artifact_key) - return artifact_key_; -} -inline void CreateArtifactRequest::set_allocated_artifact_key(::flyteidl::core::ArtifactKey* artifact_key) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(artifact_key_); - } - if (artifact_key) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - artifact_key = ::google::protobuf::internal::GetOwnedMessage( - message_arena, artifact_key, submessage_arena); - } - - } else { - - } - artifact_key_ = artifact_key; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.CreateArtifactRequest.artifact_key) -} - -// string version = 3; -inline void CreateArtifactRequest::clear_version() { - version_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& CreateArtifactRequest::version() const { - // 
@@protoc_insertion_point(field_get:flyteidl.artifact.CreateArtifactRequest.version) - return version_.GetNoArena(); -} -inline void CreateArtifactRequest::set_version(const ::std::string& value) { - - version_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.artifact.CreateArtifactRequest.version) -} -#if LANG_CXX11 -inline void CreateArtifactRequest::set_version(::std::string&& value) { - - version_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.artifact.CreateArtifactRequest.version) -} -#endif -inline void CreateArtifactRequest::set_version(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - version_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.CreateArtifactRequest.version) -} -inline void CreateArtifactRequest::set_version(const char* value, size_t size) { - - version_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.CreateArtifactRequest.version) -} -inline ::std::string* CreateArtifactRequest::mutable_version() { - - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.CreateArtifactRequest.version) - return version_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* CreateArtifactRequest::release_version() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.CreateArtifactRequest.version) - - return version_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void CreateArtifactRequest::set_allocated_version(::std::string* version) { - if (version != nullptr) { - - } else { - - } - 
version_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), version); - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.CreateArtifactRequest.version) -} - -// .flyteidl.artifact.ArtifactSpec spec = 2; -inline bool CreateArtifactRequest::has_spec() const { - return this != internal_default_instance() && spec_ != nullptr; -} -inline void CreateArtifactRequest::clear_spec() { - if (GetArenaNoVirtual() == nullptr && spec_ != nullptr) { - delete spec_; - } - spec_ = nullptr; -} -inline const ::flyteidl::artifact::ArtifactSpec& CreateArtifactRequest::spec() const { - const ::flyteidl::artifact::ArtifactSpec* p = spec_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.CreateArtifactRequest.spec) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::artifact::_ArtifactSpec_default_instance_); -} -inline ::flyteidl::artifact::ArtifactSpec* CreateArtifactRequest::release_spec() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.CreateArtifactRequest.spec) - - ::flyteidl::artifact::ArtifactSpec* temp = spec_; - spec_ = nullptr; - return temp; -} -inline ::flyteidl::artifact::ArtifactSpec* CreateArtifactRequest::mutable_spec() { - - if (spec_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::artifact::ArtifactSpec>(GetArenaNoVirtual()); - spec_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.CreateArtifactRequest.spec) - return spec_; -} -inline void CreateArtifactRequest::set_allocated_spec(::flyteidl::artifact::ArtifactSpec* spec) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete spec_; - } - if (spec) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - spec = ::google::protobuf::internal::GetOwnedMessage( - message_arena, spec, submessage_arena); - } - - } else { - - } - spec_ = spec; - // 
@@protoc_insertion_point(field_set_allocated:flyteidl.artifact.CreateArtifactRequest.spec) -} - -// map partitions = 4; -inline int CreateArtifactRequest::partitions_size() const { - return partitions_.size(); -} -inline void CreateArtifactRequest::clear_partitions() { - partitions_.Clear(); -} -inline const ::google::protobuf::Map< ::std::string, ::std::string >& -CreateArtifactRequest::partitions() const { - // @@protoc_insertion_point(field_map:flyteidl.artifact.CreateArtifactRequest.partitions) - return partitions_.GetMap(); -} -inline ::google::protobuf::Map< ::std::string, ::std::string >* -CreateArtifactRequest::mutable_partitions() { - // @@protoc_insertion_point(field_mutable_map:flyteidl.artifact.CreateArtifactRequest.partitions) - return partitions_.MutableMap(); -} - -// string tag = 5; -inline void CreateArtifactRequest::clear_tag() { - tag_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& CreateArtifactRequest::tag() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.CreateArtifactRequest.tag) - return tag_.GetNoArena(); -} -inline void CreateArtifactRequest::set_tag(const ::std::string& value) { - - tag_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.artifact.CreateArtifactRequest.tag) -} -#if LANG_CXX11 -inline void CreateArtifactRequest::set_tag(::std::string&& value) { - - tag_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.artifact.CreateArtifactRequest.tag) -} -#endif -inline void CreateArtifactRequest::set_tag(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - tag_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.CreateArtifactRequest.tag) -} -inline void 
CreateArtifactRequest::set_tag(const char* value, size_t size) { - - tag_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.CreateArtifactRequest.tag) -} -inline ::std::string* CreateArtifactRequest::mutable_tag() { - - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.CreateArtifactRequest.tag) - return tag_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* CreateArtifactRequest::release_tag() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.CreateArtifactRequest.tag) - - return tag_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void CreateArtifactRequest::set_allocated_tag(::std::string* tag) { - if (tag != nullptr) { - - } else { - - } - tag_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), tag); - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.CreateArtifactRequest.tag) -} - -// .flyteidl.artifact.ArtifactSource source = 6; -inline bool CreateArtifactRequest::has_source() const { - return this != internal_default_instance() && source_ != nullptr; -} -inline void CreateArtifactRequest::clear_source() { - if (GetArenaNoVirtual() == nullptr && source_ != nullptr) { - delete source_; - } - source_ = nullptr; -} -inline const ::flyteidl::artifact::ArtifactSource& CreateArtifactRequest::source() const { - const ::flyteidl::artifact::ArtifactSource* p = source_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.CreateArtifactRequest.source) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::artifact::_ArtifactSource_default_instance_); -} -inline ::flyteidl::artifact::ArtifactSource* CreateArtifactRequest::release_source() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.CreateArtifactRequest.source) - - ::flyteidl::artifact::ArtifactSource* temp = source_; - source_ = nullptr; - return temp; -} -inline ::flyteidl::artifact::ArtifactSource* CreateArtifactRequest::mutable_source() { - - if (source_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::artifact::ArtifactSource>(GetArenaNoVirtual()); - source_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.CreateArtifactRequest.source) - return source_; -} -inline void CreateArtifactRequest::set_allocated_source(::flyteidl::artifact::ArtifactSource* source) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete source_; - } - if (source) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - source = ::google::protobuf::internal::GetOwnedMessage( - message_arena, source, submessage_arena); - } - - } else { - - } - source_ = source; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.CreateArtifactRequest.source) -} - -// ------------------------------------------------------------------- - -// ArtifactSource - -// .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; -inline bool ArtifactSource::has_workflow_execution() const { - return this != internal_default_instance() && workflow_execution_ != nullptr; -} -inline const ::flyteidl::core::WorkflowExecutionIdentifier& ArtifactSource::workflow_execution() const { - const ::flyteidl::core::WorkflowExecutionIdentifier* p = workflow_execution_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactSource.workflow_execution) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_WorkflowExecutionIdentifier_default_instance_); -} -inline ::flyteidl::core::WorkflowExecutionIdentifier* ArtifactSource::release_workflow_execution() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactSource.workflow_execution) - - ::flyteidl::core::WorkflowExecutionIdentifier* temp = workflow_execution_; - workflow_execution_ = nullptr; - return temp; -} -inline ::flyteidl::core::WorkflowExecutionIdentifier* ArtifactSource::mutable_workflow_execution() { - - if (workflow_execution_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::WorkflowExecutionIdentifier>(GetArenaNoVirtual()); - workflow_execution_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactSource.workflow_execution) - return workflow_execution_; -} -inline void ArtifactSource::set_allocated_workflow_execution(::flyteidl::core::WorkflowExecutionIdentifier* workflow_execution) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(workflow_execution_); - } - if (workflow_execution) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - workflow_execution = ::google::protobuf::internal::GetOwnedMessage( - message_arena, workflow_execution, submessage_arena); - } - - } else { - - } - workflow_execution_ = workflow_execution; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactSource.workflow_execution) -} - -// string node_id = 2; -inline void ArtifactSource::clear_node_id() { - node_id_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& ArtifactSource::node_id() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactSource.node_id) - return node_id_.GetNoArena(); -} -inline void ArtifactSource::set_node_id(const ::std::string& value) { - 
- node_id_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.artifact.ArtifactSource.node_id) -} -#if LANG_CXX11 -inline void ArtifactSource::set_node_id(::std::string&& value) { - - node_id_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.artifact.ArtifactSource.node_id) -} -#endif -inline void ArtifactSource::set_node_id(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - node_id_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.ArtifactSource.node_id) -} -inline void ArtifactSource::set_node_id(const char* value, size_t size) { - - node_id_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.ArtifactSource.node_id) -} -inline ::std::string* ArtifactSource::mutable_node_id() { - - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactSource.node_id) - return node_id_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* ArtifactSource::release_node_id() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactSource.node_id) - - return node_id_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void ArtifactSource::set_allocated_node_id(::std::string* node_id) { - if (node_id != nullptr) { - - } else { - - } - node_id_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), node_id); - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactSource.node_id) -} - -// .flyteidl.core.Identifier task_id = 3; -inline bool ArtifactSource::has_task_id() const { - return this != 
internal_default_instance() && task_id_ != nullptr; -} -inline const ::flyteidl::core::Identifier& ArtifactSource::task_id() const { - const ::flyteidl::core::Identifier* p = task_id_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactSource.task_id) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_Identifier_default_instance_); -} -inline ::flyteidl::core::Identifier* ArtifactSource::release_task_id() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactSource.task_id) - - ::flyteidl::core::Identifier* temp = task_id_; - task_id_ = nullptr; - return temp; -} -inline ::flyteidl::core::Identifier* ArtifactSource::mutable_task_id() { - - if (task_id_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::Identifier>(GetArenaNoVirtual()); - task_id_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactSource.task_id) - return task_id_; -} -inline void ArtifactSource::set_allocated_task_id(::flyteidl::core::Identifier* task_id) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(task_id_); - } - if (task_id) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - task_id = ::google::protobuf::internal::GetOwnedMessage( - message_arena, task_id, submessage_arena); - } - - } else { - - } - task_id_ = task_id; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactSource.task_id) -} - -// uint32 retry_attempt = 4; -inline void ArtifactSource::clear_retry_attempt() { - retry_attempt_ = 0u; -} -inline ::google::protobuf::uint32 ArtifactSource::retry_attempt() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactSource.retry_attempt) - return retry_attempt_; -} -inline void ArtifactSource::set_retry_attempt(::google::protobuf::uint32 value) { - - retry_attempt_ = value; - // 
@@protoc_insertion_point(field_set:flyteidl.artifact.ArtifactSource.retry_attempt) -} - -// string principal = 5; -inline void ArtifactSource::clear_principal() { - principal_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& ArtifactSource::principal() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactSource.principal) - return principal_.GetNoArena(); -} -inline void ArtifactSource::set_principal(const ::std::string& value) { - - principal_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.artifact.ArtifactSource.principal) -} -#if LANG_CXX11 -inline void ArtifactSource::set_principal(::std::string&& value) { - - principal_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.artifact.ArtifactSource.principal) -} -#endif -inline void ArtifactSource::set_principal(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - principal_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.ArtifactSource.principal) -} -inline void ArtifactSource::set_principal(const char* value, size_t size) { - - principal_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.ArtifactSource.principal) -} -inline ::std::string* ArtifactSource::mutable_principal() { - - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactSource.principal) - return principal_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* ArtifactSource::release_principal() { - // 
@@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactSource.principal) - - return principal_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void ArtifactSource::set_allocated_principal(::std::string* principal) { - if (principal != nullptr) { - - } else { - - } - principal_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), principal); - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactSource.principal) -} - -// ------------------------------------------------------------------- - -// ArtifactSpec - -// .flyteidl.core.Literal value = 1; -inline bool ArtifactSpec::has_value() const { - return this != internal_default_instance() && value_ != nullptr; -} -inline const ::flyteidl::core::Literal& ArtifactSpec::value() const { - const ::flyteidl::core::Literal* p = value_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactSpec.value) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_Literal_default_instance_); -} -inline ::flyteidl::core::Literal* ArtifactSpec::release_value() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactSpec.value) - - ::flyteidl::core::Literal* temp = value_; - value_ = nullptr; - return temp; -} -inline ::flyteidl::core::Literal* ArtifactSpec::mutable_value() { - - if (value_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::Literal>(GetArenaNoVirtual()); - value_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactSpec.value) - return value_; -} -inline void ArtifactSpec::set_allocated_value(::flyteidl::core::Literal* value) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(value_); - } - if (value) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - value = 
::google::protobuf::internal::GetOwnedMessage( - message_arena, value, submessage_arena); - } - - } else { - - } - value_ = value; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactSpec.value) -} - -// .flyteidl.core.LiteralType type = 2; -inline bool ArtifactSpec::has_type() const { - return this != internal_default_instance() && type_ != nullptr; -} -inline const ::flyteidl::core::LiteralType& ArtifactSpec::type() const { - const ::flyteidl::core::LiteralType* p = type_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactSpec.type) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_LiteralType_default_instance_); -} -inline ::flyteidl::core::LiteralType* ArtifactSpec::release_type() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactSpec.type) - - ::flyteidl::core::LiteralType* temp = type_; - type_ = nullptr; - return temp; -} -inline ::flyteidl::core::LiteralType* ArtifactSpec::mutable_type() { - - if (type_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::LiteralType>(GetArenaNoVirtual()); - type_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactSpec.type) - return type_; -} -inline void ArtifactSpec::set_allocated_type(::flyteidl::core::LiteralType* type) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(type_); - } - if (type) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - type = ::google::protobuf::internal::GetOwnedMessage( - message_arena, type, submessage_arena); - } - - } else { - - } - type_ = type; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactSpec.type) -} - -// string short_description = 3; -inline void ArtifactSpec::clear_short_description() { - 
short_description_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& ArtifactSpec::short_description() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactSpec.short_description) - return short_description_.GetNoArena(); -} -inline void ArtifactSpec::set_short_description(const ::std::string& value) { - - short_description_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.artifact.ArtifactSpec.short_description) -} -#if LANG_CXX11 -inline void ArtifactSpec::set_short_description(::std::string&& value) { - - short_description_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.artifact.ArtifactSpec.short_description) -} -#endif -inline void ArtifactSpec::set_short_description(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - short_description_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.ArtifactSpec.short_description) -} -inline void ArtifactSpec::set_short_description(const char* value, size_t size) { - - short_description_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.ArtifactSpec.short_description) -} -inline ::std::string* ArtifactSpec::mutable_short_description() { - - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactSpec.short_description) - return short_description_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* ArtifactSpec::release_short_description() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactSpec.short_description) - - return 
short_description_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void ArtifactSpec::set_allocated_short_description(::std::string* short_description) { - if (short_description != nullptr) { - - } else { - - } - short_description_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), short_description); - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactSpec.short_description) -} - -// .google.protobuf.Any user_metadata = 4; -inline bool ArtifactSpec::has_user_metadata() const { - return this != internal_default_instance() && user_metadata_ != nullptr; -} -inline const ::google::protobuf::Any& ArtifactSpec::user_metadata() const { - const ::google::protobuf::Any* p = user_metadata_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactSpec.user_metadata) - return p != nullptr ? *p : *reinterpret_cast( - &::google::protobuf::_Any_default_instance_); -} -inline ::google::protobuf::Any* ArtifactSpec::release_user_metadata() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactSpec.user_metadata) - - ::google::protobuf::Any* temp = user_metadata_; - user_metadata_ = nullptr; - return temp; -} -inline ::google::protobuf::Any* ArtifactSpec::mutable_user_metadata() { - - if (user_metadata_ == nullptr) { - auto* p = CreateMaybeMessage<::google::protobuf::Any>(GetArenaNoVirtual()); - user_metadata_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactSpec.user_metadata) - return user_metadata_; -} -inline void ArtifactSpec::set_allocated_user_metadata(::google::protobuf::Any* user_metadata) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(user_metadata_); - } - if (user_metadata) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - user_metadata = 
::google::protobuf::internal::GetOwnedMessage( - message_arena, user_metadata, submessage_arena); - } - - } else { - - } - user_metadata_ = user_metadata; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactSpec.user_metadata) -} - -// string metadata_type = 5; -inline void ArtifactSpec::clear_metadata_type() { - metadata_type_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& ArtifactSpec::metadata_type() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactSpec.metadata_type) - return metadata_type_.GetNoArena(); -} -inline void ArtifactSpec::set_metadata_type(const ::std::string& value) { - - metadata_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.artifact.ArtifactSpec.metadata_type) -} -#if LANG_CXX11 -inline void ArtifactSpec::set_metadata_type(::std::string&& value) { - - metadata_type_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.artifact.ArtifactSpec.metadata_type) -} -#endif -inline void ArtifactSpec::set_metadata_type(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - metadata_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.ArtifactSpec.metadata_type) -} -inline void ArtifactSpec::set_metadata_type(const char* value, size_t size) { - - metadata_type_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.ArtifactSpec.metadata_type) -} -inline ::std::string* ArtifactSpec::mutable_metadata_type() { - - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactSpec.metadata_type) - return 
metadata_type_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* ArtifactSpec::release_metadata_type() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactSpec.metadata_type) - - return metadata_type_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void ArtifactSpec::set_allocated_metadata_type(::std::string* metadata_type) { - if (metadata_type != nullptr) { - - } else { - - } - metadata_type_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), metadata_type); - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactSpec.metadata_type) -} - -// ------------------------------------------------------------------- - -// CreateArtifactResponse - -// .flyteidl.artifact.Artifact artifact = 1; -inline bool CreateArtifactResponse::has_artifact() const { - return this != internal_default_instance() && artifact_ != nullptr; -} -inline void CreateArtifactResponse::clear_artifact() { - if (GetArenaNoVirtual() == nullptr && artifact_ != nullptr) { - delete artifact_; - } - artifact_ = nullptr; -} -inline const ::flyteidl::artifact::Artifact& CreateArtifactResponse::artifact() const { - const ::flyteidl::artifact::Artifact* p = artifact_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.CreateArtifactResponse.artifact) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::artifact::_Artifact_default_instance_); -} -inline ::flyteidl::artifact::Artifact* CreateArtifactResponse::release_artifact() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.CreateArtifactResponse.artifact) - - ::flyteidl::artifact::Artifact* temp = artifact_; - artifact_ = nullptr; - return temp; -} -inline ::flyteidl::artifact::Artifact* CreateArtifactResponse::mutable_artifact() { - - if (artifact_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::artifact::Artifact>(GetArenaNoVirtual()); - artifact_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.CreateArtifactResponse.artifact) - return artifact_; -} -inline void CreateArtifactResponse::set_allocated_artifact(::flyteidl::artifact::Artifact* artifact) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete artifact_; - } - if (artifact) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - artifact = ::google::protobuf::internal::GetOwnedMessage( - message_arena, artifact, submessage_arena); - } - - } else { - - } - artifact_ = artifact; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.CreateArtifactResponse.artifact) -} - -// ------------------------------------------------------------------- - -// GetArtifactRequest - -// .flyteidl.core.ArtifactQuery query = 1; -inline bool GetArtifactRequest::has_query() const { - return this != internal_default_instance() && query_ != nullptr; -} -inline const ::flyteidl::core::ArtifactQuery& GetArtifactRequest::query() const { - const ::flyteidl::core::ArtifactQuery* p = query_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.GetArtifactRequest.query) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_ArtifactQuery_default_instance_); -} -inline ::flyteidl::core::ArtifactQuery* GetArtifactRequest::release_query() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.GetArtifactRequest.query) - - ::flyteidl::core::ArtifactQuery* temp = query_; - query_ = nullptr; - return temp; -} -inline ::flyteidl::core::ArtifactQuery* GetArtifactRequest::mutable_query() { - - if (query_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::ArtifactQuery>(GetArenaNoVirtual()); - query_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.GetArtifactRequest.query) - return query_; -} -inline void GetArtifactRequest::set_allocated_query(::flyteidl::core::ArtifactQuery* query) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(query_); - } - if (query) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - query = ::google::protobuf::internal::GetOwnedMessage( - message_arena, query, submessage_arena); - } - - } else { - - } - query_ = query; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.GetArtifactRequest.query) -} - -// bool details = 2; -inline void GetArtifactRequest::clear_details() { - details_ = false; -} -inline bool GetArtifactRequest::details() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.GetArtifactRequest.details) - return details_; -} -inline void GetArtifactRequest::set_details(bool value) { - - details_ = value; - // @@protoc_insertion_point(field_set:flyteidl.artifact.GetArtifactRequest.details) -} - -// ------------------------------------------------------------------- - -// GetArtifactResponse - -// .flyteidl.artifact.Artifact artifact = 1; -inline bool GetArtifactResponse::has_artifact() const { - return this != internal_default_instance() && artifact_ != nullptr; -} -inline void 
GetArtifactResponse::clear_artifact() { - if (GetArenaNoVirtual() == nullptr && artifact_ != nullptr) { - delete artifact_; - } - artifact_ = nullptr; -} -inline const ::flyteidl::artifact::Artifact& GetArtifactResponse::artifact() const { - const ::flyteidl::artifact::Artifact* p = artifact_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.GetArtifactResponse.artifact) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::artifact::_Artifact_default_instance_); -} -inline ::flyteidl::artifact::Artifact* GetArtifactResponse::release_artifact() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.GetArtifactResponse.artifact) - - ::flyteidl::artifact::Artifact* temp = artifact_; - artifact_ = nullptr; - return temp; -} -inline ::flyteidl::artifact::Artifact* GetArtifactResponse::mutable_artifact() { - - if (artifact_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::artifact::Artifact>(GetArenaNoVirtual()); - artifact_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.GetArtifactResponse.artifact) - return artifact_; -} -inline void GetArtifactResponse::set_allocated_artifact(::flyteidl::artifact::Artifact* artifact) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete artifact_; - } - if (artifact) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - artifact = ::google::protobuf::internal::GetOwnedMessage( - message_arena, artifact, submessage_arena); - } - - } else { - - } - artifact_ = artifact; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.GetArtifactResponse.artifact) -} - -// ------------------------------------------------------------------- - -// SearchOptions - -// bool strict_partitions = 1; -inline void SearchOptions::clear_strict_partitions() { - strict_partitions_ = false; -} -inline bool SearchOptions::strict_partitions() const { - // 
@@protoc_insertion_point(field_get:flyteidl.artifact.SearchOptions.strict_partitions) - return strict_partitions_; -} -inline void SearchOptions::set_strict_partitions(bool value) { - - strict_partitions_ = value; - // @@protoc_insertion_point(field_set:flyteidl.artifact.SearchOptions.strict_partitions) -} - -// bool latest_by_key = 2; -inline void SearchOptions::clear_latest_by_key() { - latest_by_key_ = false; -} -inline bool SearchOptions::latest_by_key() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.SearchOptions.latest_by_key) - return latest_by_key_; -} -inline void SearchOptions::set_latest_by_key(bool value) { - - latest_by_key_ = value; - // @@protoc_insertion_point(field_set:flyteidl.artifact.SearchOptions.latest_by_key) -} - -// ------------------------------------------------------------------- - -// SearchArtifactsRequest - -// .flyteidl.core.ArtifactKey artifact_key = 1; -inline bool SearchArtifactsRequest::has_artifact_key() const { - return this != internal_default_instance() && artifact_key_ != nullptr; -} -inline const ::flyteidl::core::ArtifactKey& SearchArtifactsRequest::artifact_key() const { - const ::flyteidl::core::ArtifactKey* p = artifact_key_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.SearchArtifactsRequest.artifact_key) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_ArtifactKey_default_instance_); -} -inline ::flyteidl::core::ArtifactKey* SearchArtifactsRequest::release_artifact_key() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.SearchArtifactsRequest.artifact_key) - - ::flyteidl::core::ArtifactKey* temp = artifact_key_; - artifact_key_ = nullptr; - return temp; -} -inline ::flyteidl::core::ArtifactKey* SearchArtifactsRequest::mutable_artifact_key() { - - if (artifact_key_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::ArtifactKey>(GetArenaNoVirtual()); - artifact_key_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.SearchArtifactsRequest.artifact_key) - return artifact_key_; -} -inline void SearchArtifactsRequest::set_allocated_artifact_key(::flyteidl::core::ArtifactKey* artifact_key) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(artifact_key_); - } - if (artifact_key) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - artifact_key = ::google::protobuf::internal::GetOwnedMessage( - message_arena, artifact_key, submessage_arena); - } - - } else { - - } - artifact_key_ = artifact_key; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.SearchArtifactsRequest.artifact_key) -} - -// .flyteidl.core.Partitions partitions = 2; -inline bool SearchArtifactsRequest::has_partitions() const { - return this != internal_default_instance() && partitions_ != nullptr; -} -inline const ::flyteidl::core::Partitions& SearchArtifactsRequest::partitions() const { - const ::flyteidl::core::Partitions* p = partitions_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.SearchArtifactsRequest.partitions) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_Partitions_default_instance_); -} -inline ::flyteidl::core::Partitions* SearchArtifactsRequest::release_partitions() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.SearchArtifactsRequest.partitions) - - ::flyteidl::core::Partitions* temp = partitions_; - partitions_ = nullptr; - return temp; -} -inline ::flyteidl::core::Partitions* SearchArtifactsRequest::mutable_partitions() { - - if (partitions_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::Partitions>(GetArenaNoVirtual()); - partitions_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.SearchArtifactsRequest.partitions) - return partitions_; -} -inline void SearchArtifactsRequest::set_allocated_partitions(::flyteidl::core::Partitions* partitions) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(partitions_); - } - if (partitions) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - partitions = ::google::protobuf::internal::GetOwnedMessage( - message_arena, partitions, submessage_arena); - } - - } else { - - } - partitions_ = partitions; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.SearchArtifactsRequest.partitions) -} - -// string principal = 3; -inline void SearchArtifactsRequest::clear_principal() { - principal_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& SearchArtifactsRequest::principal() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.SearchArtifactsRequest.principal) - return principal_.GetNoArena(); -} -inline void SearchArtifactsRequest::set_principal(const ::std::string& value) { - - principal_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // 
@@protoc_insertion_point(field_set:flyteidl.artifact.SearchArtifactsRequest.principal) -} -#if LANG_CXX11 -inline void SearchArtifactsRequest::set_principal(::std::string&& value) { - - principal_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.artifact.SearchArtifactsRequest.principal) -} -#endif -inline void SearchArtifactsRequest::set_principal(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - principal_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.SearchArtifactsRequest.principal) -} -inline void SearchArtifactsRequest::set_principal(const char* value, size_t size) { - - principal_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.SearchArtifactsRequest.principal) -} -inline ::std::string* SearchArtifactsRequest::mutable_principal() { - - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.SearchArtifactsRequest.principal) - return principal_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* SearchArtifactsRequest::release_principal() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.SearchArtifactsRequest.principal) - - return principal_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void SearchArtifactsRequest::set_allocated_principal(::std::string* principal) { - if (principal != nullptr) { - - } else { - - } - principal_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), principal); - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.SearchArtifactsRequest.principal) -} - -// string version = 4; -inline void 
SearchArtifactsRequest::clear_version() { - version_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& SearchArtifactsRequest::version() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.SearchArtifactsRequest.version) - return version_.GetNoArena(); -} -inline void SearchArtifactsRequest::set_version(const ::std::string& value) { - - version_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.artifact.SearchArtifactsRequest.version) -} -#if LANG_CXX11 -inline void SearchArtifactsRequest::set_version(::std::string&& value) { - - version_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.artifact.SearchArtifactsRequest.version) -} -#endif -inline void SearchArtifactsRequest::set_version(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - version_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.SearchArtifactsRequest.version) -} -inline void SearchArtifactsRequest::set_version(const char* value, size_t size) { - - version_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.SearchArtifactsRequest.version) -} -inline ::std::string* SearchArtifactsRequest::mutable_version() { - - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.SearchArtifactsRequest.version) - return version_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* SearchArtifactsRequest::release_version() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.SearchArtifactsRequest.version) - - return 
version_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void SearchArtifactsRequest::set_allocated_version(::std::string* version) { - if (version != nullptr) { - - } else { - - } - version_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), version); - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.SearchArtifactsRequest.version) -} - -// .flyteidl.artifact.SearchOptions options = 5; -inline bool SearchArtifactsRequest::has_options() const { - return this != internal_default_instance() && options_ != nullptr; -} -inline void SearchArtifactsRequest::clear_options() { - if (GetArenaNoVirtual() == nullptr && options_ != nullptr) { - delete options_; - } - options_ = nullptr; -} -inline const ::flyteidl::artifact::SearchOptions& SearchArtifactsRequest::options() const { - const ::flyteidl::artifact::SearchOptions* p = options_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.SearchArtifactsRequest.options) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::artifact::_SearchOptions_default_instance_); -} -inline ::flyteidl::artifact::SearchOptions* SearchArtifactsRequest::release_options() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.SearchArtifactsRequest.options) - - ::flyteidl::artifact::SearchOptions* temp = options_; - options_ = nullptr; - return temp; -} -inline ::flyteidl::artifact::SearchOptions* SearchArtifactsRequest::mutable_options() { - - if (options_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::artifact::SearchOptions>(GetArenaNoVirtual()); - options_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.SearchArtifactsRequest.options) - return options_; -} -inline void SearchArtifactsRequest::set_allocated_options(::flyteidl::artifact::SearchOptions* options) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete options_; - } - if (options) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - options = ::google::protobuf::internal::GetOwnedMessage( - message_arena, options, submessage_arena); - } - - } else { - - } - options_ = options; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.SearchArtifactsRequest.options) -} - -// string token = 6; -inline void SearchArtifactsRequest::clear_token() { - token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& SearchArtifactsRequest::token() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.SearchArtifactsRequest.token) - return token_.GetNoArena(); -} -inline void SearchArtifactsRequest::set_token(const ::std::string& value) { - - token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.artifact.SearchArtifactsRequest.token) -} -#if LANG_CXX11 -inline void 
SearchArtifactsRequest::set_token(::std::string&& value) { - - token_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.artifact.SearchArtifactsRequest.token) -} -#endif -inline void SearchArtifactsRequest::set_token(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.SearchArtifactsRequest.token) -} -inline void SearchArtifactsRequest::set_token(const char* value, size_t size) { - - token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.SearchArtifactsRequest.token) -} -inline ::std::string* SearchArtifactsRequest::mutable_token() { - - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.SearchArtifactsRequest.token) - return token_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* SearchArtifactsRequest::release_token() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.SearchArtifactsRequest.token) - - return token_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void SearchArtifactsRequest::set_allocated_token(::std::string* token) { - if (token != nullptr) { - - } else { - - } - token_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), token); - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.SearchArtifactsRequest.token) -} - -// int32 limit = 7; -inline void SearchArtifactsRequest::clear_limit() { - limit_ = 0; -} -inline ::google::protobuf::int32 SearchArtifactsRequest::limit() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.SearchArtifactsRequest.limit) - return 
limit_; -} -inline void SearchArtifactsRequest::set_limit(::google::protobuf::int32 value) { - - limit_ = value; - // @@protoc_insertion_point(field_set:flyteidl.artifact.SearchArtifactsRequest.limit) -} - -// ------------------------------------------------------------------- - -// SearchArtifactsResponse - -// repeated .flyteidl.artifact.Artifact artifacts = 1; -inline int SearchArtifactsResponse::artifacts_size() const { - return artifacts_.size(); -} -inline void SearchArtifactsResponse::clear_artifacts() { - artifacts_.Clear(); -} -inline ::flyteidl::artifact::Artifact* SearchArtifactsResponse::mutable_artifacts(int index) { - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.SearchArtifactsResponse.artifacts) - return artifacts_.Mutable(index); -} -inline ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::Artifact >* -SearchArtifactsResponse::mutable_artifacts() { - // @@protoc_insertion_point(field_mutable_list:flyteidl.artifact.SearchArtifactsResponse.artifacts) - return &artifacts_; -} -inline const ::flyteidl::artifact::Artifact& SearchArtifactsResponse::artifacts(int index) const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.SearchArtifactsResponse.artifacts) - return artifacts_.Get(index); -} -inline ::flyteidl::artifact::Artifact* SearchArtifactsResponse::add_artifacts() { - // @@protoc_insertion_point(field_add:flyteidl.artifact.SearchArtifactsResponse.artifacts) - return artifacts_.Add(); -} -inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::Artifact >& -SearchArtifactsResponse::artifacts() const { - // @@protoc_insertion_point(field_list:flyteidl.artifact.SearchArtifactsResponse.artifacts) - return artifacts_; -} - -// string token = 2; -inline void SearchArtifactsResponse::clear_token() { - token_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& SearchArtifactsResponse::token() const { - // 
@@protoc_insertion_point(field_get:flyteidl.artifact.SearchArtifactsResponse.token) - return token_.GetNoArena(); -} -inline void SearchArtifactsResponse::set_token(const ::std::string& value) { - - token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.artifact.SearchArtifactsResponse.token) -} -#if LANG_CXX11 -inline void SearchArtifactsResponse::set_token(::std::string&& value) { - - token_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.artifact.SearchArtifactsResponse.token) -} -#endif -inline void SearchArtifactsResponse::set_token(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.SearchArtifactsResponse.token) -} -inline void SearchArtifactsResponse::set_token(const char* value, size_t size) { - - token_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.SearchArtifactsResponse.token) -} -inline ::std::string* SearchArtifactsResponse::mutable_token() { - - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.SearchArtifactsResponse.token) - return token_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* SearchArtifactsResponse::release_token() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.SearchArtifactsResponse.token) - - return token_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void SearchArtifactsResponse::set_allocated_token(::std::string* token) { - if (token != nullptr) { - - } else { - - } - 
token_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), token); - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.SearchArtifactsResponse.token) -} - -// ------------------------------------------------------------------- - -// FindByWorkflowExecRequest - -// .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; -inline bool FindByWorkflowExecRequest::has_exec_id() const { - return this != internal_default_instance() && exec_id_ != nullptr; -} -inline const ::flyteidl::core::WorkflowExecutionIdentifier& FindByWorkflowExecRequest::exec_id() const { - const ::flyteidl::core::WorkflowExecutionIdentifier* p = exec_id_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.FindByWorkflowExecRequest.exec_id) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_WorkflowExecutionIdentifier_default_instance_); -} -inline ::flyteidl::core::WorkflowExecutionIdentifier* FindByWorkflowExecRequest::release_exec_id() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.FindByWorkflowExecRequest.exec_id) - - ::flyteidl::core::WorkflowExecutionIdentifier* temp = exec_id_; - exec_id_ = nullptr; - return temp; -} -inline ::flyteidl::core::WorkflowExecutionIdentifier* FindByWorkflowExecRequest::mutable_exec_id() { - - if (exec_id_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::WorkflowExecutionIdentifier>(GetArenaNoVirtual()); - exec_id_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.FindByWorkflowExecRequest.exec_id) - return exec_id_; -} -inline void FindByWorkflowExecRequest::set_allocated_exec_id(::flyteidl::core::WorkflowExecutionIdentifier* exec_id) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(exec_id_); - } - if (exec_id) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - exec_id = 
::google::protobuf::internal::GetOwnedMessage( - message_arena, exec_id, submessage_arena); - } - - } else { - - } - exec_id_ = exec_id; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.FindByWorkflowExecRequest.exec_id) -} - -// .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; -inline void FindByWorkflowExecRequest::clear_direction() { - direction_ = 0; -} -inline ::flyteidl::artifact::FindByWorkflowExecRequest_Direction FindByWorkflowExecRequest::direction() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.FindByWorkflowExecRequest.direction) - return static_cast< ::flyteidl::artifact::FindByWorkflowExecRequest_Direction >(direction_); -} -inline void FindByWorkflowExecRequest::set_direction(::flyteidl::artifact::FindByWorkflowExecRequest_Direction value) { - - direction_ = value; - // @@protoc_insertion_point(field_set:flyteidl.artifact.FindByWorkflowExecRequest.direction) -} - -// ------------------------------------------------------------------- - -// AddTagRequest - -// .flyteidl.core.ArtifactID artifact_id = 1; -inline bool AddTagRequest::has_artifact_id() const { - return this != internal_default_instance() && artifact_id_ != nullptr; -} -inline const ::flyteidl::core::ArtifactID& AddTagRequest::artifact_id() const { - const ::flyteidl::core::ArtifactID* p = artifact_id_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.AddTagRequest.artifact_id) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_ArtifactID_default_instance_); -} -inline ::flyteidl::core::ArtifactID* AddTagRequest::release_artifact_id() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.AddTagRequest.artifact_id) - - ::flyteidl::core::ArtifactID* temp = artifact_id_; - artifact_id_ = nullptr; - return temp; -} -inline ::flyteidl::core::ArtifactID* AddTagRequest::mutable_artifact_id() { - - if (artifact_id_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::ArtifactID>(GetArenaNoVirtual()); - artifact_id_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.AddTagRequest.artifact_id) - return artifact_id_; -} -inline void AddTagRequest::set_allocated_artifact_id(::flyteidl::core::ArtifactID* artifact_id) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(artifact_id_); - } - if (artifact_id) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - artifact_id = ::google::protobuf::internal::GetOwnedMessage( - message_arena, artifact_id, submessage_arena); - } - - } else { - - } - artifact_id_ = artifact_id; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.AddTagRequest.artifact_id) -} - -// string value = 2; -inline void AddTagRequest::clear_value() { - value_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline const ::std::string& AddTagRequest::value() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.AddTagRequest.value) - return value_.GetNoArena(); -} -inline void AddTagRequest::set_value(const ::std::string& value) { - - value_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set:flyteidl.artifact.AddTagRequest.value) -} -#if LANG_CXX11 -inline void AddTagRequest::set_value(::std::string&& value) { - 
- value_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); - // @@protoc_insertion_point(field_set_rvalue:flyteidl.artifact.AddTagRequest.value) -} -#endif -inline void AddTagRequest::set_value(const char* value) { - GOOGLE_DCHECK(value != nullptr); - - value_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); - // @@protoc_insertion_point(field_set_char:flyteidl.artifact.AddTagRequest.value) -} -inline void AddTagRequest::set_value(const char* value, size_t size) { - - value_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); - // @@protoc_insertion_point(field_set_pointer:flyteidl.artifact.AddTagRequest.value) -} -inline ::std::string* AddTagRequest::mutable_value() { - - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.AddTagRequest.value) - return value_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline ::std::string* AddTagRequest::release_value() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.AddTagRequest.value) - - return value_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); -} -inline void AddTagRequest::set_allocated_value(::std::string* value) { - if (value != nullptr) { - - } else { - - } - value_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.AddTagRequest.value) -} - -// bool overwrite = 3; -inline void AddTagRequest::clear_overwrite() { - overwrite_ = false; -} -inline bool AddTagRequest::overwrite() const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.AddTagRequest.overwrite) - return overwrite_; -} -inline void AddTagRequest::set_overwrite(bool value) { - - overwrite_ = value; - // @@protoc_insertion_point(field_set:flyteidl.artifact.AddTagRequest.overwrite) -} - -// 
------------------------------------------------------------------- - -// AddTagResponse - -// ------------------------------------------------------------------- - -// CreateTriggerRequest - -// .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; -inline bool CreateTriggerRequest::has_trigger_launch_plan() const { - return this != internal_default_instance() && trigger_launch_plan_ != nullptr; -} -inline const ::flyteidl::admin::LaunchPlan& CreateTriggerRequest::trigger_launch_plan() const { - const ::flyteidl::admin::LaunchPlan* p = trigger_launch_plan_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.CreateTriggerRequest.trigger_launch_plan) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::admin::_LaunchPlan_default_instance_); -} -inline ::flyteidl::admin::LaunchPlan* CreateTriggerRequest::release_trigger_launch_plan() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.CreateTriggerRequest.trigger_launch_plan) - - ::flyteidl::admin::LaunchPlan* temp = trigger_launch_plan_; - trigger_launch_plan_ = nullptr; - return temp; -} -inline ::flyteidl::admin::LaunchPlan* CreateTriggerRequest::mutable_trigger_launch_plan() { - - if (trigger_launch_plan_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::admin::LaunchPlan>(GetArenaNoVirtual()); - trigger_launch_plan_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.CreateTriggerRequest.trigger_launch_plan) - return trigger_launch_plan_; -} -inline void CreateTriggerRequest::set_allocated_trigger_launch_plan(::flyteidl::admin::LaunchPlan* trigger_launch_plan) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(trigger_launch_plan_); - } - if (trigger_launch_plan) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - trigger_launch_plan = ::google::protobuf::internal::GetOwnedMessage( - message_arena, 
trigger_launch_plan, submessage_arena); - } - - } else { - - } - trigger_launch_plan_ = trigger_launch_plan; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.CreateTriggerRequest.trigger_launch_plan) -} - -// ------------------------------------------------------------------- - -// CreateTriggerResponse - -// ------------------------------------------------------------------- - -// DeleteTriggerRequest - -// .flyteidl.core.Identifier trigger_id = 1; -inline bool DeleteTriggerRequest::has_trigger_id() const { - return this != internal_default_instance() && trigger_id_ != nullptr; -} -inline const ::flyteidl::core::Identifier& DeleteTriggerRequest::trigger_id() const { - const ::flyteidl::core::Identifier* p = trigger_id_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.DeleteTriggerRequest.trigger_id) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_Identifier_default_instance_); -} -inline ::flyteidl::core::Identifier* DeleteTriggerRequest::release_trigger_id() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.DeleteTriggerRequest.trigger_id) - - ::flyteidl::core::Identifier* temp = trigger_id_; - trigger_id_ = nullptr; - return temp; -} -inline ::flyteidl::core::Identifier* DeleteTriggerRequest::mutable_trigger_id() { - - if (trigger_id_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::Identifier>(GetArenaNoVirtual()); - trigger_id_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.DeleteTriggerRequest.trigger_id) - return trigger_id_; -} -inline void DeleteTriggerRequest::set_allocated_trigger_id(::flyteidl::core::Identifier* trigger_id) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(trigger_id_); - } - if (trigger_id) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - trigger_id = 
::google::protobuf::internal::GetOwnedMessage( - message_arena, trigger_id, submessage_arena); - } - - } else { - - } - trigger_id_ = trigger_id; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.DeleteTriggerRequest.trigger_id) -} - -// ------------------------------------------------------------------- - -// DeleteTriggerResponse - -// ------------------------------------------------------------------- - -// ArtifactProducer - -// .flyteidl.core.Identifier entity_id = 1; -inline bool ArtifactProducer::has_entity_id() const { - return this != internal_default_instance() && entity_id_ != nullptr; -} -inline const ::flyteidl::core::Identifier& ArtifactProducer::entity_id() const { - const ::flyteidl::core::Identifier* p = entity_id_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactProducer.entity_id) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_Identifier_default_instance_); -} -inline ::flyteidl::core::Identifier* ArtifactProducer::release_entity_id() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactProducer.entity_id) - - ::flyteidl::core::Identifier* temp = entity_id_; - entity_id_ = nullptr; - return temp; -} -inline ::flyteidl::core::Identifier* ArtifactProducer::mutable_entity_id() { - - if (entity_id_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::Identifier>(GetArenaNoVirtual()); - entity_id_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactProducer.entity_id) - return entity_id_; -} -inline void ArtifactProducer::set_allocated_entity_id(::flyteidl::core::Identifier* entity_id) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(entity_id_); - } - if (entity_id) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - entity_id = ::google::protobuf::internal::GetOwnedMessage( 
- message_arena, entity_id, submessage_arena); - } - - } else { - - } - entity_id_ = entity_id; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactProducer.entity_id) -} - -// .flyteidl.core.VariableMap outputs = 2; -inline bool ArtifactProducer::has_outputs() const { - return this != internal_default_instance() && outputs_ != nullptr; -} -inline const ::flyteidl::core::VariableMap& ArtifactProducer::outputs() const { - const ::flyteidl::core::VariableMap* p = outputs_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactProducer.outputs) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_VariableMap_default_instance_); -} -inline ::flyteidl::core::VariableMap* ArtifactProducer::release_outputs() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactProducer.outputs) - - ::flyteidl::core::VariableMap* temp = outputs_; - outputs_ = nullptr; - return temp; -} -inline ::flyteidl::core::VariableMap* ArtifactProducer::mutable_outputs() { - - if (outputs_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::VariableMap>(GetArenaNoVirtual()); - outputs_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactProducer.outputs) - return outputs_; -} -inline void ArtifactProducer::set_allocated_outputs(::flyteidl::core::VariableMap* outputs) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(outputs_); - } - if (outputs) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - outputs = ::google::protobuf::internal::GetOwnedMessage( - message_arena, outputs, submessage_arena); - } - - } else { - - } - outputs_ = outputs; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactProducer.outputs) -} - -// ------------------------------------------------------------------- - -// 
RegisterProducerRequest - -// repeated .flyteidl.artifact.ArtifactProducer producers = 1; -inline int RegisterProducerRequest::producers_size() const { - return producers_.size(); -} -inline void RegisterProducerRequest::clear_producers() { - producers_.Clear(); -} -inline ::flyteidl::artifact::ArtifactProducer* RegisterProducerRequest::mutable_producers(int index) { - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.RegisterProducerRequest.producers) - return producers_.Mutable(index); -} -inline ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::ArtifactProducer >* -RegisterProducerRequest::mutable_producers() { - // @@protoc_insertion_point(field_mutable_list:flyteidl.artifact.RegisterProducerRequest.producers) - return &producers_; -} -inline const ::flyteidl::artifact::ArtifactProducer& RegisterProducerRequest::producers(int index) const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.RegisterProducerRequest.producers) - return producers_.Get(index); -} -inline ::flyteidl::artifact::ArtifactProducer* RegisterProducerRequest::add_producers() { - // @@protoc_insertion_point(field_add:flyteidl.artifact.RegisterProducerRequest.producers) - return producers_.Add(); -} -inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::ArtifactProducer >& -RegisterProducerRequest::producers() const { - // @@protoc_insertion_point(field_list:flyteidl.artifact.RegisterProducerRequest.producers) - return producers_; -} - -// ------------------------------------------------------------------- - -// ArtifactConsumer - -// .flyteidl.core.Identifier entity_id = 1; -inline bool ArtifactConsumer::has_entity_id() const { - return this != internal_default_instance() && entity_id_ != nullptr; -} -inline const ::flyteidl::core::Identifier& ArtifactConsumer::entity_id() const { - const ::flyteidl::core::Identifier* p = entity_id_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactConsumer.entity_id) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_Identifier_default_instance_); -} -inline ::flyteidl::core::Identifier* ArtifactConsumer::release_entity_id() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactConsumer.entity_id) - - ::flyteidl::core::Identifier* temp = entity_id_; - entity_id_ = nullptr; - return temp; -} -inline ::flyteidl::core::Identifier* ArtifactConsumer::mutable_entity_id() { - - if (entity_id_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::Identifier>(GetArenaNoVirtual()); - entity_id_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactConsumer.entity_id) - return entity_id_; -} -inline void ArtifactConsumer::set_allocated_entity_id(::flyteidl::core::Identifier* entity_id) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(entity_id_); - } - if (entity_id) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - entity_id = ::google::protobuf::internal::GetOwnedMessage( - message_arena, entity_id, submessage_arena); - } - - } else { - - } - entity_id_ = entity_id; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactConsumer.entity_id) -} - -// .flyteidl.core.ParameterMap inputs = 2; -inline bool ArtifactConsumer::has_inputs() const { - return this != internal_default_instance() && inputs_ != nullptr; -} -inline const ::flyteidl::core::ParameterMap& ArtifactConsumer::inputs() const { - const ::flyteidl::core::ParameterMap* p = inputs_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.ArtifactConsumer.inputs) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_ParameterMap_default_instance_); -} -inline ::flyteidl::core::ParameterMap* ArtifactConsumer::release_inputs() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ArtifactConsumer.inputs) - - ::flyteidl::core::ParameterMap* temp = inputs_; - inputs_ = nullptr; - return temp; -} -inline ::flyteidl::core::ParameterMap* ArtifactConsumer::mutable_inputs() { - - if (inputs_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::ParameterMap>(GetArenaNoVirtual()); - inputs_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ArtifactConsumer.inputs) - return inputs_; -} -inline void ArtifactConsumer::set_allocated_inputs(::flyteidl::core::ParameterMap* inputs) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(inputs_); - } - if (inputs) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - inputs = ::google::protobuf::internal::GetOwnedMessage( - message_arena, inputs, submessage_arena); - } - - } else { - - } - inputs_ = inputs; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ArtifactConsumer.inputs) -} - -// ------------------------------------------------------------------- - -// RegisterConsumerRequest - -// repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; -inline int RegisterConsumerRequest::consumers_size() const { - return consumers_.size(); -} -inline void RegisterConsumerRequest::clear_consumers() { - consumers_.Clear(); -} -inline ::flyteidl::artifact::ArtifactConsumer* RegisterConsumerRequest::mutable_consumers(int index) { - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.RegisterConsumerRequest.consumers) - return consumers_.Mutable(index); -} -inline ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::ArtifactConsumer >* 
-RegisterConsumerRequest::mutable_consumers() { - // @@protoc_insertion_point(field_mutable_list:flyteidl.artifact.RegisterConsumerRequest.consumers) - return &consumers_; -} -inline const ::flyteidl::artifact::ArtifactConsumer& RegisterConsumerRequest::consumers(int index) const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.RegisterConsumerRequest.consumers) - return consumers_.Get(index); -} -inline ::flyteidl::artifact::ArtifactConsumer* RegisterConsumerRequest::add_consumers() { - // @@protoc_insertion_point(field_add:flyteidl.artifact.RegisterConsumerRequest.consumers) - return consumers_.Add(); -} -inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::artifact::ArtifactConsumer >& -RegisterConsumerRequest::consumers() const { - // @@protoc_insertion_point(field_list:flyteidl.artifact.RegisterConsumerRequest.consumers) - return consumers_; -} - -// ------------------------------------------------------------------- - -// RegisterResponse - -// ------------------------------------------------------------------- - -// ExecutionInputsRequest - -// .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; -inline bool ExecutionInputsRequest::has_execution_id() const { - return this != internal_default_instance() && execution_id_ != nullptr; -} -inline const ::flyteidl::core::WorkflowExecutionIdentifier& ExecutionInputsRequest::execution_id() const { - const ::flyteidl::core::WorkflowExecutionIdentifier* p = execution_id_; - // @@protoc_insertion_point(field_get:flyteidl.artifact.ExecutionInputsRequest.execution_id) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_WorkflowExecutionIdentifier_default_instance_); -} -inline ::flyteidl::core::WorkflowExecutionIdentifier* ExecutionInputsRequest::release_execution_id() { - // @@protoc_insertion_point(field_release:flyteidl.artifact.ExecutionInputsRequest.execution_id) - - ::flyteidl::core::WorkflowExecutionIdentifier* temp = execution_id_; - execution_id_ = nullptr; - return temp; -} -inline ::flyteidl::core::WorkflowExecutionIdentifier* ExecutionInputsRequest::mutable_execution_id() { - - if (execution_id_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::WorkflowExecutionIdentifier>(GetArenaNoVirtual()); - execution_id_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ExecutionInputsRequest.execution_id) - return execution_id_; -} -inline void ExecutionInputsRequest::set_allocated_execution_id(::flyteidl::core::WorkflowExecutionIdentifier* execution_id) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(execution_id_); - } - if (execution_id) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - execution_id = ::google::protobuf::internal::GetOwnedMessage( - message_arena, execution_id, submessage_arena); - } - - } else { - - } - execution_id_ = execution_id; - // @@protoc_insertion_point(field_set_allocated:flyteidl.artifact.ExecutionInputsRequest.execution_id) -} - -// repeated .flyteidl.core.ArtifactID inputs = 2; -inline int ExecutionInputsRequest::inputs_size() const { - return inputs_.size(); -} -inline ::flyteidl::core::ArtifactID* ExecutionInputsRequest::mutable_inputs(int index) { - // @@protoc_insertion_point(field_mutable:flyteidl.artifact.ExecutionInputsRequest.inputs) - return inputs_.Mutable(index); -} -inline ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >* -ExecutionInputsRequest::mutable_inputs() { - 
// @@protoc_insertion_point(field_mutable_list:flyteidl.artifact.ExecutionInputsRequest.inputs) - return &inputs_; -} -inline const ::flyteidl::core::ArtifactID& ExecutionInputsRequest::inputs(int index) const { - // @@protoc_insertion_point(field_get:flyteidl.artifact.ExecutionInputsRequest.inputs) - return inputs_.Get(index); -} -inline ::flyteidl::core::ArtifactID* ExecutionInputsRequest::add_inputs() { - // @@protoc_insertion_point(field_add:flyteidl.artifact.ExecutionInputsRequest.inputs) - return inputs_.Add(); -} -inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >& -ExecutionInputsRequest::inputs() const { - // @@protoc_insertion_point(field_list:flyteidl.artifact.ExecutionInputsRequest.inputs) - return inputs_; -} - -// ------------------------------------------------------------------- - -// ExecutionInputsResponse - -#ifdef __GNUC__ - #pragma GCC diagnostic pop -#endif // __GNUC__ -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// 
------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - -// ------------------------------------------------------------------- - - -// @@protoc_insertion_point(namespace_scope) - -} // namespace artifact -} // namespace flyteidl - -namespace google { -namespace protobuf { - -template <> struct is_proto_enum< ::flyteidl::artifact::FindByWorkflowExecRequest_Direction> : ::std::true_type {}; -template <> -inline const EnumDescriptor* GetEnumDescriptor< ::flyteidl::artifact::FindByWorkflowExecRequest_Direction>() { - return ::flyteidl::artifact::FindByWorkflowExecRequest_Direction_descriptor(); -} - -} // namespace protobuf -} // namespace google - -// @@protoc_insertion_point(global_scope) - -#include -#endif // PROTOBUF_INCLUDED_flyteidl_2fartifact_2fartifacts_2eproto diff --git a/flyteidl/gen/pb-cpp/flyteidl/core/artifact_id.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/core/artifact_id.pb.cc index 16b41157e4..29b1b207ed 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/core/artifact_id.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/core/artifact_id.pb.cc @@ -21,10 +21,11 @@ extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_InputBindingData_flyteidl_2fcore_2fartifact_5fid_2eproto; extern 
PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_Partitions_ValueEntry_DoNotUse_flyteidl_2fcore_2fartifact_5fid_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_Partitions_flyteidl_2fcore_2fartifact_5fid_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_TimePartition_flyteidl_2fcore_2fartifact_5fid_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactTag_flyteidl_2fcore_2fartifact_5fid_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_LabelValue_flyteidl_2fcore_2fartifact_5fid_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fidentifier_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_Identifier_flyteidl_2fcore_2fidentifier_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_LabelValue_flyteidl_2fcore_2fartifact_5fid_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_google_2fprotobuf_2ftimestamp_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_Timestamp_google_2fprotobuf_2ftimestamp_2eproto; namespace flyteidl { namespace core { class ArtifactKeyDefaultTypeInternal { @@ -34,6 +35,8 @@ class ArtifactKeyDefaultTypeInternal { class ArtifactBindingDataDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed 
_instance; + ::google::protobuf::internal::ArenaStringPtr partition_key_; + bool bind_to_time_partition_; } _ArtifactBindingData_default_instance_; class InputBindingDataDefaultTypeInternal { public: @@ -43,6 +46,7 @@ class LabelValueDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed _instance; ::google::protobuf::internal::ArenaStringPtr static_value_; + const ::google::protobuf::Timestamp* time_value_; const ::flyteidl::core::ArtifactBindingData* triggered_binding_; const ::flyteidl::core::InputBindingData* input_binding_; } _LabelValue_default_instance_; @@ -54,10 +58,13 @@ class PartitionsDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed _instance; } _Partitions_default_instance_; +class TimePartitionDefaultTypeInternal { + public: + ::google::protobuf::internal::ExplicitlyConstructed _instance; +} _TimePartition_default_instance_; class ArtifactIDDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed _instance; - const ::flyteidl::core::Partitions* partitions_; } _ArtifactID_default_instance_; class ArtifactTagDefaultTypeInternal { public: @@ -71,10 +78,6 @@ class ArtifactQueryDefaultTypeInternal { ::google::protobuf::internal::ArenaStringPtr uri_; const ::flyteidl::core::ArtifactBindingData* binding_; } _ArtifactQuery_default_instance_; -class TriggerDefaultTypeInternal { - public: - ::google::protobuf::internal::ExplicitlyConstructed _instance; -} _Trigger_default_instance_; } // namespace core } // namespace flyteidl static void InitDefaultsArtifactKey_flyteidl_2fcore_2fartifact_5fid_2eproto() { @@ -130,8 +133,9 @@ static void InitDefaultsLabelValue_flyteidl_2fcore_2fartifact_5fid_2eproto() { ::flyteidl::core::LabelValue::InitAsDefaultInstance(); } -::google::protobuf::internal::SCCInfo<2> scc_info_LabelValue_flyteidl_2fcore_2fartifact_5fid_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 2, 
InitDefaultsLabelValue_flyteidl_2fcore_2fartifact_5fid_2eproto}, { +::google::protobuf::internal::SCCInfo<3> scc_info_LabelValue_flyteidl_2fcore_2fartifact_5fid_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 3, InitDefaultsLabelValue_flyteidl_2fcore_2fartifact_5fid_2eproto}, { + &scc_info_Timestamp_google_2fprotobuf_2ftimestamp_2eproto.base, &scc_info_ArtifactBindingData_flyteidl_2fcore_2fartifact_5fid_2eproto.base, &scc_info_InputBindingData_flyteidl_2fcore_2fartifact_5fid_2eproto.base,}}; @@ -164,6 +168,21 @@ ::google::protobuf::internal::SCCInfo<1> scc_info_Partitions_flyteidl_2fcore_2fa {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsPartitions_flyteidl_2fcore_2fartifact_5fid_2eproto}, { &scc_info_Partitions_ValueEntry_DoNotUse_flyteidl_2fcore_2fartifact_5fid_2eproto.base,}}; +static void InitDefaultsTimePartition_flyteidl_2fcore_2fartifact_5fid_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::flyteidl::core::_TimePartition_default_instance_; + new (ptr) ::flyteidl::core::TimePartition(); + ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); + } + ::flyteidl::core::TimePartition::InitAsDefaultInstance(); +} + +::google::protobuf::internal::SCCInfo<1> scc_info_TimePartition_flyteidl_2fcore_2fartifact_5fid_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsTimePartition_flyteidl_2fcore_2fartifact_5fid_2eproto}, { + &scc_info_LabelValue_flyteidl_2fcore_2fartifact_5fid_2eproto.base,}}; + static void InitDefaultsArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto() { GOOGLE_PROTOBUF_VERIFY_VERSION; @@ -175,10 +194,11 @@ static void InitDefaultsArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto() { ::flyteidl::core::ArtifactID::InitAsDefaultInstance(); } -::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto = - 
{{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 2, InitDefaultsArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto}, { +::google::protobuf::internal::SCCInfo<3> scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 3, InitDefaultsArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto}, { &scc_info_ArtifactKey_flyteidl_2fcore_2fartifact_5fid_2eproto.base, - &scc_info_Partitions_flyteidl_2fcore_2fartifact_5fid_2eproto.base,}}; + &scc_info_Partitions_flyteidl_2fcore_2fartifact_5fid_2eproto.base, + &scc_info_TimePartition_flyteidl_2fcore_2fartifact_5fid_2eproto.base,}}; static void InitDefaultsArtifactTag_flyteidl_2fcore_2fartifact_5fid_2eproto() { GOOGLE_PROTOBUF_VERIFY_VERSION; @@ -213,22 +233,6 @@ ::google::protobuf::internal::SCCInfo<3> scc_info_ArtifactQuery_flyteidl_2fcore_ &scc_info_ArtifactTag_flyteidl_2fcore_2fartifact_5fid_2eproto.base, &scc_info_ArtifactBindingData_flyteidl_2fcore_2fartifact_5fid_2eproto.base,}}; -static void InitDefaultsTrigger_flyteidl_2fcore_2fartifact_5fid_2eproto() { - GOOGLE_PROTOBUF_VERIFY_VERSION; - - { - void* ptr = &::flyteidl::core::_Trigger_default_instance_; - new (ptr) ::flyteidl::core::Trigger(); - ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); - } - ::flyteidl::core::Trigger::InitAsDefaultInstance(); -} - -::google::protobuf::internal::SCCInfo<2> scc_info_Trigger_flyteidl_2fcore_2fartifact_5fid_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 2, InitDefaultsTrigger_flyteidl_2fcore_2fartifact_5fid_2eproto}, { - &scc_info_Identifier_flyteidl_2fcore_2fidentifier_2eproto.base, - &scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto.base,}}; - void InitDefaults_flyteidl_2fcore_2fartifact_5fid_2eproto() { ::google::protobuf::internal::InitSCC(&scc_info_ArtifactKey_flyteidl_2fcore_2fartifact_5fid_2eproto.base); 
::google::protobuf::internal::InitSCC(&scc_info_ArtifactBindingData_flyteidl_2fcore_2fartifact_5fid_2eproto.base); @@ -236,10 +240,10 @@ void InitDefaults_flyteidl_2fcore_2fartifact_5fid_2eproto() { ::google::protobuf::internal::InitSCC(&scc_info_LabelValue_flyteidl_2fcore_2fartifact_5fid_2eproto.base); ::google::protobuf::internal::InitSCC(&scc_info_Partitions_ValueEntry_DoNotUse_flyteidl_2fcore_2fartifact_5fid_2eproto.base); ::google::protobuf::internal::InitSCC(&scc_info_Partitions_flyteidl_2fcore_2fartifact_5fid_2eproto.base); + ::google::protobuf::internal::InitSCC(&scc_info_TimePartition_flyteidl_2fcore_2fartifact_5fid_2eproto.base); ::google::protobuf::internal::InitSCC(&scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto.base); ::google::protobuf::internal::InitSCC(&scc_info_ArtifactTag_flyteidl_2fcore_2fartifact_5fid_2eproto.base); ::google::protobuf::internal::InitSCC(&scc_info_ArtifactQuery_flyteidl_2fcore_2fartifact_5fid_2eproto.base); - ::google::protobuf::internal::InitSCC(&scc_info_Trigger_flyteidl_2fcore_2fartifact_5fid_2eproto.base); } ::google::protobuf::Metadata file_level_metadata_flyteidl_2fcore_2fartifact_5fid_2eproto[10]; @@ -258,11 +262,13 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fcore_2fartifact_5fid_2ep ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactBindingData, _internal_metadata_), ~0u, // no _extensions_ - ~0u, // no _oneof_case_ + PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactBindingData, _oneof_case_[0]), ~0u, // no _weak_field_map_ PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactBindingData, index_), - PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactBindingData, partition_key_), + offsetof(::flyteidl::core::ArtifactBindingDataDefaultTypeInternal, partition_key_), + offsetof(::flyteidl::core::ArtifactBindingDataDefaultTypeInternal, bind_to_time_partition_), PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactBindingData, transform_), + 
PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactBindingData, partition_data_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::core::InputBindingData, _internal_metadata_), ~0u, // no _extensions_ @@ -275,6 +281,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fcore_2fartifact_5fid_2ep PROTOBUF_FIELD_OFFSET(::flyteidl::core::LabelValue, _oneof_case_[0]), ~0u, // no _weak_field_map_ offsetof(::flyteidl::core::LabelValueDefaultTypeInternal, static_value_), + offsetof(::flyteidl::core::LabelValueDefaultTypeInternal, time_value_), offsetof(::flyteidl::core::LabelValueDefaultTypeInternal, triggered_binding_), offsetof(::flyteidl::core::LabelValueDefaultTypeInternal, input_binding_), PROTOBUF_FIELD_OFFSET(::flyteidl::core::LabelValue, value_), @@ -294,14 +301,20 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fcore_2fartifact_5fid_2ep ~0u, // no _weak_field_map_ PROTOBUF_FIELD_OFFSET(::flyteidl::core::Partitions, value_), ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::flyteidl::core::TimePartition, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::flyteidl::core::TimePartition, value_), + ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactID, _internal_metadata_), ~0u, // no _extensions_ - PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactID, _oneof_case_[0]), + ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactID, artifact_key_), PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactID, version_), - offsetof(::flyteidl::core::ArtifactIDDefaultTypeInternal, partitions_), - PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactID, dimensions_), + PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactID, partitions_), + PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactID, time_partition_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactTag, _internal_metadata_), ~0u, // no _extensions_ @@ 
-319,25 +332,18 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fcore_2fartifact_5fid_2ep offsetof(::flyteidl::core::ArtifactQueryDefaultTypeInternal, uri_), offsetof(::flyteidl::core::ArtifactQueryDefaultTypeInternal, binding_), PROTOBUF_FIELD_OFFSET(::flyteidl::core::ArtifactQuery, identifier_), - ~0u, // no _has_bits_ - PROTOBUF_FIELD_OFFSET(::flyteidl::core::Trigger, _internal_metadata_), - ~0u, // no _extensions_ - ~0u, // no _oneof_case_ - ~0u, // no _weak_field_map_ - PROTOBUF_FIELD_OFFSET(::flyteidl::core::Trigger, trigger_id_), - PROTOBUF_FIELD_OFFSET(::flyteidl::core::Trigger, triggers_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::core::ArtifactKey)}, { 8, -1, sizeof(::flyteidl::core::ArtifactBindingData)}, - { 16, -1, sizeof(::flyteidl::core::InputBindingData)}, - { 22, -1, sizeof(::flyteidl::core::LabelValue)}, - { 31, 38, sizeof(::flyteidl::core::Partitions_ValueEntry_DoNotUse)}, - { 40, -1, sizeof(::flyteidl::core::Partitions)}, - { 46, -1, sizeof(::flyteidl::core::ArtifactID)}, - { 55, -1, sizeof(::flyteidl::core::ArtifactTag)}, - { 62, -1, sizeof(::flyteidl::core::ArtifactQuery)}, - { 72, -1, sizeof(::flyteidl::core::Trigger)}, + { 18, -1, sizeof(::flyteidl::core::InputBindingData)}, + { 24, -1, sizeof(::flyteidl::core::LabelValue)}, + { 34, 41, sizeof(::flyteidl::core::Partitions_ValueEntry_DoNotUse)}, + { 43, -1, sizeof(::flyteidl::core::Partitions)}, + { 49, -1, sizeof(::flyteidl::core::TimePartition)}, + { 55, -1, sizeof(::flyteidl::core::ArtifactID)}, + { 64, -1, sizeof(::flyteidl::core::ArtifactTag)}, + { 71, -1, sizeof(::flyteidl::core::ArtifactQuery)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -347,10 +353,10 @@ static ::google::protobuf::Message const * const file_default_instances[] = { reinterpret_cast(&::flyteidl::core::_LabelValue_default_instance_), 
reinterpret_cast(&::flyteidl::core::_Partitions_ValueEntry_DoNotUse_default_instance_), reinterpret_cast(&::flyteidl::core::_Partitions_default_instance_), + reinterpret_cast(&::flyteidl::core::_TimePartition_default_instance_), reinterpret_cast(&::flyteidl::core::_ArtifactID_default_instance_), reinterpret_cast(&::flyteidl::core::_ArtifactTag_default_instance_), reinterpret_cast(&::flyteidl::core::_ArtifactQuery_default_instance_), - reinterpret_cast(&::flyteidl::core::_Trigger_default_instance_), }; ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_flyteidl_2fcore_2fartifact_5fid_2eproto = { @@ -361,48 +367,53 @@ ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fcore_2fartifact_5fid_2eproto[] = "\n\037flyteidl/core/artifact_id.proto\022\rflyte" - "idl.core\032\036flyteidl/core/identifier.proto" - "\"<\n\013ArtifactKey\022\017\n\007project\030\001 \001(\t\022\016\n\006doma" - "in\030\002 \001(\t\022\014\n\004name\030\003 \001(\t\"N\n\023ArtifactBindin" - "gData\022\r\n\005index\030\001 \001(\r\022\025\n\rpartition_key\030\002 " - "\001(\t\022\021\n\ttransform\030\003 \001(\t\"\037\n\020InputBindingDa" - "ta\022\013\n\003var\030\001 \001(\t\"\250\001\n\nLabelValue\022\026\n\014static" - "_value\030\001 \001(\tH\000\022\?\n\021triggered_binding\030\002 \001(" - "\0132\".flyteidl.core.ArtifactBindingDataH\000\022" - "8\n\rinput_binding\030\003 \001(\0132\037.flyteidl.core.I" - "nputBindingDataH\000B\007\n\005value\"\212\001\n\nPartition" - "s\0223\n\005value\030\001 \003(\0132$.flyteidl.core.Partiti" - "ons.ValueEntry\032G\n\nValueEntry\022\013\n\003key\030\001 \001(" - "\t\022(\n\005value\030\002 \001(\0132\031.flyteidl.core.LabelVa" - "lue:\0028\001\"\216\001\n\nArtifactID\0220\n\014artifact_key\030\001" - " \001(\0132\032.flyteidl.core.ArtifactKey\022\017\n\007vers" - "ion\030\002 \001(\t\022/\n\npartitions\030\003 \001(\0132\031.flyteidl" - 
".core.PartitionsH\000B\014\n\ndimensions\"i\n\013Arti" - "factTag\0220\n\014artifact_key\030\001 \001(\0132\032.flyteidl" - ".core.ArtifactKey\022(\n\005value\030\002 \001(\0132\031.flyte" - "idl.core.LabelValue\"\311\001\n\rArtifactQuery\0220\n" - "\013artifact_id\030\001 \001(\0132\031.flyteidl.core.Artif" - "actIDH\000\0222\n\014artifact_tag\030\002 \001(\0132\032.flyteidl" - ".core.ArtifactTagH\000\022\r\n\003uri\030\003 \001(\tH\000\0225\n\007bi" - "nding\030\004 \001(\0132\".flyteidl.core.ArtifactBind" - "ingDataH\000B\014\n\nidentifier\"e\n\007Trigger\022-\n\ntr" - "igger_id\030\001 \001(\0132\031.flyteidl.core.Identifie" - "r\022+\n\010triggers\030\002 \003(\0132\031.flyteidl.core.Arti" - "factIDB= 1900 const int ArtifactBindingData::kIndexFieldNumber; const int ArtifactBindingData::kPartitionKeyFieldNumber; +const int ArtifactBindingData::kBindToTimePartitionFieldNumber; const int ArtifactBindingData::kTransformFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 @@ -876,24 +891,34 @@ ArtifactBindingData::ArtifactBindingData(const ArtifactBindingData& from) : ::google::protobuf::Message(), _internal_metadata_(nullptr) { _internal_metadata_.MergeFrom(from._internal_metadata_); - partition_key_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.partition_key().size() > 0) { - partition_key_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.partition_key_); - } transform_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (from.transform().size() > 0) { transform_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.transform_); } index_ = from.index_; + clear_has_partition_data(); + switch (from.partition_data_case()) { + case kPartitionKey: { + set_partition_key(from.partition_key()); + break; + } + case kBindToTimePartition: { + set_bind_to_time_partition(from.bind_to_time_partition()); + break; + } + case 
PARTITION_DATA_NOT_SET: { + break; + } + } // @@protoc_insertion_point(copy_constructor:flyteidl.core.ArtifactBindingData) } void ArtifactBindingData::SharedCtor() { ::google::protobuf::internal::InitSCC( &scc_info_ArtifactBindingData_flyteidl_2fcore_2fartifact_5fid_2eproto.base); - partition_key_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); transform_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); index_ = 0u; + clear_has_partition_data(); } ArtifactBindingData::~ArtifactBindingData() { @@ -902,8 +927,10 @@ ArtifactBindingData::~ArtifactBindingData() { } void ArtifactBindingData::SharedDtor() { - partition_key_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); transform_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (has_partition_data()) { + clear_partition_data(); + } } void ArtifactBindingData::SetCachedSize(int size) const { @@ -915,15 +942,34 @@ const ArtifactBindingData& ArtifactBindingData::default_instance() { } +void ArtifactBindingData::clear_partition_data() { +// @@protoc_insertion_point(one_of_clear_start:flyteidl.core.ArtifactBindingData) + switch (partition_data_case()) { + case kPartitionKey: { + partition_data_.partition_key_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + break; + } + case kBindToTimePartition: { + // No need to clear + break; + } + case PARTITION_DATA_NOT_SET: { + break; + } + } + _oneof_case_[0] = PARTITION_DATA_NOT_SET; +} + + void ArtifactBindingData::Clear() { // @@protoc_insertion_point(message_clear_start:flyteidl.core.ArtifactBindingData) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; - partition_key_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); 
transform_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); index_ = 0u; + clear_partition_data(); _internal_metadata_.Clear(); } @@ -963,9 +1009,16 @@ const char* ArtifactBindingData::_InternalParse(const char* begin, const char* e ptr += size; break; } - // string transform = 3; + // bool bind_to_time_partition = 3; case 3: { - if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; + if (static_cast<::google::protobuf::uint8>(tag) != 24) goto handle_unusual; + msg->set_bind_to_time_partition(::google::protobuf::internal::ReadVarint(&ptr)); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + break; + } + // string transform = 4; + case 4: { + if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); ctx->extra_parse_data().SetFieldName("flyteidl.core.ArtifactBindingData.transform"); @@ -1041,9 +1094,23 @@ bool ArtifactBindingData::MergePartialFromCodedStream( break; } - // string transform = 3; + // bool bind_to_time_partition = 3; case 3: { - if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { + if (static_cast< ::google::protobuf::uint8>(tag) == (24 & 0xFF)) { + clear_partition_data(); + DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< + bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>( + input, &partition_data_.bind_to_time_partition_))); + set_has_bind_to_time_partition(); + } else { + goto handle_unusual; + } + break; + } + + // string transform = 4; + case 4: { + if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadString( input, this->mutable_transform())); DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( @@ -1089,7 +1156,7 @@ void ArtifactBindingData::SerializeWithCachedSizes( } // string partition_key = 2; - if (this->partition_key().size() > 0) { + if (has_partition_key()) { 
::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->partition_key().data(), static_cast(this->partition_key().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, @@ -1098,14 +1165,19 @@ void ArtifactBindingData::SerializeWithCachedSizes( 2, this->partition_key(), output); } - // string transform = 3; + // bool bind_to_time_partition = 3; + if (has_bind_to_time_partition()) { + ::google::protobuf::internal::WireFormatLite::WriteBool(3, this->bind_to_time_partition(), output); + } + + // string transform = 4; if (this->transform().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->transform().data(), static_cast(this->transform().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, "flyteidl.core.ArtifactBindingData.transform"); ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 3, this->transform(), output); + 4, this->transform(), output); } if (_internal_metadata_.have_unknown_fields()) { @@ -1127,7 +1199,7 @@ ::google::protobuf::uint8* ArtifactBindingData::InternalSerializeWithCachedSizes } // string partition_key = 2; - if (this->partition_key().size() > 0) { + if (has_partition_key()) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->partition_key().data(), static_cast(this->partition_key().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, @@ -1137,7 +1209,12 @@ ::google::protobuf::uint8* ArtifactBindingData::InternalSerializeWithCachedSizes 2, this->partition_key(), target); } - // string transform = 3; + // bool bind_to_time_partition = 3; + if (has_bind_to_time_partition()) { + target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(3, this->bind_to_time_partition(), target); + } + + // string transform = 4; if (this->transform().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->transform().data(), static_cast(this->transform().length()), @@ -1145,7 +1222,7 @@ 
::google::protobuf::uint8* ArtifactBindingData::InternalSerializeWithCachedSizes "flyteidl.core.ArtifactBindingData.transform"); target = ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 3, this->transform(), target); + 4, this->transform(), target); } if (_internal_metadata_.have_unknown_fields()) { @@ -1169,14 +1246,7 @@ size_t ArtifactBindingData::ByteSizeLong() const { // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; - // string partition_key = 2; - if (this->partition_key().size() > 0) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::StringSize( - this->partition_key()); - } - - // string transform = 3; + // string transform = 4; if (this->transform().size() > 0) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize( @@ -1190,6 +1260,23 @@ size_t ArtifactBindingData::ByteSizeLong() const { this->index()); } + switch (partition_data_case()) { + // string partition_key = 2; + case kPartitionKey: { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->partition_key()); + break; + } + // bool bind_to_time_partition = 3; + case kBindToTimePartition: { + total_size += 1 + 1; + break; + } + case PARTITION_DATA_NOT_SET: { + break; + } + } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; @@ -1217,10 +1304,6 @@ void ArtifactBindingData::MergeFrom(const ArtifactBindingData& from) { ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; - if (from.partition_key().size() > 0) { - - partition_key_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.partition_key_); - } if (from.transform().size() > 0) { transform_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.transform_); @@ -1228,6 +1311,19 @@ void ArtifactBindingData::MergeFrom(const ArtifactBindingData& from) { if (from.index() 
!= 0) { set_index(from.index()); } + switch (from.partition_data_case()) { + case kPartitionKey: { + set_partition_key(from.partition_key()); + break; + } + case kBindToTimePartition: { + set_bind_to_time_partition(from.bind_to_time_partition()); + break; + } + case PARTITION_DATA_NOT_SET: { + break; + } + } } void ArtifactBindingData::CopyFrom(const ::google::protobuf::Message& from) { @@ -1255,11 +1351,11 @@ void ArtifactBindingData::Swap(ArtifactBindingData* other) { void ArtifactBindingData::InternalSwap(ArtifactBindingData* other) { using std::swap; _internal_metadata_.Swap(&other->_internal_metadata_); - partition_key_.Swap(&other->partition_key_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), - GetArenaNoVirtual()); transform_.Swap(&other->transform_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); swap(index_, other->index_); + swap(partition_data_, other->partition_data_); + swap(_oneof_case_[0], other->_oneof_case_[0]); } ::google::protobuf::Metadata ArtifactBindingData::GetMetadata() const { @@ -1570,6 +1666,8 @@ ::google::protobuf::Metadata InputBindingData::GetMetadata() const { void LabelValue::InitAsDefaultInstance() { ::flyteidl::core::_LabelValue_default_instance_.static_value_.UnsafeSetDefault( &::google::protobuf::internal::GetEmptyStringAlreadyInited()); + ::flyteidl::core::_LabelValue_default_instance_.time_value_ = const_cast< ::google::protobuf::Timestamp*>( + ::google::protobuf::Timestamp::internal_default_instance()); ::flyteidl::core::_LabelValue_default_instance_.triggered_binding_ = const_cast< ::flyteidl::core::ArtifactBindingData*>( ::flyteidl::core::ArtifactBindingData::internal_default_instance()); ::flyteidl::core::_LabelValue_default_instance_.input_binding_ = const_cast< ::flyteidl::core::InputBindingData*>( @@ -1577,10 +1675,15 @@ void LabelValue::InitAsDefaultInstance() { } class LabelValue::HasBitSetters { public: + static const ::google::protobuf::Timestamp& time_value(const 
LabelValue* msg); static const ::flyteidl::core::ArtifactBindingData& triggered_binding(const LabelValue* msg); static const ::flyteidl::core::InputBindingData& input_binding(const LabelValue* msg); }; +const ::google::protobuf::Timestamp& +LabelValue::HasBitSetters::time_value(const LabelValue* msg) { + return *msg->value_.time_value_; +} const ::flyteidl::core::ArtifactBindingData& LabelValue::HasBitSetters::triggered_binding(const LabelValue* msg) { return *msg->value_.triggered_binding_; @@ -1589,6 +1692,27 @@ const ::flyteidl::core::InputBindingData& LabelValue::HasBitSetters::input_binding(const LabelValue* msg) { return *msg->value_.input_binding_; } +void LabelValue::set_allocated_time_value(::google::protobuf::Timestamp* time_value) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + clear_value(); + if (time_value) { + ::google::protobuf::Arena* submessage_arena = + reinterpret_cast<::google::protobuf::MessageLite*>(time_value)->GetArena(); + if (message_arena != submessage_arena) { + time_value = ::google::protobuf::internal::GetOwnedMessage( + message_arena, time_value, submessage_arena); + } + set_has_time_value(); + value_.time_value_ = time_value; + } + // @@protoc_insertion_point(field_set_allocated:flyteidl.core.LabelValue.time_value) +} +void LabelValue::clear_time_value() { + if (has_time_value()) { + delete value_.time_value_; + clear_has_value(); + } +} void LabelValue::set_allocated_triggered_binding(::flyteidl::core::ArtifactBindingData* triggered_binding) { ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); clear_value(); @@ -1619,6 +1743,7 @@ void LabelValue::set_allocated_input_binding(::flyteidl::core::InputBindingData* } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int LabelValue::kStaticValueFieldNumber; +const int LabelValue::kTimeValueFieldNumber; const int LabelValue::kTriggeredBindingFieldNumber; const int LabelValue::kInputBindingFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 @@ 
-1638,6 +1763,10 @@ LabelValue::LabelValue(const LabelValue& from) set_static_value(from.static_value()); break; } + case kTimeValue: { + mutable_time_value()->::google::protobuf::Timestamp::MergeFrom(from.time_value()); + break; + } case kTriggeredBinding: { mutable_triggered_binding()->::flyteidl::core::ArtifactBindingData::MergeFrom(from.triggered_binding()); break; @@ -1686,6 +1815,10 @@ void LabelValue::clear_value() { value_.static_value_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); break; } + case kTimeValue: { + delete value_.time_value_; + break; + } case kTriggeredBinding: { delete value_.triggered_binding_; break; @@ -1741,24 +1874,37 @@ const char* LabelValue::_InternalParse(const char* begin, const char* end, void* ptr += size; break; } - // .flyteidl.core.ArtifactBindingData triggered_binding = 2; + // .google.protobuf.Timestamp time_value = 2; case 2: { if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::ArtifactBindingData::_InternalParse; - object = msg->mutable_triggered_binding(); + parser_till_end = ::google::protobuf::Timestamp::_InternalParse; + object = msg->mutable_time_value(); if (size > end - ptr) goto len_delim_till_end; ptr += size; GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( {parser_till_end, object}, ptr - size, ptr)); break; } - // .flyteidl.core.InputBindingData input_binding = 3; + // .flyteidl.core.ArtifactBindingData triggered_binding = 3; case 3: { if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::flyteidl::core::ArtifactBindingData::_InternalParse; + object = msg->mutable_triggered_binding(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + 
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + break; + } + // .flyteidl.core.InputBindingData input_binding = 4; + case 4: { + if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); parser_till_end = ::flyteidl::core::InputBindingData::_InternalParse; object = msg->mutable_input_binding(); if (size > end - ptr) goto len_delim_till_end; @@ -1816,20 +1962,31 @@ bool LabelValue::MergePartialFromCodedStream( break; } - // .flyteidl.core.ArtifactBindingData triggered_binding = 2; + // .google.protobuf.Timestamp time_value = 2; case 2: { if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_triggered_binding())); + input, mutable_time_value())); } else { goto handle_unusual; } break; } - // .flyteidl.core.InputBindingData input_binding = 3; + // .flyteidl.core.ArtifactBindingData triggered_binding = 3; case 3: { if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_triggered_binding())); + } else { + goto handle_unusual; + } + break; + } + + // .flyteidl.core.InputBindingData input_binding = 4; + case 4: { + if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( input, mutable_input_binding())); } else { @@ -1875,16 +2032,22 @@ void LabelValue::SerializeWithCachedSizes( 1, this->static_value(), output); } - // .flyteidl.core.ArtifactBindingData triggered_binding = 2; + // .google.protobuf.Timestamp time_value = 2; + if (has_time_value()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 2, HasBitSetters::time_value(this), output); + } + + // .flyteidl.core.ArtifactBindingData triggered_binding = 3; if 
(has_triggered_binding()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 2, HasBitSetters::triggered_binding(this), output); + 3, HasBitSetters::triggered_binding(this), output); } - // .flyteidl.core.InputBindingData input_binding = 3; + // .flyteidl.core.InputBindingData input_binding = 4; if (has_input_binding()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 3, HasBitSetters::input_binding(this), output); + 4, HasBitSetters::input_binding(this), output); } if (_internal_metadata_.have_unknown_fields()) { @@ -1911,18 +2074,25 @@ ::google::protobuf::uint8* LabelValue::InternalSerializeWithCachedSizesToArray( 1, this->static_value(), target); } - // .flyteidl.core.ArtifactBindingData triggered_binding = 2; + // .google.protobuf.Timestamp time_value = 2; + if (has_time_value()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 2, HasBitSetters::time_value(this), target); + } + + // .flyteidl.core.ArtifactBindingData triggered_binding = 3; if (has_triggered_binding()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 2, HasBitSetters::triggered_binding(this), target); + 3, HasBitSetters::triggered_binding(this), target); } - // .flyteidl.core.InputBindingData input_binding = 3; + // .flyteidl.core.InputBindingData input_binding = 4; if (has_input_binding()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 3, HasBitSetters::input_binding(this), target); + 4, HasBitSetters::input_binding(this), target); } if (_internal_metadata_.have_unknown_fields()) { @@ -1954,14 +2124,21 @@ size_t LabelValue::ByteSizeLong() const { this->static_value()); break; } - // .flyteidl.core.ArtifactBindingData triggered_binding = 2; + // .google.protobuf.Timestamp time_value = 2; + case kTimeValue: { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *value_.time_value_); + break; 
+ } + // .flyteidl.core.ArtifactBindingData triggered_binding = 3; case kTriggeredBinding: { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( *value_.triggered_binding_); break; } - // .flyteidl.core.InputBindingData input_binding = 3; + // .flyteidl.core.InputBindingData input_binding = 4; case kInputBinding: { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( @@ -2004,6 +2181,10 @@ void LabelValue::MergeFrom(const LabelValue& from) { set_static_value(from.static_value()); break; } + case kTimeValue: { + mutable_time_value()->::google::protobuf::Timestamp::MergeFrom(from.time_value()); + break; + } case kTriggeredBinding: { mutable_triggered_binding()->::flyteidl::core::ArtifactBindingData::MergeFrom(from.triggered_binding()); break; @@ -2462,141 +2643,81 @@ ::google::protobuf::Metadata Partitions::GetMetadata() const { // =================================================================== -void ArtifactID::InitAsDefaultInstance() { - ::flyteidl::core::_ArtifactID_default_instance_._instance.get_mutable()->artifact_key_ = const_cast< ::flyteidl::core::ArtifactKey*>( - ::flyteidl::core::ArtifactKey::internal_default_instance()); - ::flyteidl::core::_ArtifactID_default_instance_.partitions_ = const_cast< ::flyteidl::core::Partitions*>( - ::flyteidl::core::Partitions::internal_default_instance()); +void TimePartition::InitAsDefaultInstance() { + ::flyteidl::core::_TimePartition_default_instance_._instance.get_mutable()->value_ = const_cast< ::flyteidl::core::LabelValue*>( + ::flyteidl::core::LabelValue::internal_default_instance()); } -class ArtifactID::HasBitSetters { +class TimePartition::HasBitSetters { public: - static const ::flyteidl::core::ArtifactKey& artifact_key(const ArtifactID* msg); - static const ::flyteidl::core::Partitions& partitions(const ArtifactID* msg); + static const ::flyteidl::core::LabelValue& value(const TimePartition* msg); }; -const ::flyteidl::core::ArtifactKey& 
-ArtifactID::HasBitSetters::artifact_key(const ArtifactID* msg) { - return *msg->artifact_key_; -} -const ::flyteidl::core::Partitions& -ArtifactID::HasBitSetters::partitions(const ArtifactID* msg) { - return *msg->dimensions_.partitions_; -} -void ArtifactID::set_allocated_partitions(::flyteidl::core::Partitions* partitions) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - clear_dimensions(); - if (partitions) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - partitions = ::google::protobuf::internal::GetOwnedMessage( - message_arena, partitions, submessage_arena); - } - set_has_partitions(); - dimensions_.partitions_ = partitions; - } - // @@protoc_insertion_point(field_set_allocated:flyteidl.core.ArtifactID.partitions) +const ::flyteidl::core::LabelValue& +TimePartition::HasBitSetters::value(const TimePartition* msg) { + return *msg->value_; } #if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int ArtifactID::kArtifactKeyFieldNumber; -const int ArtifactID::kVersionFieldNumber; -const int ArtifactID::kPartitionsFieldNumber; +const int TimePartition::kValueFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 -ArtifactID::ArtifactID() +TimePartition::TimePartition() : ::google::protobuf::Message(), _internal_metadata_(nullptr) { SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.core.ArtifactID) + // @@protoc_insertion_point(constructor:flyteidl.core.TimePartition) } -ArtifactID::ArtifactID(const ArtifactID& from) +TimePartition::TimePartition(const TimePartition& from) : ::google::protobuf::Message(), _internal_metadata_(nullptr) { _internal_metadata_.MergeFrom(from._internal_metadata_); - version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (from.version().size() > 0) { - version_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.version_); - } - if (from.has_artifact_key()) { - artifact_key_ 
= new ::flyteidl::core::ArtifactKey(*from.artifact_key_); + if (from.has_value()) { + value_ = new ::flyteidl::core::LabelValue(*from.value_); } else { - artifact_key_ = nullptr; - } - clear_has_dimensions(); - switch (from.dimensions_case()) { - case kPartitions: { - mutable_partitions()->::flyteidl::core::Partitions::MergeFrom(from.partitions()); - break; - } - case DIMENSIONS_NOT_SET: { - break; - } + value_ = nullptr; } - // @@protoc_insertion_point(copy_constructor:flyteidl.core.ArtifactID) + // @@protoc_insertion_point(copy_constructor:flyteidl.core.TimePartition) } -void ArtifactID::SharedCtor() { +void TimePartition::SharedCtor() { ::google::protobuf::internal::InitSCC( - &scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto.base); - version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - artifact_key_ = nullptr; - clear_has_dimensions(); + &scc_info_TimePartition_flyteidl_2fcore_2fartifact_5fid_2eproto.base); + value_ = nullptr; } -ArtifactID::~ArtifactID() { - // @@protoc_insertion_point(destructor:flyteidl.core.ArtifactID) +TimePartition::~TimePartition() { + // @@protoc_insertion_point(destructor:flyteidl.core.TimePartition) SharedDtor(); } -void ArtifactID::SharedDtor() { - version_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (this != internal_default_instance()) delete artifact_key_; - if (has_dimensions()) { - clear_dimensions(); - } +void TimePartition::SharedDtor() { + if (this != internal_default_instance()) delete value_; } -void ArtifactID::SetCachedSize(int size) const { +void TimePartition::SetCachedSize(int size) const { _cached_size_.Set(size); } -const ArtifactID& ArtifactID::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto.base); +const TimePartition& TimePartition::default_instance() { + 
::google::protobuf::internal::InitSCC(&::scc_info_TimePartition_flyteidl_2fcore_2fartifact_5fid_2eproto.base); return *internal_default_instance(); } -void ArtifactID::clear_dimensions() { -// @@protoc_insertion_point(one_of_clear_start:flyteidl.core.ArtifactID) - switch (dimensions_case()) { - case kPartitions: { - delete dimensions_.partitions_; - break; - } - case DIMENSIONS_NOT_SET: { - break; - } - } - _oneof_case_[0] = DIMENSIONS_NOT_SET; -} - - -void ArtifactID::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.core.ArtifactID) +void TimePartition::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.core.TimePartition) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; - version_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); - if (GetArenaNoVirtual() == nullptr && artifact_key_ != nullptr) { - delete artifact_key_; + if (GetArenaNoVirtual() == nullptr && value_ != nullptr) { + delete value_; } - artifact_key_ = nullptr; - clear_dimensions(); + value_ = nullptr; _internal_metadata_.Clear(); } #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* ArtifactID::_InternalParse(const char* begin, const char* end, void* object, +const char* TimePartition::_InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); + auto msg = static_cast(object); ::google::protobuf::int32 size; (void)size; int depth; (void)depth; ::google::protobuf::uint32 tag; @@ -2606,39 +2727,372 @@ const char* ArtifactID::_InternalParse(const char* begin, const char* end, void* ptr = ::google::protobuf::io::Parse32(ptr, &tag); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); switch (tag >> 3) { - // .flyteidl.core.ArtifactKey artifact_key = 1; + // .flyteidl.core.LabelValue value = 1; case 1: { if (static_cast<::google::protobuf::uint8>(tag) != 10) goto 
handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::ArtifactKey::_InternalParse; - object = msg->mutable_artifact_key(); + parser_till_end = ::flyteidl::core::LabelValue::_InternalParse; + object = msg->mutable_value(); if (size > end - ptr) goto len_delim_till_end; ptr += size; GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( {parser_till_end, object}, ptr - size, ptr)); break; } - // string version = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.core.ArtifactID.version"); - object = msg->mutable_version(); - if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { - parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; - goto string_till_end; + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->EndGroup(tag); + return ptr; } - GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); - ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); - ptr += size; - break; + auto res = UnknownFieldParse(tag, {_InternalParse, msg}, + ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); + ptr = res.first; + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); + if (res.second) return ptr; } - // .flyteidl.core.Partitions partitions = 3; - case 3: { - if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); + } // switch + } // while + return ptr; +len_delim_till_end: + return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, + {parser_till_end, object}, size); +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool TimePartition::MergePartialFromCodedStream( + 
::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:flyteidl.core.TimePartition) + for (;;) { + ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // .flyteidl.core.LabelValue value = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_value())); + } else { + goto handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:flyteidl.core.TimePartition) + return true; +failure: + // @@protoc_insertion_point(parse_failure:flyteidl.core.TimePartition) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void TimePartition::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:flyteidl.core.TimePartition) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .flyteidl.core.LabelValue value = 1; + if (this->has_value()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 1, HasBitSetters::value(this), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:flyteidl.core.TimePartition) +} + +::google::protobuf::uint8* 
TimePartition::InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:flyteidl.core.TimePartition) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // .flyteidl.core.LabelValue value = 1; + if (this->has_value()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 1, HasBitSetters::value(this), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:flyteidl.core.TimePartition) + return target; +} + +size_t TimePartition::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:flyteidl.core.TimePartition) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // .flyteidl.core.LabelValue value = 1; + if (this->has_value()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *value_); + } + + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void TimePartition::MergeFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.core.TimePartition) + GOOGLE_DCHECK_NE(&from, this); + const TimePartition* source = + ::google::protobuf::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.core.TimePartition) + 
::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.core.TimePartition) + MergeFrom(*source); + } +} + +void TimePartition::MergeFrom(const TimePartition& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.core.TimePartition) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if (from.has_value()) { + mutable_value()->::flyteidl::core::LabelValue::MergeFrom(from.value()); + } +} + +void TimePartition::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.core.TimePartition) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void TimePartition::CopyFrom(const TimePartition& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.core.TimePartition) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool TimePartition::IsInitialized() const { + return true; +} + +void TimePartition::Swap(TimePartition* other) { + if (other == this) return; + InternalSwap(other); +} +void TimePartition::InternalSwap(TimePartition* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + swap(value_, other->value_); +} + +::google::protobuf::Metadata TimePartition::GetMetadata() const { + ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fcore_2fartifact_5fid_2eproto); + return ::file_level_metadata_flyteidl_2fcore_2fartifact_5fid_2eproto[kIndexInFileMessages]; +} + + +// =================================================================== + +void ArtifactID::InitAsDefaultInstance() { + ::flyteidl::core::_ArtifactID_default_instance_._instance.get_mutable()->artifact_key_ = const_cast< ::flyteidl::core::ArtifactKey*>( + 
::flyteidl::core::ArtifactKey::internal_default_instance()); + ::flyteidl::core::_ArtifactID_default_instance_._instance.get_mutable()->partitions_ = const_cast< ::flyteidl::core::Partitions*>( + ::flyteidl::core::Partitions::internal_default_instance()); + ::flyteidl::core::_ArtifactID_default_instance_._instance.get_mutable()->time_partition_ = const_cast< ::flyteidl::core::TimePartition*>( + ::flyteidl::core::TimePartition::internal_default_instance()); +} +class ArtifactID::HasBitSetters { + public: + static const ::flyteidl::core::ArtifactKey& artifact_key(const ArtifactID* msg); + static const ::flyteidl::core::Partitions& partitions(const ArtifactID* msg); + static const ::flyteidl::core::TimePartition& time_partition(const ArtifactID* msg); +}; + +const ::flyteidl::core::ArtifactKey& +ArtifactID::HasBitSetters::artifact_key(const ArtifactID* msg) { + return *msg->artifact_key_; +} +const ::flyteidl::core::Partitions& +ArtifactID::HasBitSetters::partitions(const ArtifactID* msg) { + return *msg->partitions_; +} +const ::flyteidl::core::TimePartition& +ArtifactID::HasBitSetters::time_partition(const ArtifactID* msg) { + return *msg->time_partition_; +} +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int ArtifactID::kArtifactKeyFieldNumber; +const int ArtifactID::kVersionFieldNumber; +const int ArtifactID::kPartitionsFieldNumber; +const int ArtifactID::kTimePartitionFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +ArtifactID::ArtifactID() + : ::google::protobuf::Message(), _internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:flyteidl.core.ArtifactID) +} +ArtifactID::ArtifactID(const ArtifactID& from) + : ::google::protobuf::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.version().size() > 0) { + 
version_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.version_); + } + if (from.has_artifact_key()) { + artifact_key_ = new ::flyteidl::core::ArtifactKey(*from.artifact_key_); + } else { + artifact_key_ = nullptr; + } + if (from.has_partitions()) { + partitions_ = new ::flyteidl::core::Partitions(*from.partitions_); + } else { + partitions_ = nullptr; + } + if (from.has_time_partition()) { + time_partition_ = new ::flyteidl::core::TimePartition(*from.time_partition_); + } else { + time_partition_ = nullptr; + } + // @@protoc_insertion_point(copy_constructor:flyteidl.core.ArtifactID) +} + +void ArtifactID::SharedCtor() { + ::google::protobuf::internal::InitSCC( + &scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto.base); + version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + ::memset(&artifact_key_, 0, static_cast( + reinterpret_cast(&time_partition_) - + reinterpret_cast(&artifact_key_)) + sizeof(time_partition_)); +} + +ArtifactID::~ArtifactID() { + // @@protoc_insertion_point(destructor:flyteidl.core.ArtifactID) + SharedDtor(); +} + +void ArtifactID::SharedDtor() { + version_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (this != internal_default_instance()) delete artifact_key_; + if (this != internal_default_instance()) delete partitions_; + if (this != internal_default_instance()) delete time_partition_; +} + +void ArtifactID::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const ArtifactID& ArtifactID::default_instance() { + ::google::protobuf::internal::InitSCC(&::scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto.base); + return *internal_default_instance(); +} + + +void ArtifactID::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.core.ArtifactID) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + 
version_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (GetArenaNoVirtual() == nullptr && artifact_key_ != nullptr) { + delete artifact_key_; + } + artifact_key_ = nullptr; + if (GetArenaNoVirtual() == nullptr && partitions_ != nullptr) { + delete partitions_; + } + partitions_ = nullptr; + if (GetArenaNoVirtual() == nullptr && time_partition_ != nullptr) { + delete time_partition_; + } + time_partition_ = nullptr; + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* ArtifactID::_InternalParse(const char* begin, const char* end, void* object, + ::google::protobuf::internal::ParseContext* ctx) { + auto msg = static_cast(object); + ::google::protobuf::int32 size; (void)size; + int depth; (void)depth; + ::google::protobuf::uint32 tag; + ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; + auto ptr = begin; + while (ptr < end) { + ptr = ::google::protobuf::io::Parse32(ptr, &tag); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + switch (tag >> 3) { + // .flyteidl.core.ArtifactKey artifact_key = 1; + case 1: { + if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::flyteidl::core::ArtifactKey::_InternalParse; + object = msg->mutable_artifact_key(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + break; + } + // string version = 2; + case 2: { + if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.core.ArtifactID.version"); + object = msg->mutable_version(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + 
parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } + // .flyteidl.core.Partitions partitions = 3; + case 3: { + if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); parser_till_end = ::flyteidl::core::Partitions::_InternalParse; object = msg->mutable_partitions(); @@ -2648,6 +3102,19 @@ const char* ArtifactID::_InternalParse(const char* begin, const char* end, void* {parser_till_end, object}, ptr - size, ptr)); break; } + // .flyteidl.core.TimePartition time_partition = 4; + case 4: { + if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::flyteidl::core::TimePartition::_InternalParse; + object = msg->mutable_time_partition(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, ptr)); + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -2719,6 +3186,17 @@ bool ArtifactID::MergePartialFromCodedStream( break; } + // .flyteidl.core.TimePartition time_partition = 4; + case 4: { + if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_time_partition())); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -2763,11 +3241,17 @@ void ArtifactID::SerializeWithCachedSizes( } // .flyteidl.core.Partitions partitions = 3; - if (has_partitions()) { + if (this->has_partitions()) { 
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 3, HasBitSetters::partitions(this), output); } + // .flyteidl.core.TimePartition time_partition = 4; + if (this->has_time_partition()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 4, HasBitSetters::time_partition(this), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -2800,12 +3284,19 @@ ::google::protobuf::uint8* ArtifactID::InternalSerializeWithCachedSizesToArray( } // .flyteidl.core.Partitions partitions = 3; - if (has_partitions()) { + if (this->has_partitions()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( 3, HasBitSetters::partitions(this), target); } + // .flyteidl.core.TimePartition time_partition = 4; + if (this->has_time_partition()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 4, HasBitSetters::time_partition(this), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -2838,21 +3329,23 @@ size_t ArtifactID::ByteSizeLong() const { if (this->has_artifact_key()) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( - *artifact_key_); + *artifact_key_); + } + + // .flyteidl.core.Partitions partitions = 3; + if (this->has_partitions()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *partitions_); + } + + // .flyteidl.core.TimePartition time_partition = 4; + if (this->has_time_partition()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *time_partition_); } - switch (dimensions_case()) { - // .flyteidl.core.Partitions partitions = 3; - case kPartitions: { - total_size += 1 + - 
::google::protobuf::internal::WireFormatLite::MessageSize( - *dimensions_.partitions_); - break; - } - case DIMENSIONS_NOT_SET: { - break; - } - } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; @@ -2887,14 +3380,11 @@ void ArtifactID::MergeFrom(const ArtifactID& from) { if (from.has_artifact_key()) { mutable_artifact_key()->::flyteidl::core::ArtifactKey::MergeFrom(from.artifact_key()); } - switch (from.dimensions_case()) { - case kPartitions: { - mutable_partitions()->::flyteidl::core::Partitions::MergeFrom(from.partitions()); - break; - } - case DIMENSIONS_NOT_SET: { - break; - } + if (from.has_partitions()) { + mutable_partitions()->::flyteidl::core::Partitions::MergeFrom(from.partitions()); + } + if (from.has_time_partition()) { + mutable_time_partition()->::flyteidl::core::TimePartition::MergeFrom(from.time_partition()); } } @@ -2926,8 +3416,8 @@ void ArtifactID::InternalSwap(ArtifactID* other) { version_.Swap(&other->version_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); swap(artifact_key_, other->artifact_key_); - swap(dimensions_, other->dimensions_); - swap(_oneof_case_[0], other->_oneof_case_[0]); + swap(partitions_, other->partitions_); + swap(time_partition_, other->time_partition_); } ::google::protobuf::Metadata ArtifactID::GetMetadata() const { @@ -3857,359 +4347,6 @@ ::google::protobuf::Metadata ArtifactQuery::GetMetadata() const { } -// =================================================================== - -void Trigger::InitAsDefaultInstance() { - ::flyteidl::core::_Trigger_default_instance_._instance.get_mutable()->trigger_id_ = const_cast< ::flyteidl::core::Identifier*>( - ::flyteidl::core::Identifier::internal_default_instance()); -} -class Trigger::HasBitSetters { - public: - static const ::flyteidl::core::Identifier& trigger_id(const Trigger* msg); -}; - -const ::flyteidl::core::Identifier& -Trigger::HasBitSetters::trigger_id(const 
Trigger* msg) { - return *msg->trigger_id_; -} -void Trigger::clear_trigger_id() { - if (GetArenaNoVirtual() == nullptr && trigger_id_ != nullptr) { - delete trigger_id_; - } - trigger_id_ = nullptr; -} -#if !defined(_MSC_VER) || _MSC_VER >= 1900 -const int Trigger::kTriggerIdFieldNumber; -const int Trigger::kTriggersFieldNumber; -#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 - -Trigger::Trigger() - : ::google::protobuf::Message(), _internal_metadata_(nullptr) { - SharedCtor(); - // @@protoc_insertion_point(constructor:flyteidl.core.Trigger) -} -Trigger::Trigger(const Trigger& from) - : ::google::protobuf::Message(), - _internal_metadata_(nullptr), - triggers_(from.triggers_) { - _internal_metadata_.MergeFrom(from._internal_metadata_); - if (from.has_trigger_id()) { - trigger_id_ = new ::flyteidl::core::Identifier(*from.trigger_id_); - } else { - trigger_id_ = nullptr; - } - // @@protoc_insertion_point(copy_constructor:flyteidl.core.Trigger) -} - -void Trigger::SharedCtor() { - ::google::protobuf::internal::InitSCC( - &scc_info_Trigger_flyteidl_2fcore_2fartifact_5fid_2eproto.base); - trigger_id_ = nullptr; -} - -Trigger::~Trigger() { - // @@protoc_insertion_point(destructor:flyteidl.core.Trigger) - SharedDtor(); -} - -void Trigger::SharedDtor() { - if (this != internal_default_instance()) delete trigger_id_; -} - -void Trigger::SetCachedSize(int size) const { - _cached_size_.Set(size); -} -const Trigger& Trigger::default_instance() { - ::google::protobuf::internal::InitSCC(&::scc_info_Trigger_flyteidl_2fcore_2fartifact_5fid_2eproto.base); - return *internal_default_instance(); -} - - -void Trigger::Clear() { -// @@protoc_insertion_point(message_clear_start:flyteidl.core.Trigger) - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - triggers_.Clear(); - if (GetArenaNoVirtual() == nullptr && trigger_id_ != nullptr) { - delete trigger_id_; - } - trigger_id_ = nullptr; - 
_internal_metadata_.Clear(); -} - -#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -const char* Trigger::_InternalParse(const char* begin, const char* end, void* object, - ::google::protobuf::internal::ParseContext* ctx) { - auto msg = static_cast(object); - ::google::protobuf::int32 size; (void)size; - int depth; (void)depth; - ::google::protobuf::uint32 tag; - ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; - auto ptr = begin; - while (ptr < end) { - ptr = ::google::protobuf::io::Parse32(ptr, &tag); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - switch (tag >> 3) { - // .flyteidl.core.Identifier trigger_id = 1; - case 1: { - if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::Identifier::_InternalParse; - object = msg->mutable_trigger_id(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // repeated .flyteidl.core.ArtifactID triggers = 2; - case 2: { - if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; - do { - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::ArtifactID::_InternalParse; - object = msg->add_triggers(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - if (ptr >= end) break; - } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 18 && (ptr += 1)); - break; - } - default: { - handle_unusual: - if ((tag & 7) == 4 || tag == 0) { - ctx->EndGroup(tag); - return ptr; - } - auto res = UnknownFieldParse(tag, {_InternalParse, msg}, - ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), 
ctx); - ptr = res.first; - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); - if (res.second) return ptr; - } - } // switch - } // while - return ptr; -len_delim_till_end: - return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, - {parser_till_end, object}, size); -} -#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER -bool Trigger::MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) { -#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure - ::google::protobuf::uint32 tag; - // @@protoc_insertion_point(parse_start:flyteidl.core.Trigger) - for (;;) { - ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); - tag = p.first; - if (!p.second) goto handle_unusual; - switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { - // .flyteidl.core.Identifier trigger_id = 1; - case 1: { - if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_trigger_id())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .flyteidl.core.ArtifactID triggers = 2; - case 2: { - if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, add_triggers())); - } else { - goto handle_unusual; - } - break; - } - - default: { - handle_unusual: - if (tag == 0) { - goto success; - } - DO_(::google::protobuf::internal::WireFormat::SkipField( - input, tag, _internal_metadata_.mutable_unknown_fields())); - break; - } - } - } -success: - // @@protoc_insertion_point(parse_success:flyteidl.core.Trigger) - return true; -failure: - // @@protoc_insertion_point(parse_failure:flyteidl.core.Trigger) - return false; -#undef DO_ -} -#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - -void Trigger::SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const { - // 
@@protoc_insertion_point(serialize_start:flyteidl.core.Trigger) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.Identifier trigger_id = 1; - if (this->has_trigger_id()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 1, HasBitSetters::trigger_id(this), output); - } - - // repeated .flyteidl.core.ArtifactID triggers = 2; - for (unsigned int i = 0, - n = static_cast(this->triggers_size()); i < n; i++) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 2, - this->triggers(static_cast(i)), - output); - } - - if (_internal_metadata_.have_unknown_fields()) { - ::google::protobuf::internal::WireFormat::SerializeUnknownFields( - _internal_metadata_.unknown_fields(), output); - } - // @@protoc_insertion_point(serialize_end:flyteidl.core.Trigger) -} - -::google::protobuf::uint8* Trigger::InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const { - // @@protoc_insertion_point(serialize_to_array_start:flyteidl.core.Trigger) - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - // .flyteidl.core.Identifier trigger_id = 1; - if (this->has_trigger_id()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 1, HasBitSetters::trigger_id(this), target); - } - - // repeated .flyteidl.core.ArtifactID triggers = 2; - for (unsigned int i = 0, - n = static_cast(this->triggers_size()); i < n; i++) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 2, this->triggers(static_cast(i)), target); - } - - if (_internal_metadata_.have_unknown_fields()) { - target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( - _internal_metadata_.unknown_fields(), target); - } - // @@protoc_insertion_point(serialize_to_array_end:flyteidl.core.Trigger) - return target; -} - -size_t Trigger::ByteSizeLong() const { -// 
@@protoc_insertion_point(message_byte_size_start:flyteidl.core.Trigger) - size_t total_size = 0; - - if (_internal_metadata_.have_unknown_fields()) { - total_size += - ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( - _internal_metadata_.unknown_fields()); - } - ::google::protobuf::uint32 cached_has_bits = 0; - // Prevent compiler warnings about cached_has_bits being unused - (void) cached_has_bits; - - // repeated .flyteidl.core.ArtifactID triggers = 2; - { - unsigned int count = static_cast(this->triggers_size()); - total_size += 1UL * count; - for (unsigned int i = 0; i < count; i++) { - total_size += - ::google::protobuf::internal::WireFormatLite::MessageSize( - this->triggers(static_cast(i))); - } - } - - // .flyteidl.core.Identifier trigger_id = 1; - if (this->has_trigger_id()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *trigger_id_); - } - - int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); - SetCachedSize(cached_size); - return total_size; -} - -void Trigger::MergeFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_merge_from_start:flyteidl.core.Trigger) - GOOGLE_DCHECK_NE(&from, this); - const Trigger* source = - ::google::protobuf::DynamicCastToGenerated( - &from); - if (source == nullptr) { - // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.core.Trigger) - ::google::protobuf::internal::ReflectionOps::Merge(from, this); - } else { - // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.core.Trigger) - MergeFrom(*source); - } -} - -void Trigger::MergeFrom(const Trigger& from) { -// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.core.Trigger) - GOOGLE_DCHECK_NE(&from, this); - _internal_metadata_.MergeFrom(from._internal_metadata_); - ::google::protobuf::uint32 cached_has_bits = 0; - (void) cached_has_bits; - - triggers_.MergeFrom(from.triggers_); - if (from.has_trigger_id()) { 
- mutable_trigger_id()->::flyteidl::core::Identifier::MergeFrom(from.trigger_id()); - } -} - -void Trigger::CopyFrom(const ::google::protobuf::Message& from) { -// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.core.Trigger) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -void Trigger::CopyFrom(const Trigger& from) { -// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.core.Trigger) - if (&from == this) return; - Clear(); - MergeFrom(from); -} - -bool Trigger::IsInitialized() const { - return true; -} - -void Trigger::Swap(Trigger* other) { - if (other == this) return; - InternalSwap(other); -} -void Trigger::InternalSwap(Trigger* other) { - using std::swap; - _internal_metadata_.Swap(&other->_internal_metadata_); - CastToBase(&triggers_)->InternalSwap(CastToBase(&other->triggers_)); - swap(trigger_id_, other->trigger_id_); -} - -::google::protobuf::Metadata Trigger::GetMetadata() const { - ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fcore_2fartifact_5fid_2eproto); - return ::file_level_metadata_flyteidl_2fcore_2fartifact_5fid_2eproto[kIndexInFileMessages]; -} - - // @@protoc_insertion_point(namespace_scope) } // namespace core } // namespace flyteidl @@ -4233,6 +4370,9 @@ template<> PROTOBUF_NOINLINE ::flyteidl::core::Partitions_ValueEntry_DoNotUse* A template<> PROTOBUF_NOINLINE ::flyteidl::core::Partitions* Arena::CreateMaybeMessage< ::flyteidl::core::Partitions >(Arena* arena) { return Arena::CreateInternal< ::flyteidl::core::Partitions >(arena); } +template<> PROTOBUF_NOINLINE ::flyteidl::core::TimePartition* Arena::CreateMaybeMessage< ::flyteidl::core::TimePartition >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::core::TimePartition >(arena); +} template<> PROTOBUF_NOINLINE ::flyteidl::core::ArtifactID* Arena::CreateMaybeMessage< ::flyteidl::core::ArtifactID >(Arena* arena) { return Arena::CreateInternal< ::flyteidl::core::ArtifactID >(arena); } @@ -4242,9 
+4382,6 @@ template<> PROTOBUF_NOINLINE ::flyteidl::core::ArtifactTag* Arena::CreateMaybeMe template<> PROTOBUF_NOINLINE ::flyteidl::core::ArtifactQuery* Arena::CreateMaybeMessage< ::flyteidl::core::ArtifactQuery >(Arena* arena) { return Arena::CreateInternal< ::flyteidl::core::ArtifactQuery >(arena); } -template<> PROTOBUF_NOINLINE ::flyteidl::core::Trigger* Arena::CreateMaybeMessage< ::flyteidl::core::Trigger >(Arena* arena) { - return Arena::CreateInternal< ::flyteidl::core::Trigger >(arena); -} } // namespace protobuf } // namespace google diff --git a/flyteidl/gen/pb-cpp/flyteidl/core/artifact_id.pb.h b/flyteidl/gen/pb-cpp/flyteidl/core/artifact_id.pb.h index 12c9cbd59f..5efd7a8cfb 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/core/artifact_id.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/core/artifact_id.pb.h @@ -34,6 +34,7 @@ #include #include #include +#include #include "flyteidl/core/identifier.pb.h" // @@protoc_insertion_point(includes) #include @@ -81,9 +82,9 @@ extern PartitionsDefaultTypeInternal _Partitions_default_instance_; class Partitions_ValueEntry_DoNotUse; class Partitions_ValueEntry_DoNotUseDefaultTypeInternal; extern Partitions_ValueEntry_DoNotUseDefaultTypeInternal _Partitions_ValueEntry_DoNotUse_default_instance_; -class Trigger; -class TriggerDefaultTypeInternal; -extern TriggerDefaultTypeInternal _Trigger_default_instance_; +class TimePartition; +class TimePartitionDefaultTypeInternal; +extern TimePartitionDefaultTypeInternal _TimePartition_default_instance_; } // namespace core } // namespace flyteidl namespace google { @@ -97,7 +98,7 @@ template<> ::flyteidl::core::InputBindingData* Arena::CreateMaybeMessage<::flyte template<> ::flyteidl::core::LabelValue* Arena::CreateMaybeMessage<::flyteidl::core::LabelValue>(Arena*); template<> ::flyteidl::core::Partitions* Arena::CreateMaybeMessage<::flyteidl::core::Partitions>(Arena*); template<> ::flyteidl::core::Partitions_ValueEntry_DoNotUse* 
Arena::CreateMaybeMessage<::flyteidl::core::Partitions_ValueEntry_DoNotUse>(Arena*); -template<> ::flyteidl::core::Trigger* Arena::CreateMaybeMessage<::flyteidl::core::Trigger>(Arena*); +template<> ::flyteidl::core::TimePartition* Arena::CreateMaybeMessage<::flyteidl::core::TimePartition>(Arena*); } // namespace protobuf } // namespace google namespace flyteidl { @@ -287,6 +288,12 @@ class ArtifactBindingData final : } static const ArtifactBindingData& default_instance(); + enum PartitionDataCase { + kPartitionKey = 2, + kBindToTimePartition = 3, + PARTITION_DATA_NOT_SET = 0, + }; + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY static inline const ArtifactBindingData* internal_default_instance() { return reinterpret_cast( @@ -350,23 +357,9 @@ class ArtifactBindingData final : // accessors ------------------------------------------------------- - // string partition_key = 2; - void clear_partition_key(); - static const int kPartitionKeyFieldNumber = 2; - const ::std::string& partition_key() const; - void set_partition_key(const ::std::string& value); - #if LANG_CXX11 - void set_partition_key(::std::string&& value); - #endif - void set_partition_key(const char* value); - void set_partition_key(const char* value, size_t size); - ::std::string* mutable_partition_key(); - ::std::string* release_partition_key(); - void set_allocated_partition_key(::std::string* partition_key); - - // string transform = 3; + // string transform = 4; void clear_transform(); - static const int kTransformFieldNumber = 3; + static const int kTransformFieldNumber = 4; const ::std::string& transform() const; void set_transform(const ::std::string& value); #if LANG_CXX11 @@ -384,15 +377,54 @@ class ArtifactBindingData final : ::google::protobuf::uint32 index() const; void set_index(::google::protobuf::uint32 value); + // string partition_key = 2; + private: + bool has_partition_key() const; + public: + void clear_partition_key(); + static const int kPartitionKeyFieldNumber = 2; + 
const ::std::string& partition_key() const; + void set_partition_key(const ::std::string& value); + #if LANG_CXX11 + void set_partition_key(::std::string&& value); + #endif + void set_partition_key(const char* value); + void set_partition_key(const char* value, size_t size); + ::std::string* mutable_partition_key(); + ::std::string* release_partition_key(); + void set_allocated_partition_key(::std::string* partition_key); + + // bool bind_to_time_partition = 3; + private: + bool has_bind_to_time_partition() const; + public: + void clear_bind_to_time_partition(); + static const int kBindToTimePartitionFieldNumber = 3; + bool bind_to_time_partition() const; + void set_bind_to_time_partition(bool value); + + void clear_partition_data(); + PartitionDataCase partition_data_case() const; // @@protoc_insertion_point(class_scope:flyteidl.core.ArtifactBindingData) private: class HasBitSetters; + void set_has_partition_key(); + void set_has_bind_to_time_partition(); + + inline bool has_partition_data() const; + inline void clear_has_partition_data(); ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::internal::ArenaStringPtr partition_key_; ::google::protobuf::internal::ArenaStringPtr transform_; ::google::protobuf::uint32 index_; + union PartitionDataUnion { + PartitionDataUnion() {} + ::google::protobuf::internal::ArenaStringPtr partition_key_; + bool bind_to_time_partition_; + } partition_data_; mutable ::google::protobuf::internal::CachedSize _cached_size_; + ::google::protobuf::uint32 _oneof_case_[1]; + friend struct ::TableStruct_flyteidl_2fcore_2fartifact_5fid_2eproto; }; // ------------------------------------------------------------------- @@ -551,8 +583,9 @@ class LabelValue final : enum ValueCase { kStaticValue = 1, - kTriggeredBinding = 2, - kInputBinding = 3, + kTimeValue = 2, + kTriggeredBinding = 3, + kInputBinding = 4, VALUE_NOT_SET = 0, }; @@ -636,19 +669,28 @@ class LabelValue final : ::std::string* 
release_static_value(); void set_allocated_static_value(::std::string* static_value); - // .flyteidl.core.ArtifactBindingData triggered_binding = 2; + // .google.protobuf.Timestamp time_value = 2; + bool has_time_value() const; + void clear_time_value(); + static const int kTimeValueFieldNumber = 2; + const ::google::protobuf::Timestamp& time_value() const; + ::google::protobuf::Timestamp* release_time_value(); + ::google::protobuf::Timestamp* mutable_time_value(); + void set_allocated_time_value(::google::protobuf::Timestamp* time_value); + + // .flyteidl.core.ArtifactBindingData triggered_binding = 3; bool has_triggered_binding() const; void clear_triggered_binding(); - static const int kTriggeredBindingFieldNumber = 2; + static const int kTriggeredBindingFieldNumber = 3; const ::flyteidl::core::ArtifactBindingData& triggered_binding() const; ::flyteidl::core::ArtifactBindingData* release_triggered_binding(); ::flyteidl::core::ArtifactBindingData* mutable_triggered_binding(); void set_allocated_triggered_binding(::flyteidl::core::ArtifactBindingData* triggered_binding); - // .flyteidl.core.InputBindingData input_binding = 3; + // .flyteidl.core.InputBindingData input_binding = 4; bool has_input_binding() const; void clear_input_binding(); - static const int kInputBindingFieldNumber = 3; + static const int kInputBindingFieldNumber = 4; const ::flyteidl::core::InputBindingData& input_binding() const; ::flyteidl::core::InputBindingData* release_input_binding(); ::flyteidl::core::InputBindingData* mutable_input_binding(); @@ -660,6 +702,7 @@ class LabelValue final : private: class HasBitSetters; void set_has_static_value(); + void set_has_time_value(); void set_has_triggered_binding(); void set_has_input_binding(); @@ -670,6 +713,7 @@ class LabelValue final : union ValueUnion { ValueUnion() {} ::google::protobuf::internal::ArenaStringPtr static_value_; + ::google::protobuf::Timestamp* time_value_; ::flyteidl::core::ArtifactBindingData* triggered_binding_; 
::flyteidl::core::InputBindingData* input_binding_; } value_; @@ -825,6 +869,121 @@ class Partitions final : }; // ------------------------------------------------------------------- +class TimePartition final : + public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.core.TimePartition) */ { + public: + TimePartition(); + virtual ~TimePartition(); + + TimePartition(const TimePartition& from); + + inline TimePartition& operator=(const TimePartition& from) { + CopyFrom(from); + return *this; + } + #if LANG_CXX11 + TimePartition(TimePartition&& from) noexcept + : TimePartition() { + *this = ::std::move(from); + } + + inline TimePartition& operator=(TimePartition&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + #endif + static const ::google::protobuf::Descriptor* descriptor() { + return default_instance().GetDescriptor(); + } + static const TimePartition& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const TimePartition* internal_default_instance() { + return reinterpret_cast( + &_TimePartition_default_instance_); + } + static constexpr int kIndexInFileMessages = + 6; + + void Swap(TimePartition* other); + friend void swap(TimePartition& a, TimePartition& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline TimePartition* New() const final { + return CreateMaybeMessage(nullptr); + } + + TimePartition* New(::google::protobuf::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::google::protobuf::Message& from) final; + void MergeFrom(const ::google::protobuf::Message& from) final; + void CopyFrom(const TimePartition& from); + void MergeFrom(const TimePartition& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + 
size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); + ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } + #else + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const final; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const final; + void InternalSwap(TimePartition* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::google::protobuf::Metadata GetMetadata() const final; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // .flyteidl.core.LabelValue value = 1; + bool has_value() const; + void clear_value(); + static const int kValueFieldNumber = 1; + const ::flyteidl::core::LabelValue& value() const; + ::flyteidl::core::LabelValue* release_value(); + ::flyteidl::core::LabelValue* mutable_value(); + void set_allocated_value(::flyteidl::core::LabelValue* value); + + // @@protoc_insertion_point(class_scope:flyteidl.core.TimePartition) + private: + class HasBitSetters; + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::flyteidl::core::LabelValue* value_; + mutable ::google::protobuf::internal::CachedSize _cached_size_; + friend struct ::TableStruct_flyteidl_2fcore_2fartifact_5fid_2eproto; +}; +// 
------------------------------------------------------------------- + class ArtifactID final : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.core.ArtifactID) */ { public: @@ -857,18 +1016,13 @@ class ArtifactID final : } static const ArtifactID& default_instance(); - enum DimensionsCase { - kPartitions = 3, - DIMENSIONS_NOT_SET = 0, - }; - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY static inline const ArtifactID* internal_default_instance() { return reinterpret_cast( &_ArtifactID_default_instance_); } static constexpr int kIndexInFileMessages = - 6; + 7; void Swap(ArtifactID* other); friend void swap(ArtifactID& a, ArtifactID& b) { @@ -957,26 +1111,25 @@ class ArtifactID final : ::flyteidl::core::Partitions* mutable_partitions(); void set_allocated_partitions(::flyteidl::core::Partitions* partitions); - void clear_dimensions(); - DimensionsCase dimensions_case() const; + // .flyteidl.core.TimePartition time_partition = 4; + bool has_time_partition() const; + void clear_time_partition(); + static const int kTimePartitionFieldNumber = 4; + const ::flyteidl::core::TimePartition& time_partition() const; + ::flyteidl::core::TimePartition* release_time_partition(); + ::flyteidl::core::TimePartition* mutable_time_partition(); + void set_allocated_time_partition(::flyteidl::core::TimePartition* time_partition); + // @@protoc_insertion_point(class_scope:flyteidl.core.ArtifactID) private: class HasBitSetters; - void set_has_partitions(); - - inline bool has_dimensions() const; - inline void clear_has_dimensions(); ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::internal::ArenaStringPtr version_; ::flyteidl::core::ArtifactKey* artifact_key_; - union DimensionsUnion { - DimensionsUnion() {} - ::flyteidl::core::Partitions* partitions_; - } dimensions_; + ::flyteidl::core::Partitions* partitions_; + ::flyteidl::core::TimePartition* time_partition_; mutable 
::google::protobuf::internal::CachedSize _cached_size_; - ::google::protobuf::uint32 _oneof_case_[1]; - friend struct ::TableStruct_flyteidl_2fcore_2fartifact_5fid_2eproto; }; // ------------------------------------------------------------------- @@ -1019,7 +1172,7 @@ class ArtifactTag final : &_ArtifactTag_default_instance_); } static constexpr int kIndexInFileMessages = - 7; + 8; void Swap(ArtifactTag* other); friend void swap(ArtifactTag& a, ArtifactTag& b) { @@ -1152,7 +1305,7 @@ class ArtifactQuery final : &_ArtifactQuery_default_instance_); } static constexpr int kIndexInFileMessages = - 8; + 9; void Swap(ArtifactQuery* other); friend void swap(ArtifactQuery& a, ArtifactQuery& b) { @@ -1279,134 +1432,6 @@ class ArtifactQuery final : friend struct ::TableStruct_flyteidl_2fcore_2fartifact_5fid_2eproto; }; -// ------------------------------------------------------------------- - -class Trigger final : - public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.core.Trigger) */ { - public: - Trigger(); - virtual ~Trigger(); - - Trigger(const Trigger& from); - - inline Trigger& operator=(const Trigger& from) { - CopyFrom(from); - return *this; - } - #if LANG_CXX11 - Trigger(Trigger&& from) noexcept - : Trigger() { - *this = ::std::move(from); - } - - inline Trigger& operator=(Trigger&& from) noexcept { - if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { - if (this != &from) InternalSwap(&from); - } else { - CopyFrom(from); - } - return *this; - } - #endif - static const ::google::protobuf::Descriptor* descriptor() { - return default_instance().GetDescriptor(); - } - static const Trigger& default_instance(); - - static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY - static inline const Trigger* internal_default_instance() { - return reinterpret_cast( - &_Trigger_default_instance_); - } - static constexpr int kIndexInFileMessages = - 9; - - void Swap(Trigger* other); - friend void swap(Trigger& a, Trigger& b) { - 
a.Swap(&b); - } - - // implements Message ---------------------------------------------- - - inline Trigger* New() const final { - return CreateMaybeMessage(nullptr); - } - - Trigger* New(::google::protobuf::Arena* arena) const final { - return CreateMaybeMessage(arena); - } - void CopyFrom(const ::google::protobuf::Message& from) final; - void MergeFrom(const ::google::protobuf::Message& from) final; - void CopyFrom(const Trigger& from); - void MergeFrom(const Trigger& from); - PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; - bool IsInitialized() const final; - - size_t ByteSizeLong() const final; - #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); - ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } - #else - bool MergePartialFromCodedStream( - ::google::protobuf::io::CodedInputStream* input) final; - #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER - void SerializeWithCachedSizes( - ::google::protobuf::io::CodedOutputStream* output) const final; - ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( - ::google::protobuf::uint8* target) const final; - int GetCachedSize() const final { return _cached_size_.Get(); } - - private: - void SharedCtor(); - void SharedDtor(); - void SetCachedSize(int size) const final; - void InternalSwap(Trigger* other); - private: - inline ::google::protobuf::Arena* GetArenaNoVirtual() const { - return nullptr; - } - inline void* MaybeArenaPtr() const { - return nullptr; - } - public: - - ::google::protobuf::Metadata GetMetadata() const final; - - // nested types ---------------------------------------------------- - - // accessors ------------------------------------------------------- - - // repeated .flyteidl.core.ArtifactID triggers = 2; - int triggers_size() const; - void clear_triggers(); - static const int kTriggersFieldNumber = 2; - 
::flyteidl::core::ArtifactID* mutable_triggers(int index); - ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >* - mutable_triggers(); - const ::flyteidl::core::ArtifactID& triggers(int index) const; - ::flyteidl::core::ArtifactID* add_triggers(); - const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >& - triggers() const; - - // .flyteidl.core.Identifier trigger_id = 1; - bool has_trigger_id() const; - void clear_trigger_id(); - static const int kTriggerIdFieldNumber = 1; - const ::flyteidl::core::Identifier& trigger_id() const; - ::flyteidl::core::Identifier* release_trigger_id(); - ::flyteidl::core::Identifier* mutable_trigger_id(); - void set_allocated_trigger_id(::flyteidl::core::Identifier* trigger_id); - - // @@protoc_insertion_point(class_scope:flyteidl.core.Trigger) - private: - class HasBitSetters; - - ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; - ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID > triggers_; - ::flyteidl::core::Identifier* trigger_id_; - mutable ::google::protobuf::internal::CachedSize _cached_size_; - friend struct ::TableStruct_flyteidl_2fcore_2fartifact_5fid_2eproto; -}; // =================================================================== @@ -1596,59 +1621,127 @@ inline void ArtifactBindingData::set_index(::google::protobuf::uint32 value) { } // string partition_key = 2; +inline bool ArtifactBindingData::has_partition_key() const { + return partition_data_case() == kPartitionKey; +} +inline void ArtifactBindingData::set_has_partition_key() { + _oneof_case_[0] = kPartitionKey; +} inline void ArtifactBindingData::clear_partition_key() { - partition_key_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (has_partition_key()) { + partition_data_.partition_key_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_partition_data(); + } } inline const ::std::string& 
ArtifactBindingData::partition_key() const { // @@protoc_insertion_point(field_get:flyteidl.core.ArtifactBindingData.partition_key) - return partition_key_.GetNoArena(); + if (has_partition_key()) { + return partition_data_.partition_key_.GetNoArena(); + } + return *&::google::protobuf::internal::GetEmptyStringAlreadyInited(); } inline void ArtifactBindingData::set_partition_key(const ::std::string& value) { - - partition_key_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.core.ArtifactBindingData.partition_key) + if (!has_partition_key()) { + clear_partition_data(); + set_has_partition_key(); + partition_data_.partition_key_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + partition_data_.partition_key_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); // @@protoc_insertion_point(field_set:flyteidl.core.ArtifactBindingData.partition_key) } #if LANG_CXX11 inline void ArtifactBindingData::set_partition_key(::std::string&& value) { - - partition_key_.SetNoArena( - &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set:flyteidl.core.ArtifactBindingData.partition_key) + if (!has_partition_key()) { + clear_partition_data(); + set_has_partition_key(); + partition_data_.partition_key_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + partition_data_.partition_key_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); // @@protoc_insertion_point(field_set_rvalue:flyteidl.core.ArtifactBindingData.partition_key) } #endif inline void ArtifactBindingData::set_partition_key(const char* value) { GOOGLE_DCHECK(value != nullptr); - - partition_key_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + if (!has_partition_key()) { + 
clear_partition_data(); + set_has_partition_key(); + partition_data_.partition_key_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + partition_data_.partition_key_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(value)); // @@protoc_insertion_point(field_set_char:flyteidl.core.ArtifactBindingData.partition_key) } inline void ArtifactBindingData::set_partition_key(const char* value, size_t size) { - - partition_key_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), - ::std::string(reinterpret_cast(value), size)); + if (!has_partition_key()) { + clear_partition_data(); + set_has_partition_key(); + partition_data_.partition_key_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + partition_data_.partition_key_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string( + reinterpret_cast(value), size)); // @@protoc_insertion_point(field_set_pointer:flyteidl.core.ArtifactBindingData.partition_key) } inline ::std::string* ArtifactBindingData::mutable_partition_key() { - + if (!has_partition_key()) { + clear_partition_data(); + set_has_partition_key(); + partition_data_.partition_key_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } // @@protoc_insertion_point(field_mutable:flyteidl.core.ArtifactBindingData.partition_key) - return partition_key_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + return partition_data_.partition_key_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } inline ::std::string* ArtifactBindingData::release_partition_key() { // @@protoc_insertion_point(field_release:flyteidl.core.ArtifactBindingData.partition_key) - - return partition_key_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (has_partition_key()) { + clear_has_partition_data(); + return 
partition_data_.partition_key_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } else { + return nullptr; + } } inline void ArtifactBindingData::set_allocated_partition_key(::std::string* partition_key) { + if (has_partition_data()) { + clear_partition_data(); + } if (partition_key != nullptr) { - - } else { - + set_has_partition_key(); + partition_data_.partition_key_.UnsafeSetDefault(partition_key); } - partition_key_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), partition_key); // @@protoc_insertion_point(field_set_allocated:flyteidl.core.ArtifactBindingData.partition_key) } -// string transform = 3; +// bool bind_to_time_partition = 3; +inline bool ArtifactBindingData::has_bind_to_time_partition() const { + return partition_data_case() == kBindToTimePartition; +} +inline void ArtifactBindingData::set_has_bind_to_time_partition() { + _oneof_case_[0] = kBindToTimePartition; +} +inline void ArtifactBindingData::clear_bind_to_time_partition() { + if (has_bind_to_time_partition()) { + partition_data_.bind_to_time_partition_ = false; + clear_has_partition_data(); + } +} +inline bool ArtifactBindingData::bind_to_time_partition() const { + // @@protoc_insertion_point(field_get:flyteidl.core.ArtifactBindingData.bind_to_time_partition) + if (has_bind_to_time_partition()) { + return partition_data_.bind_to_time_partition_; + } + return false; +} +inline void ArtifactBindingData::set_bind_to_time_partition(bool value) { + if (!has_bind_to_time_partition()) { + clear_partition_data(); + set_has_bind_to_time_partition(); + } + partition_data_.bind_to_time_partition_ = value; + // @@protoc_insertion_point(field_set:flyteidl.core.ArtifactBindingData.bind_to_time_partition) +} + +// string transform = 4; inline void ArtifactBindingData::clear_transform() { transform_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } @@ -1701,6 +1794,15 @@ inline void 
ArtifactBindingData::set_allocated_transform(::std::string* transfor // @@protoc_insertion_point(field_set_allocated:flyteidl.core.ArtifactBindingData.transform) } +inline bool ArtifactBindingData::has_partition_data() const { + return partition_data_case() != PARTITION_DATA_NOT_SET; +} +inline void ArtifactBindingData::clear_has_partition_data() { + _oneof_case_[0] = PARTITION_DATA_NOT_SET; +} +inline ArtifactBindingData::PartitionDataCase ArtifactBindingData::partition_data_case() const { + return ArtifactBindingData::PartitionDataCase(_oneof_case_[0]); +} // ------------------------------------------------------------------- // InputBindingData @@ -1854,7 +1956,42 @@ inline void LabelValue::set_allocated_static_value(::std::string* static_value) // @@protoc_insertion_point(field_set_allocated:flyteidl.core.LabelValue.static_value) } -// .flyteidl.core.ArtifactBindingData triggered_binding = 2; +// .google.protobuf.Timestamp time_value = 2; +inline bool LabelValue::has_time_value() const { + return value_case() == kTimeValue; +} +inline void LabelValue::set_has_time_value() { + _oneof_case_[0] = kTimeValue; +} +inline ::google::protobuf::Timestamp* LabelValue::release_time_value() { + // @@protoc_insertion_point(field_release:flyteidl.core.LabelValue.time_value) + if (has_time_value()) { + clear_has_value(); + ::google::protobuf::Timestamp* temp = value_.time_value_; + value_.time_value_ = nullptr; + return temp; + } else { + return nullptr; + } +} +inline const ::google::protobuf::Timestamp& LabelValue::time_value() const { + // @@protoc_insertion_point(field_get:flyteidl.core.LabelValue.time_value) + return has_time_value() + ? 
*value_.time_value_ + : *reinterpret_cast< ::google::protobuf::Timestamp*>(&::google::protobuf::_Timestamp_default_instance_); +} +inline ::google::protobuf::Timestamp* LabelValue::mutable_time_value() { + if (!has_time_value()) { + clear_value(); + set_has_time_value(); + value_.time_value_ = CreateMaybeMessage< ::google::protobuf::Timestamp >( + GetArenaNoVirtual()); + } + // @@protoc_insertion_point(field_mutable:flyteidl.core.LabelValue.time_value) + return value_.time_value_; +} + +// .flyteidl.core.ArtifactBindingData triggered_binding = 3; inline bool LabelValue::has_triggered_binding() const { return value_case() == kTriggeredBinding; } @@ -1895,7 +2032,7 @@ inline ::flyteidl::core::ArtifactBindingData* LabelValue::mutable_triggered_bind return value_.triggered_binding_; } -// .flyteidl.core.InputBindingData input_binding = 3; +// .flyteidl.core.InputBindingData input_binding = 4; inline bool LabelValue::has_input_binding() const { return value_case() == kInputBinding; } @@ -1971,6 +2108,61 @@ Partitions::mutable_value() { // ------------------------------------------------------------------- +// TimePartition + +// .flyteidl.core.LabelValue value = 1; +inline bool TimePartition::has_value() const { + return this != internal_default_instance() && value_ != nullptr; +} +inline void TimePartition::clear_value() { + if (GetArenaNoVirtual() == nullptr && value_ != nullptr) { + delete value_; + } + value_ = nullptr; +} +inline const ::flyteidl::core::LabelValue& TimePartition::value() const { + const ::flyteidl::core::LabelValue* p = value_; + // @@protoc_insertion_point(field_get:flyteidl.core.TimePartition.value) + return p != nullptr ? 
*p : *reinterpret_cast( + &::flyteidl::core::_LabelValue_default_instance_); +} +inline ::flyteidl::core::LabelValue* TimePartition::release_value() { + // @@protoc_insertion_point(field_release:flyteidl.core.TimePartition.value) + + ::flyteidl::core::LabelValue* temp = value_; + value_ = nullptr; + return temp; +} +inline ::flyteidl::core::LabelValue* TimePartition::mutable_value() { + + if (value_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::core::LabelValue>(GetArenaNoVirtual()); + value_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.core.TimePartition.value) + return value_; +} +inline void TimePartition::set_allocated_value(::flyteidl::core::LabelValue* value) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete value_; + } + if (value) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + value = ::google::protobuf::internal::GetOwnedMessage( + message_arena, value, submessage_arena); + } + + } else { + + } + value_ = value; + // @@protoc_insertion_point(field_set_allocated:flyteidl.core.TimePartition.value) +} + +// ------------------------------------------------------------------- + // ArtifactID // .flyteidl.core.ArtifactKey artifact_key = 1; @@ -2079,54 +2271,106 @@ inline void ArtifactID::set_allocated_version(::std::string* version) { // .flyteidl.core.Partitions partitions = 3; inline bool ArtifactID::has_partitions() const { - return dimensions_case() == kPartitions; -} -inline void ArtifactID::set_has_partitions() { - _oneof_case_[0] = kPartitions; + return this != internal_default_instance() && partitions_ != nullptr; } inline void ArtifactID::clear_partitions() { - if (has_partitions()) { - delete dimensions_.partitions_; - clear_has_dimensions(); - } -} -inline ::flyteidl::core::Partitions* ArtifactID::release_partitions() { - // @@protoc_insertion_point(field_release:flyteidl.core.ArtifactID.partitions) - if 
(has_partitions()) { - clear_has_dimensions(); - ::flyteidl::core::Partitions* temp = dimensions_.partitions_; - dimensions_.partitions_ = nullptr; - return temp; - } else { - return nullptr; + if (GetArenaNoVirtual() == nullptr && partitions_ != nullptr) { + delete partitions_; } + partitions_ = nullptr; } inline const ::flyteidl::core::Partitions& ArtifactID::partitions() const { + const ::flyteidl::core::Partitions* p = partitions_; // @@protoc_insertion_point(field_get:flyteidl.core.ArtifactID.partitions) - return has_partitions() - ? *dimensions_.partitions_ - : *reinterpret_cast< ::flyteidl::core::Partitions*>(&::flyteidl::core::_Partitions_default_instance_); + return p != nullptr ? *p : *reinterpret_cast( + &::flyteidl::core::_Partitions_default_instance_); +} +inline ::flyteidl::core::Partitions* ArtifactID::release_partitions() { + // @@protoc_insertion_point(field_release:flyteidl.core.ArtifactID.partitions) + + ::flyteidl::core::Partitions* temp = partitions_; + partitions_ = nullptr; + return temp; } inline ::flyteidl::core::Partitions* ArtifactID::mutable_partitions() { - if (!has_partitions()) { - clear_dimensions(); - set_has_partitions(); - dimensions_.partitions_ = CreateMaybeMessage< ::flyteidl::core::Partitions >( - GetArenaNoVirtual()); + + if (partitions_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::core::Partitions>(GetArenaNoVirtual()); + partitions_ = p; } // @@protoc_insertion_point(field_mutable:flyteidl.core.ArtifactID.partitions) - return dimensions_.partitions_; + return partitions_; +} +inline void ArtifactID::set_allocated_partitions(::flyteidl::core::Partitions* partitions) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete partitions_; + } + if (partitions) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + partitions = ::google::protobuf::internal::GetOwnedMessage( + message_arena, partitions, 
submessage_arena); + } + + } else { + + } + partitions_ = partitions; + // @@protoc_insertion_point(field_set_allocated:flyteidl.core.ArtifactID.partitions) } -inline bool ArtifactID::has_dimensions() const { - return dimensions_case() != DIMENSIONS_NOT_SET; +// .flyteidl.core.TimePartition time_partition = 4; +inline bool ArtifactID::has_time_partition() const { + return this != internal_default_instance() && time_partition_ != nullptr; } -inline void ArtifactID::clear_has_dimensions() { - _oneof_case_[0] = DIMENSIONS_NOT_SET; +inline void ArtifactID::clear_time_partition() { + if (GetArenaNoVirtual() == nullptr && time_partition_ != nullptr) { + delete time_partition_; + } + time_partition_ = nullptr; } -inline ArtifactID::DimensionsCase ArtifactID::dimensions_case() const { - return ArtifactID::DimensionsCase(_oneof_case_[0]); +inline const ::flyteidl::core::TimePartition& ArtifactID::time_partition() const { + const ::flyteidl::core::TimePartition* p = time_partition_; + // @@protoc_insertion_point(field_get:flyteidl.core.ArtifactID.time_partition) + return p != nullptr ? 
*p : *reinterpret_cast( + &::flyteidl::core::_TimePartition_default_instance_); } +inline ::flyteidl::core::TimePartition* ArtifactID::release_time_partition() { + // @@protoc_insertion_point(field_release:flyteidl.core.ArtifactID.time_partition) + + ::flyteidl::core::TimePartition* temp = time_partition_; + time_partition_ = nullptr; + return temp; +} +inline ::flyteidl::core::TimePartition* ArtifactID::mutable_time_partition() { + + if (time_partition_ == nullptr) { + auto* p = CreateMaybeMessage<::flyteidl::core::TimePartition>(GetArenaNoVirtual()); + time_partition_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.core.ArtifactID.time_partition) + return time_partition_; +} +inline void ArtifactID::set_allocated_time_partition(::flyteidl::core::TimePartition* time_partition) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete time_partition_; + } + if (time_partition) { + ::google::protobuf::Arena* submessage_arena = nullptr; + if (message_arena != submessage_arena) { + time_partition = ::google::protobuf::internal::GetOwnedMessage( + message_arena, time_partition, submessage_arena); + } + + } else { + + } + time_partition_ = time_partition; + // @@protoc_insertion_point(field_set_allocated:flyteidl.core.ArtifactID.time_partition) +} + // ------------------------------------------------------------------- // ArtifactTag @@ -2461,85 +2705,6 @@ inline void ArtifactQuery::clear_has_identifier() { inline ArtifactQuery::IdentifierCase ArtifactQuery::identifier_case() const { return ArtifactQuery::IdentifierCase(_oneof_case_[0]); } -// ------------------------------------------------------------------- - -// Trigger - -// .flyteidl.core.Identifier trigger_id = 1; -inline bool Trigger::has_trigger_id() const { - return this != internal_default_instance() && trigger_id_ != nullptr; -} -inline const ::flyteidl::core::Identifier& Trigger::trigger_id() const { - const ::flyteidl::core::Identifier* p = 
trigger_id_; - // @@protoc_insertion_point(field_get:flyteidl.core.Trigger.trigger_id) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_Identifier_default_instance_); -} -inline ::flyteidl::core::Identifier* Trigger::release_trigger_id() { - // @@protoc_insertion_point(field_release:flyteidl.core.Trigger.trigger_id) - - ::flyteidl::core::Identifier* temp = trigger_id_; - trigger_id_ = nullptr; - return temp; -} -inline ::flyteidl::core::Identifier* Trigger::mutable_trigger_id() { - - if (trigger_id_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::Identifier>(GetArenaNoVirtual()); - trigger_id_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.core.Trigger.trigger_id) - return trigger_id_; -} -inline void Trigger::set_allocated_trigger_id(::flyteidl::core::Identifier* trigger_id) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(trigger_id_); - } - if (trigger_id) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - trigger_id = ::google::protobuf::internal::GetOwnedMessage( - message_arena, trigger_id, submessage_arena); - } - - } else { - - } - trigger_id_ = trigger_id; - // @@protoc_insertion_point(field_set_allocated:flyteidl.core.Trigger.trigger_id) -} - -// repeated .flyteidl.core.ArtifactID triggers = 2; -inline int Trigger::triggers_size() const { - return triggers_.size(); -} -inline void Trigger::clear_triggers() { - triggers_.Clear(); -} -inline ::flyteidl::core::ArtifactID* Trigger::mutable_triggers(int index) { - // @@protoc_insertion_point(field_mutable:flyteidl.core.Trigger.triggers) - return triggers_.Mutable(index); -} -inline ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >* -Trigger::mutable_triggers() { - // @@protoc_insertion_point(field_mutable_list:flyteidl.core.Trigger.triggers) - return &triggers_; -} -inline 
const ::flyteidl::core::ArtifactID& Trigger::triggers(int index) const { - // @@protoc_insertion_point(field_get:flyteidl.core.Trigger.triggers) - return triggers_.Get(index); -} -inline ::flyteidl::core::ArtifactID* Trigger::add_triggers() { - // @@protoc_insertion_point(field_add:flyteidl.core.Trigger.triggers) - return triggers_.Add(); -} -inline const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >& -Trigger::triggers() const { - // @@protoc_insertion_point(field_list:flyteidl.core.Trigger.triggers) - return triggers_; -} - #ifdef __GNUC__ #pragma GCC diagnostic pop #endif // __GNUC__ diff --git a/flyteidl/gen/pb-cpp/flyteidl/core/catalog.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/core/catalog.pb.cc index e366a80acd..83c7bb3fe6 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/core/catalog.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/core/catalog.pb.cc @@ -145,18 +145,19 @@ const char descriptor_table_protodef_flyteidl_2fcore_2fcatalog_2eproto[] = "ogReservation\"\207\001\n\006Status\022\030\n\024RESERVATION_" "DISABLED\020\000\022\030\n\024RESERVATION_ACQUIRED\020\001\022\026\n\022" "RESERVATION_EXISTS\020\002\022\030\n\024RESERVATION_RELE" - "ASED\020\003\022\027\n\023RESERVATION_FAILURE\020\004*\240\001\n\022Cata" + "ASED\020\003\022\027\n\023RESERVATION_FAILURE\020\004*\263\001\n\022Cata" "logCacheStatus\022\022\n\016CACHE_DISABLED\020\000\022\016\n\nCA" "CHE_MISS\020\001\022\r\n\tCACHE_HIT\020\002\022\023\n\017CACHE_POPUL" "ATED\020\003\022\030\n\024CACHE_LOOKUP_FAILURE\020\004\022\025\n\021CACH" - "E_PUT_FAILURE\020\005\022\021\n\rCACHE_SKIPPED\020\006B::min(), CatalogCacheStatus_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::google::protobuf::int32>::max() }; bool CatalogCacheStatus_IsValid(int value); const CatalogCacheStatus CatalogCacheStatus_MIN = CACHE_DISABLED; -const CatalogCacheStatus CatalogCacheStatus_MAX = CACHE_SKIPPED; +const CatalogCacheStatus CatalogCacheStatus_MAX = CACHE_EVICTED; const int CatalogCacheStatus_ARRAYSIZE = 
CatalogCacheStatus_MAX + 1; const ::google::protobuf::EnumDescriptor* CatalogCacheStatus_descriptor(); diff --git a/flyteidl/gen/pb-cpp/flyteidl/core/identifier.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/core/identifier.pb.cc index 23adc9e27e..1e16dd7858 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/core/identifier.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/core/identifier.pb.cc @@ -140,6 +140,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fcore_2fidentifier_2eprot PROTOBUF_FIELD_OFFSET(::flyteidl::core::Identifier, domain_), PROTOBUF_FIELD_OFFSET(::flyteidl::core::Identifier, name_), PROTOBUF_FIELD_OFFSET(::flyteidl::core::Identifier, version_), + PROTOBUF_FIELD_OFFSET(::flyteidl::core::Identifier, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::core::WorkflowExecutionIdentifier, _internal_metadata_), ~0u, // no _extensions_ @@ -148,6 +149,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fcore_2fidentifier_2eprot PROTOBUF_FIELD_OFFSET(::flyteidl::core::WorkflowExecutionIdentifier, project_), PROTOBUF_FIELD_OFFSET(::flyteidl::core::WorkflowExecutionIdentifier, domain_), PROTOBUF_FIELD_OFFSET(::flyteidl::core::WorkflowExecutionIdentifier, name_), + PROTOBUF_FIELD_OFFSET(::flyteidl::core::WorkflowExecutionIdentifier, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::flyteidl::core::NodeExecutionIdentifier, _internal_metadata_), ~0u, // no _extensions_ @@ -173,10 +175,10 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fcore_2fidentifier_2eprot }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::core::Identifier)}, - { 10, -1, sizeof(::flyteidl::core::WorkflowExecutionIdentifier)}, - { 18, -1, sizeof(::flyteidl::core::NodeExecutionIdentifier)}, - { 25, -1, sizeof(::flyteidl::core::TaskExecutionIdentifier)}, - { 33, -1, sizeof(::flyteidl::core::SignalIdentifier)}, + { 11, -1, sizeof(::flyteidl::core::WorkflowExecutionIdentifier)}, + { 20, 
-1, sizeof(::flyteidl::core::NodeExecutionIdentifier)}, + { 27, -1, sizeof(::flyteidl::core::TaskExecutionIdentifier)}, + { 35, -1, sizeof(::flyteidl::core::SignalIdentifier)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -195,30 +197,31 @@ ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fcore_2fidentifier_2eproto[] = "\n\036flyteidl/core/identifier.proto\022\rflytei" - "dl.core\"\200\001\n\nIdentifier\0222\n\rresource_type\030" + "dl.core\"\215\001\n\nIdentifier\0222\n\rresource_type\030" "\001 \001(\0162\033.flyteidl.core.ResourceType\022\017\n\007pr" "oject\030\002 \001(\t\022\016\n\006domain\030\003 \001(\t\022\014\n\004name\030\004 \001(" - "\t\022\017\n\007version\030\005 \001(\t\"L\n\033WorkflowExecutionI" - "dentifier\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030\002 \001" - "(\t\022\014\n\004name\030\004 \001(\t\"l\n\027NodeExecutionIdentif" - "ier\022\017\n\007node_id\030\001 \001(\t\022@\n\014execution_id\030\002 \001" - "(\0132*.flyteidl.core.WorkflowExecutionIden" - "tifier\"\237\001\n\027TaskExecutionIdentifier\022*\n\007ta" - "sk_id\030\001 \001(\0132\031.flyteidl.core.Identifier\022A" - "\n\021node_execution_id\030\002 \001(\0132&.flyteidl.cor" - "e.NodeExecutionIdentifier\022\025\n\rretry_attem" - "pt\030\003 \001(\r\"g\n\020SignalIdentifier\022\021\n\tsignal_i" - "d\030\001 \001(\t\022@\n\014execution_id\030\002 \001(\0132*.flyteidl" - ".core.WorkflowExecutionIdentifier*U\n\014Res" - "ourceType\022\017\n\013UNSPECIFIED\020\000\022\010\n\004TASK\020\001\022\014\n\010" - "WORKFLOW\020\002\022\017\n\013LAUNCH_PLAN\020\003\022\013\n\007DATASET\020\004" - "B= 1900 Identifier::Identifier() @@ -291,6 +295,10 @@ Identifier::Identifier(const Identifier& from) if (from.version().size() > 0) { version_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.version_); } + 
org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } resource_type_ = from.resource_type_; // @@protoc_insertion_point(copy_constructor:flyteidl.core.Identifier) } @@ -302,6 +310,7 @@ void Identifier::SharedCtor() { domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; } @@ -315,6 +324,7 @@ void Identifier::SharedDtor() { domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); version_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } void Identifier::SetCachedSize(int size) const { @@ -336,6 +346,7 @@ void Identifier::Clear() { domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); version_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); resource_type_ = 0; _internal_metadata_.Clear(); } @@ -425,6 +436,22 @@ const char* Identifier::_InternalParse(const char* begin, const char* end, void* ptr += size; break; } + // string org = 6; + case 6: { + if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + 
ctx->extra_parse_data().SetFieldName("flyteidl.core.Identifier.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -533,6 +560,21 @@ bool Identifier::MergePartialFromCodedStream( break; } + // string org = 6; + case 6: { + if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.core.Identifier.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -606,6 +648,16 @@ void Identifier::SerializeWithCachedSizes( 5, this->version(), output); } + // string org = 6; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.core.Identifier.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 6, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -669,6 +721,17 @@ ::google::protobuf::uint8* Identifier::InternalSerializeWithCachedSizesToArray( 5, this->version(), target); } + // string org = 6; + if (this->org().size() > 0) { + 
::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.core.Identifier.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 6, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -718,6 +781,13 @@ size_t Identifier::ByteSizeLong() const { this->version()); } + // string org = 6; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + // .flyteidl.core.ResourceType resource_type = 1; if (this->resource_type() != 0) { total_size += 1 + @@ -767,6 +837,10 @@ void Identifier::MergeFrom(const Identifier& from) { version_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.version_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } if (from.resource_type() != 0) { set_resource_type(from.resource_type()); } @@ -805,6 +879,8 @@ void Identifier::InternalSwap(Identifier* other) { GetArenaNoVirtual()); version_.Swap(&other->version_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); swap(resource_type_, other->resource_type_); } @@ -826,6 +902,7 @@ class WorkflowExecutionIdentifier::HasBitSetters { const int WorkflowExecutionIdentifier::kProjectFieldNumber; const int WorkflowExecutionIdentifier::kDomainFieldNumber; const int WorkflowExecutionIdentifier::kNameFieldNumber; +const int WorkflowExecutionIdentifier::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 
WorkflowExecutionIdentifier::WorkflowExecutionIdentifier() @@ -849,6 +926,10 @@ WorkflowExecutionIdentifier::WorkflowExecutionIdentifier(const WorkflowExecution if (from.name().size() > 0) { name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } // @@protoc_insertion_point(copy_constructor:flyteidl.core.WorkflowExecutionIdentifier) } @@ -858,6 +939,7 @@ void WorkflowExecutionIdentifier::SharedCtor() { project_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } WorkflowExecutionIdentifier::~WorkflowExecutionIdentifier() { @@ -869,6 +951,7 @@ void WorkflowExecutionIdentifier::SharedDtor() { project_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } void WorkflowExecutionIdentifier::SetCachedSize(int size) const { @@ -889,6 +972,7 @@ void WorkflowExecutionIdentifier::Clear() { project_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); name_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); 
_internal_metadata_.Clear(); } @@ -953,6 +1037,22 @@ const char* WorkflowExecutionIdentifier::_InternalParse(const char* begin, const ptr += size; break; } + // string org = 5; + case 5: { + if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.core.WorkflowExecutionIdentifier.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -1032,6 +1132,21 @@ bool WorkflowExecutionIdentifier::MergePartialFromCodedStream( break; } + // string org = 5; + case 5: { + if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.core.WorkflowExecutionIdentifier.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -1089,6 +1204,16 @@ void WorkflowExecutionIdentifier::SerializeWithCachedSizes( 4, this->name(), output); } + // string org = 5; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.core.WorkflowExecutionIdentifier.org"); + 
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 5, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -1135,6 +1260,17 @@ ::google::protobuf::uint8* WorkflowExecutionIdentifier::InternalSerializeWithCac 4, this->name(), target); } + // string org = 5; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.core.WorkflowExecutionIdentifier.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 5, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -1177,6 +1313,13 @@ size_t WorkflowExecutionIdentifier::ByteSizeLong() const { this->name()); } + // string org = 5; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; @@ -1216,6 +1359,10 @@ void WorkflowExecutionIdentifier::MergeFrom(const WorkflowExecutionIdentifier& f name_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.name_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } } void WorkflowExecutionIdentifier::CopyFrom(const ::google::protobuf::Message& from) { @@ -1249,6 +1396,8 @@ void WorkflowExecutionIdentifier::InternalSwap(WorkflowExecutionIdentifier* othe GetArenaNoVirtual()); name_.Swap(&other->name_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), 
GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); } ::google::protobuf::Metadata WorkflowExecutionIdentifier::GetMetadata() const { diff --git a/flyteidl/gen/pb-cpp/flyteidl/core/identifier.pb.h b/flyteidl/gen/pb-cpp/flyteidl/core/identifier.pb.h index d3ee554d5a..efcc339874 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/core/identifier.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/core/identifier.pb.h @@ -257,6 +257,20 @@ class Identifier final : ::std::string* release_version(); void set_allocated_version(::std::string* version); + // string org = 6; + void clear_org(); + static const int kOrgFieldNumber = 6; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // .flyteidl.core.ResourceType resource_type = 1; void clear_resource_type(); static const int kResourceTypeFieldNumber = 1; @@ -272,6 +286,7 @@ class Identifier final : ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr name_; ::google::protobuf::internal::ArenaStringPtr version_; + ::google::protobuf::internal::ArenaStringPtr org_; int resource_type_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fcore_2fidentifier_2eproto; @@ -415,6 +430,20 @@ class WorkflowExecutionIdentifier final : ::std::string* release_name(); void set_allocated_name(::std::string* name); + // string org = 5; + void clear_org(); + static const int kOrgFieldNumber = 5; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* 
value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // @@protoc_insertion_point(class_scope:flyteidl.core.WorkflowExecutionIdentifier) private: class HasBitSetters; @@ -423,6 +452,7 @@ class WorkflowExecutionIdentifier final : ::google::protobuf::internal::ArenaStringPtr project_; ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr name_; + ::google::protobuf::internal::ArenaStringPtr org_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fcore_2fidentifier_2eproto; }; @@ -1055,6 +1085,59 @@ inline void Identifier::set_allocated_version(::std::string* version) { // @@protoc_insertion_point(field_set_allocated:flyteidl.core.Identifier.version) } +// string org = 6; +inline void Identifier::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& Identifier::org() const { + // @@protoc_insertion_point(field_get:flyteidl.core.Identifier.org) + return org_.GetNoArena(); +} +inline void Identifier::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.core.Identifier.org) +} +#if LANG_CXX11 +inline void Identifier::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.core.Identifier.org) +} +#endif +inline void Identifier::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.core.Identifier.org) +} +inline void Identifier::set_org(const char* value, size_t size) { + + 
org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.core.Identifier.org) +} +inline ::std::string* Identifier::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.core.Identifier.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* Identifier::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.core.Identifier.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void Identifier::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.core.Identifier.org) +} + // ------------------------------------------------------------------- // WorkflowExecutionIdentifier @@ -1218,6 +1301,59 @@ inline void WorkflowExecutionIdentifier::set_allocated_name(::std::string* name) // @@protoc_insertion_point(field_set_allocated:flyteidl.core.WorkflowExecutionIdentifier.name) } +// string org = 5; +inline void WorkflowExecutionIdentifier::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& WorkflowExecutionIdentifier::org() const { + // @@protoc_insertion_point(field_get:flyteidl.core.WorkflowExecutionIdentifier.org) + return org_.GetNoArena(); +} +inline void WorkflowExecutionIdentifier::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.core.WorkflowExecutionIdentifier.org) +} +#if LANG_CXX11 +inline void WorkflowExecutionIdentifier::set_org(::std::string&& value) { + + org_.SetNoArena( + 
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.core.WorkflowExecutionIdentifier.org) +} +#endif +inline void WorkflowExecutionIdentifier::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.core.WorkflowExecutionIdentifier.org) +} +inline void WorkflowExecutionIdentifier::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.core.WorkflowExecutionIdentifier.org) +} +inline ::std::string* WorkflowExecutionIdentifier::mutable_org() { + + // @@protoc_insertion_point(field_mutable:flyteidl.core.WorkflowExecutionIdentifier.org) + return org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* WorkflowExecutionIdentifier::release_org() { + // @@protoc_insertion_point(field_release:flyteidl.core.WorkflowExecutionIdentifier.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void WorkflowExecutionIdentifier::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:flyteidl.core.WorkflowExecutionIdentifier.org) +} + // ------------------------------------------------------------------- // NodeExecutionIdentifier diff --git a/flyteidl/gen/pb-cpp/flyteidl/core/interface.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/core/interface.pb.cc index 96e2d13b02..3304c00c25 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/core/interface.pb.cc +++ 
b/flyteidl/gen/pb-cpp/flyteidl/core/interface.pb.cc @@ -16,8 +16,8 @@ // @@protoc_insertion_point(includes) #include -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactTag_flyteidl_2fcore_2fartifact_5fid_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_ArtifactQuery_flyteidl_2fcore_2fartifact_5fid_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2finterface_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_ParameterMap_ParametersEntry_DoNotUse_flyteidl_2fcore_2finterface_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2finterface_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_VariableMap_VariablesEntry_DoNotUse_flyteidl_2fcore_2finterface_2eproto; diff --git a/flyteidl/gen/pb-cpp/flyteidl/core/metrics.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/core/metrics.pb.cc index cc19b46d3c..c96ae09bdb 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/core/metrics.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/core/metrics.pb.cc @@ -20,6 +20,7 @@ extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fidentifier_2eproto ::google::p extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fidentifier_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_NodeExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fidentifier_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_TaskExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fmetrics_2eproto 
::google::protobuf::internal::SCCInfo<4> scc_info_Span_flyteidl_2fcore_2fmetrics_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_google_2fprotobuf_2fstruct_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_ListValue_google_2fprotobuf_2fstruct_2eproto; extern PROTOBUF_INTERNAL_EXPORT_google_2fprotobuf_2ftimestamp_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_Timestamp_google_2fprotobuf_2ftimestamp_2eproto; namespace flyteidl { namespace core { @@ -31,6 +32,10 @@ class SpanDefaultTypeInternal { const ::flyteidl::core::TaskExecutionIdentifier* task_id_; ::google::protobuf::internal::ArenaStringPtr operation_id_; } _Span_default_instance_; +class ExecutionMetricResultDefaultTypeInternal { + public: + ::google::protobuf::internal::ExplicitlyConstructed _instance; +} _ExecutionMetricResult_default_instance_; } // namespace core } // namespace flyteidl static void InitDefaultsSpan_flyteidl_2fcore_2fmetrics_2eproto() { @@ -51,11 +56,27 @@ ::google::protobuf::internal::SCCInfo<4> scc_info_Span_flyteidl_2fcore_2fmetrics &scc_info_NodeExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto.base, &scc_info_TaskExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto.base,}}; +static void InitDefaultsExecutionMetricResult_flyteidl_2fcore_2fmetrics_2eproto() { + GOOGLE_PROTOBUF_VERIFY_VERSION; + + { + void* ptr = &::flyteidl::core::_ExecutionMetricResult_default_instance_; + new (ptr) ::flyteidl::core::ExecutionMetricResult(); + ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); + } + ::flyteidl::core::ExecutionMetricResult::InitAsDefaultInstance(); +} + +::google::protobuf::internal::SCCInfo<1> scc_info_ExecutionMetricResult_flyteidl_2fcore_2fmetrics_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsExecutionMetricResult_flyteidl_2fcore_2fmetrics_2eproto}, { + &scc_info_ListValue_google_2fprotobuf_2fstruct_2eproto.base,}}; + void InitDefaults_flyteidl_2fcore_2fmetrics_2eproto() { 
::google::protobuf::internal::InitSCC(&scc_info_Span_flyteidl_2fcore_2fmetrics_2eproto.base); + ::google::protobuf::internal::InitSCC(&scc_info_ExecutionMetricResult_flyteidl_2fcore_2fmetrics_2eproto.base); } -::google::protobuf::Metadata file_level_metadata_flyteidl_2fcore_2fmetrics_2eproto[1]; +::google::protobuf::Metadata file_level_metadata_flyteidl_2fcore_2fmetrics_2eproto[2]; constexpr ::google::protobuf::EnumDescriptor const** file_level_enum_descriptors_flyteidl_2fcore_2fmetrics_2eproto = nullptr; constexpr ::google::protobuf::ServiceDescriptor const** file_level_service_descriptors_flyteidl_2fcore_2fmetrics_2eproto = nullptr; @@ -73,50 +94,63 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fcore_2fmetrics_2eproto:: offsetof(::flyteidl::core::SpanDefaultTypeInternal, operation_id_), PROTOBUF_FIELD_OFFSET(::flyteidl::core::Span, spans_), PROTOBUF_FIELD_OFFSET(::flyteidl::core::Span, id_), + ~0u, // no _has_bits_ + PROTOBUF_FIELD_OFFSET(::flyteidl::core::ExecutionMetricResult, _internal_metadata_), + ~0u, // no _extensions_ + ~0u, // no _oneof_case_ + ~0u, // no _weak_field_map_ + PROTOBUF_FIELD_OFFSET(::flyteidl::core::ExecutionMetricResult, metric_), + PROTOBUF_FIELD_OFFSET(::flyteidl::core::ExecutionMetricResult, data_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::core::Span)}, + { 13, -1, sizeof(::flyteidl::core::ExecutionMetricResult)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { reinterpret_cast(&::flyteidl::core::_Span_default_instance_), + reinterpret_cast(&::flyteidl::core::_ExecutionMetricResult_default_instance_), }; ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_flyteidl_2fcore_2fmetrics_2eproto = { {}, AddDescriptors_flyteidl_2fcore_2fmetrics_2eproto, "flyteidl/core/metrics.proto", schemas, file_default_instances, TableStruct_flyteidl_2fcore_2fmetrics_2eproto::offsets, 
- file_level_metadata_flyteidl_2fcore_2fmetrics_2eproto, 1, file_level_enum_descriptors_flyteidl_2fcore_2fmetrics_2eproto, file_level_service_descriptors_flyteidl_2fcore_2fmetrics_2eproto, + file_level_metadata_flyteidl_2fcore_2fmetrics_2eproto, 2, file_level_enum_descriptors_flyteidl_2fcore_2fmetrics_2eproto, file_level_service_descriptors_flyteidl_2fcore_2fmetrics_2eproto, }; const char descriptor_table_protodef_flyteidl_2fcore_2fmetrics_2eproto[] = "\n\033flyteidl/core/metrics.proto\022\rflyteidl." "core\032\036flyteidl/core/identifier.proto\032\037go" - "ogle/protobuf/timestamp.proto\"\337\002\n\004Span\022." - "\n\nstart_time\030\001 \001(\0132\032.google.protobuf.Tim" - "estamp\022,\n\010end_time\030\002 \001(\0132\032.google.protob" - "uf.Timestamp\022A\n\013workflow_id\030\003 \001(\0132*.flyt" - "eidl.core.WorkflowExecutionIdentifierH\000\022" - "9\n\007node_id\030\004 \001(\0132&.flyteidl.core.NodeExe" - "cutionIdentifierH\000\0229\n\007task_id\030\005 \001(\0132&.fl" - "yteidl.core.TaskExecutionIdentifierH\000\022\026\n" - "\014operation_id\030\006 \001(\tH\000\022\"\n\005spans\030\007 \003(\0132\023.f" - "lyteidl.core.SpanB\004\n\002idBdata_ = const_cast< ::google::protobuf::Struct*>( + ::google::protobuf::Struct::internal_default_instance()); +} +class ExecutionMetricResult::HasBitSetters { + public: + static const ::google::protobuf::Struct& data(const ExecutionMetricResult* msg); +}; + +const ::google::protobuf::Struct& +ExecutionMetricResult::HasBitSetters::data(const ExecutionMetricResult* msg) { + return *msg->data_; +} +void ExecutionMetricResult::clear_data() { + if (GetArenaNoVirtual() == nullptr && data_ != nullptr) { + delete data_; + } + data_ = nullptr; +} +#if !defined(_MSC_VER) || _MSC_VER >= 1900 +const int ExecutionMetricResult::kMetricFieldNumber; +const int ExecutionMetricResult::kDataFieldNumber; +#endif // !defined(_MSC_VER) || _MSC_VER >= 1900 + +ExecutionMetricResult::ExecutionMetricResult() + : ::google::protobuf::Message(), 
_internal_metadata_(nullptr) { + SharedCtor(); + // @@protoc_insertion_point(constructor:flyteidl.core.ExecutionMetricResult) +} +ExecutionMetricResult::ExecutionMetricResult(const ExecutionMetricResult& from) + : ::google::protobuf::Message(), + _internal_metadata_(nullptr) { + _internal_metadata_.MergeFrom(from._internal_metadata_); + metric_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.metric().size() > 0) { + metric_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.metric_); + } + if (from.has_data()) { + data_ = new ::google::protobuf::Struct(*from.data_); + } else { + data_ = nullptr; + } + // @@protoc_insertion_point(copy_constructor:flyteidl.core.ExecutionMetricResult) +} + +void ExecutionMetricResult::SharedCtor() { + ::google::protobuf::internal::InitSCC( + &scc_info_ExecutionMetricResult_flyteidl_2fcore_2fmetrics_2eproto.base); + metric_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + data_ = nullptr; +} + +ExecutionMetricResult::~ExecutionMetricResult() { + // @@protoc_insertion_point(destructor:flyteidl.core.ExecutionMetricResult) + SharedDtor(); +} + +void ExecutionMetricResult::SharedDtor() { + metric_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (this != internal_default_instance()) delete data_; +} + +void ExecutionMetricResult::SetCachedSize(int size) const { + _cached_size_.Set(size); +} +const ExecutionMetricResult& ExecutionMetricResult::default_instance() { + ::google::protobuf::internal::InitSCC(&::scc_info_ExecutionMetricResult_flyteidl_2fcore_2fmetrics_2eproto.base); + return *internal_default_instance(); +} + + +void ExecutionMetricResult::Clear() { +// @@protoc_insertion_point(message_clear_start:flyteidl.core.ExecutionMetricResult) + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + 
metric_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (GetArenaNoVirtual() == nullptr && data_ != nullptr) { + delete data_; + } + data_ = nullptr; + _internal_metadata_.Clear(); +} + +#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +const char* ExecutionMetricResult::_InternalParse(const char* begin, const char* end, void* object, + ::google::protobuf::internal::ParseContext* ctx) { + auto msg = static_cast(object); + ::google::protobuf::int32 size; (void)size; + int depth; (void)depth; + ::google::protobuf::uint32 tag; + ::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end; + auto ptr = begin; + while (ptr < end) { + ptr = ::google::protobuf::io::Parse32(ptr, &tag); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + switch (tag >> 3) { + // string metric = 1; + case 1: { + if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("flyteidl.core.ExecutionMetricResult.metric"); + object = msg->mutable_metric(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } + // .google.protobuf.Struct data = 2; + case 2: { + if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + parser_till_end = ::google::protobuf::Struct::_InternalParse; + object = msg->mutable_data(); + if (size > end - ptr) goto len_delim_till_end; + ptr += size; + GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( + {parser_till_end, object}, ptr - size, 
ptr)); + break; + } + default: { + handle_unusual: + if ((tag & 7) == 4 || tag == 0) { + ctx->EndGroup(tag); + return ptr; + } + auto res = UnknownFieldParse(tag, {_InternalParse, msg}, + ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx); + ptr = res.first; + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr); + if (res.second) return ptr; + } + } // switch + } // while + return ptr; +string_till_end: + static_cast<::std::string*>(object)->clear(); + static_cast<::std::string*>(object)->reserve(size); + goto len_delim_till_end; +len_delim_till_end: + return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg}, + {parser_till_end, object}, size); +} +#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER +bool ExecutionMetricResult::MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) { +#define DO_(EXPRESSION) if (!PROTOBUF_PREDICT_TRUE(EXPRESSION)) goto failure + ::google::protobuf::uint32 tag; + // @@protoc_insertion_point(parse_start:flyteidl.core.ExecutionMetricResult) + for (;;) { + ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); + tag = p.first; + if (!p.second) goto handle_unusual; + switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { + // string metric = 1; + case 1: { + if (static_cast< ::google::protobuf::uint8>(tag) == (10 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_metric())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->metric().data(), static_cast(this->metric().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "flyteidl.core.ExecutionMetricResult.metric")); + } else { + goto handle_unusual; + } + break; + } + + // .google.protobuf.Struct data = 2; + case 2: { + if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( + input, mutable_data())); + } else { + goto 
handle_unusual; + } + break; + } + + default: { + handle_unusual: + if (tag == 0) { + goto success; + } + DO_(::google::protobuf::internal::WireFormat::SkipField( + input, tag, _internal_metadata_.mutable_unknown_fields())); + break; + } + } + } +success: + // @@protoc_insertion_point(parse_success:flyteidl.core.ExecutionMetricResult) + return true; +failure: + // @@protoc_insertion_point(parse_failure:flyteidl.core.ExecutionMetricResult) + return false; +#undef DO_ +} +#endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + +void ExecutionMetricResult::SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const { + // @@protoc_insertion_point(serialize_start:flyteidl.core.ExecutionMetricResult) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // string metric = 1; + if (this->metric().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->metric().data(), static_cast(this->metric().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.core.ExecutionMetricResult.metric"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 1, this->metric(), output); + } + + // .google.protobuf.Struct data = 2; + if (this->has_data()) { + ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( + 2, HasBitSetters::data(this), output); + } + + if (_internal_metadata_.have_unknown_fields()) { + ::google::protobuf::internal::WireFormat::SerializeUnknownFields( + _internal_metadata_.unknown_fields(), output); + } + // @@protoc_insertion_point(serialize_end:flyteidl.core.ExecutionMetricResult) +} + +::google::protobuf::uint8* ExecutionMetricResult::InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const { + // @@protoc_insertion_point(serialize_to_array_start:flyteidl.core.ExecutionMetricResult) + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + // string metric = 1; + if 
(this->metric().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->metric().data(), static_cast(this->metric().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "flyteidl.core.ExecutionMetricResult.metric"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 1, this->metric(), target); + } + + // .google.protobuf.Struct data = 2; + if (this->has_data()) { + target = ::google::protobuf::internal::WireFormatLite:: + InternalWriteMessageToArray( + 2, HasBitSetters::data(this), target); + } + + if (_internal_metadata_.have_unknown_fields()) { + target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( + _internal_metadata_.unknown_fields(), target); + } + // @@protoc_insertion_point(serialize_to_array_end:flyteidl.core.ExecutionMetricResult) + return target; +} + +size_t ExecutionMetricResult::ByteSizeLong() const { +// @@protoc_insertion_point(message_byte_size_start:flyteidl.core.ExecutionMetricResult) + size_t total_size = 0; + + if (_internal_metadata_.have_unknown_fields()) { + total_size += + ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( + _internal_metadata_.unknown_fields()); + } + ::google::protobuf::uint32 cached_has_bits = 0; + // Prevent compiler warnings about cached_has_bits being unused + (void) cached_has_bits; + + // string metric = 1; + if (this->metric().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->metric()); + } + + // .google.protobuf.Struct data = 2; + if (this->has_data()) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::MessageSize( + *data_); + } + + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); + SetCachedSize(cached_size); + return total_size; +} + +void ExecutionMetricResult::MergeFrom(const ::google::protobuf::Message& from) { +// 
@@protoc_insertion_point(generalized_merge_from_start:flyteidl.core.ExecutionMetricResult) + GOOGLE_DCHECK_NE(&from, this); + const ExecutionMetricResult* source = + ::google::protobuf::DynamicCastToGenerated( + &from); + if (source == nullptr) { + // @@protoc_insertion_point(generalized_merge_from_cast_fail:flyteidl.core.ExecutionMetricResult) + ::google::protobuf::internal::ReflectionOps::Merge(from, this); + } else { + // @@protoc_insertion_point(generalized_merge_from_cast_success:flyteidl.core.ExecutionMetricResult) + MergeFrom(*source); + } +} + +void ExecutionMetricResult::MergeFrom(const ExecutionMetricResult& from) { +// @@protoc_insertion_point(class_specific_merge_from_start:flyteidl.core.ExecutionMetricResult) + GOOGLE_DCHECK_NE(&from, this); + _internal_metadata_.MergeFrom(from._internal_metadata_); + ::google::protobuf::uint32 cached_has_bits = 0; + (void) cached_has_bits; + + if (from.metric().size() > 0) { + + metric_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.metric_); + } + if (from.has_data()) { + mutable_data()->::google::protobuf::Struct::MergeFrom(from.data()); + } +} + +void ExecutionMetricResult::CopyFrom(const ::google::protobuf::Message& from) { +// @@protoc_insertion_point(generalized_copy_from_start:flyteidl.core.ExecutionMetricResult) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +void ExecutionMetricResult::CopyFrom(const ExecutionMetricResult& from) { +// @@protoc_insertion_point(class_specific_copy_from_start:flyteidl.core.ExecutionMetricResult) + if (&from == this) return; + Clear(); + MergeFrom(from); +} + +bool ExecutionMetricResult::IsInitialized() const { + return true; +} + +void ExecutionMetricResult::Swap(ExecutionMetricResult* other) { + if (other == this) return; + InternalSwap(other); +} +void ExecutionMetricResult::InternalSwap(ExecutionMetricResult* other) { + using std::swap; + _internal_metadata_.Swap(&other->_internal_metadata_); + 
metric_.Swap(&other->metric_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); + swap(data_, other->data_); +} + +::google::protobuf::Metadata ExecutionMetricResult::GetMetadata() const { + ::google::protobuf::internal::AssignDescriptors(&::assign_descriptors_table_flyteidl_2fcore_2fmetrics_2eproto); + return ::file_level_metadata_flyteidl_2fcore_2fmetrics_2eproto[kIndexInFileMessages]; +} + + // @@protoc_insertion_point(namespace_scope) } // namespace core } // namespace flyteidl @@ -923,6 +1327,9 @@ namespace protobuf { template<> PROTOBUF_NOINLINE ::flyteidl::core::Span* Arena::CreateMaybeMessage< ::flyteidl::core::Span >(Arena* arena) { return Arena::CreateInternal< ::flyteidl::core::Span >(arena); } +template<> PROTOBUF_NOINLINE ::flyteidl::core::ExecutionMetricResult* Arena::CreateMaybeMessage< ::flyteidl::core::ExecutionMetricResult >(Arena* arena) { + return Arena::CreateInternal< ::flyteidl::core::ExecutionMetricResult >(arena); +} } // namespace protobuf } // namespace google diff --git a/flyteidl/gen/pb-cpp/flyteidl/core/metrics.pb.h b/flyteidl/gen/pb-cpp/flyteidl/core/metrics.pb.h index c19d6f944f..8c7140c273 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/core/metrics.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/core/metrics.pb.h @@ -33,6 +33,7 @@ #include #include "flyteidl/core/identifier.pb.h" #include +#include // @@protoc_insertion_point(includes) #include #define PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fmetrics_2eproto @@ -43,7 +44,7 @@ struct TableStruct_flyteidl_2fcore_2fmetrics_2eproto { PROTOBUF_SECTION_VARIABLE(protodesc_cold); static const ::google::protobuf::internal::AuxillaryParseTableField aux[] PROTOBUF_SECTION_VARIABLE(protodesc_cold); - static const ::google::protobuf::internal::ParseTable schema[1] + static const ::google::protobuf::internal::ParseTable schema[2] PROTOBUF_SECTION_VARIABLE(protodesc_cold); static const ::google::protobuf::internal::FieldMetadata field_metadata[]; static const 
::google::protobuf::internal::SerializationTable serialization_table[]; @@ -52,6 +53,9 @@ struct TableStruct_flyteidl_2fcore_2fmetrics_2eproto { void AddDescriptors_flyteidl_2fcore_2fmetrics_2eproto(); namespace flyteidl { namespace core { +class ExecutionMetricResult; +class ExecutionMetricResultDefaultTypeInternal; +extern ExecutionMetricResultDefaultTypeInternal _ExecutionMetricResult_default_instance_; class Span; class SpanDefaultTypeInternal; extern SpanDefaultTypeInternal _Span_default_instance_; @@ -59,6 +63,7 @@ extern SpanDefaultTypeInternal _Span_default_instance_; } // namespace flyteidl namespace google { namespace protobuf { +template<> ::flyteidl::core::ExecutionMetricResult* Arena::CreateMaybeMessage<::flyteidl::core::ExecutionMetricResult>(Arena*); template<> ::flyteidl::core::Span* Arena::CreateMaybeMessage<::flyteidl::core::Span>(Arena*); } // namespace protobuf } // namespace google @@ -273,6 +278,136 @@ class Span final : friend struct ::TableStruct_flyteidl_2fcore_2fmetrics_2eproto; }; +// ------------------------------------------------------------------- + +class ExecutionMetricResult final : + public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:flyteidl.core.ExecutionMetricResult) */ { + public: + ExecutionMetricResult(); + virtual ~ExecutionMetricResult(); + + ExecutionMetricResult(const ExecutionMetricResult& from); + + inline ExecutionMetricResult& operator=(const ExecutionMetricResult& from) { + CopyFrom(from); + return *this; + } + #if LANG_CXX11 + ExecutionMetricResult(ExecutionMetricResult&& from) noexcept + : ExecutionMetricResult() { + *this = ::std::move(from); + } + + inline ExecutionMetricResult& operator=(ExecutionMetricResult&& from) noexcept { + if (GetArenaNoVirtual() == from.GetArenaNoVirtual()) { + if (this != &from) InternalSwap(&from); + } else { + CopyFrom(from); + } + return *this; + } + #endif + static const ::google::protobuf::Descriptor* descriptor() { + return 
default_instance().GetDescriptor(); + } + static const ExecutionMetricResult& default_instance(); + + static void InitAsDefaultInstance(); // FOR INTERNAL USE ONLY + static inline const ExecutionMetricResult* internal_default_instance() { + return reinterpret_cast( + &_ExecutionMetricResult_default_instance_); + } + static constexpr int kIndexInFileMessages = + 1; + + void Swap(ExecutionMetricResult* other); + friend void swap(ExecutionMetricResult& a, ExecutionMetricResult& b) { + a.Swap(&b); + } + + // implements Message ---------------------------------------------- + + inline ExecutionMetricResult* New() const final { + return CreateMaybeMessage(nullptr); + } + + ExecutionMetricResult* New(::google::protobuf::Arena* arena) const final { + return CreateMaybeMessage(arena); + } + void CopyFrom(const ::google::protobuf::Message& from) final; + void MergeFrom(const ::google::protobuf::Message& from) final; + void CopyFrom(const ExecutionMetricResult& from); + void MergeFrom(const ExecutionMetricResult& from); + PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final; + bool IsInitialized() const final; + + size_t ByteSizeLong() const final; + #if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx); + ::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; } + #else + bool MergePartialFromCodedStream( + ::google::protobuf::io::CodedInputStream* input) final; + #endif // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER + void SerializeWithCachedSizes( + ::google::protobuf::io::CodedOutputStream* output) const final; + ::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray( + ::google::protobuf::uint8* target) const final; + int GetCachedSize() const final { return _cached_size_.Get(); } + + private: + void SharedCtor(); + void SharedDtor(); + void SetCachedSize(int size) const final; + void 
InternalSwap(ExecutionMetricResult* other); + private: + inline ::google::protobuf::Arena* GetArenaNoVirtual() const { + return nullptr; + } + inline void* MaybeArenaPtr() const { + return nullptr; + } + public: + + ::google::protobuf::Metadata GetMetadata() const final; + + // nested types ---------------------------------------------------- + + // accessors ------------------------------------------------------- + + // string metric = 1; + void clear_metric(); + static const int kMetricFieldNumber = 1; + const ::std::string& metric() const; + void set_metric(const ::std::string& value); + #if LANG_CXX11 + void set_metric(::std::string&& value); + #endif + void set_metric(const char* value); + void set_metric(const char* value, size_t size); + ::std::string* mutable_metric(); + ::std::string* release_metric(); + void set_allocated_metric(::std::string* metric); + + // .google.protobuf.Struct data = 2; + bool has_data() const; + void clear_data(); + static const int kDataFieldNumber = 2; + const ::google::protobuf::Struct& data() const; + ::google::protobuf::Struct* release_data(); + ::google::protobuf::Struct* mutable_data(); + void set_allocated_data(::google::protobuf::Struct* data); + + // @@protoc_insertion_point(class_scope:flyteidl.core.ExecutionMetricResult) + private: + class HasBitSetters; + + ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; + ::google::protobuf::internal::ArenaStringPtr metric_; + ::google::protobuf::Struct* data_; + mutable ::google::protobuf::internal::CachedSize _cached_size_; + friend struct ::TableStruct_flyteidl_2fcore_2fmetrics_2eproto; +}; // =================================================================== @@ -612,9 +747,114 @@ inline void Span::clear_has_id() { inline Span::IdCase Span::id_case() const { return Span::IdCase(_oneof_case_[0]); } +// ------------------------------------------------------------------- + +// ExecutionMetricResult + +// string metric = 1; +inline void 
ExecutionMetricResult::clear_metric() { + metric_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& ExecutionMetricResult::metric() const { + // @@protoc_insertion_point(field_get:flyteidl.core.ExecutionMetricResult.metric) + return metric_.GetNoArena(); +} +inline void ExecutionMetricResult::set_metric(const ::std::string& value) { + + metric_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:flyteidl.core.ExecutionMetricResult.metric) +} +#if LANG_CXX11 +inline void ExecutionMetricResult::set_metric(::std::string&& value) { + + metric_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:flyteidl.core.ExecutionMetricResult.metric) +} +#endif +inline void ExecutionMetricResult::set_metric(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + metric_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:flyteidl.core.ExecutionMetricResult.metric) +} +inline void ExecutionMetricResult::set_metric(const char* value, size_t size) { + + metric_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:flyteidl.core.ExecutionMetricResult.metric) +} +inline ::std::string* ExecutionMetricResult::mutable_metric() { + + // @@protoc_insertion_point(field_mutable:flyteidl.core.ExecutionMetricResult.metric) + return metric_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* ExecutionMetricResult::release_metric() { + // @@protoc_insertion_point(field_release:flyteidl.core.ExecutionMetricResult.metric) + + return metric_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} 
+inline void ExecutionMetricResult::set_allocated_metric(::std::string* metric) { + if (metric != nullptr) { + + } else { + + } + metric_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), metric); + // @@protoc_insertion_point(field_set_allocated:flyteidl.core.ExecutionMetricResult.metric) +} + +// .google.protobuf.Struct data = 2; +inline bool ExecutionMetricResult::has_data() const { + return this != internal_default_instance() && data_ != nullptr; +} +inline const ::google::protobuf::Struct& ExecutionMetricResult::data() const { + const ::google::protobuf::Struct* p = data_; + // @@protoc_insertion_point(field_get:flyteidl.core.ExecutionMetricResult.data) + return p != nullptr ? *p : *reinterpret_cast( + &::google::protobuf::_Struct_default_instance_); +} +inline ::google::protobuf::Struct* ExecutionMetricResult::release_data() { + // @@protoc_insertion_point(field_release:flyteidl.core.ExecutionMetricResult.data) + + ::google::protobuf::Struct* temp = data_; + data_ = nullptr; + return temp; +} +inline ::google::protobuf::Struct* ExecutionMetricResult::mutable_data() { + + if (data_ == nullptr) { + auto* p = CreateMaybeMessage<::google::protobuf::Struct>(GetArenaNoVirtual()); + data_ = p; + } + // @@protoc_insertion_point(field_mutable:flyteidl.core.ExecutionMetricResult.data) + return data_; +} +inline void ExecutionMetricResult::set_allocated_data(::google::protobuf::Struct* data) { + ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); + if (message_arena == nullptr) { + delete reinterpret_cast< ::google::protobuf::MessageLite*>(data_); + } + if (data) { + ::google::protobuf::Arena* submessage_arena = + reinterpret_cast<::google::protobuf::MessageLite*>(data)->GetArena(); + if (message_arena != submessage_arena) { + data = ::google::protobuf::internal::GetOwnedMessage( + message_arena, data, submessage_arena); + } + + } else { + + } + data_ = data; + // 
@@protoc_insertion_point(field_set_allocated:flyteidl.core.ExecutionMetricResult.data) +} + #ifdef __GNUC__ #pragma GCC diagnostic pop #endif // __GNUC__ +// ------------------------------------------------------------------- + // @@protoc_insertion_point(namespace_scope) diff --git a/flyteidl/gen/pb-cpp/flyteidl/datacatalog/datacatalog.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/datacatalog/datacatalog.pb.cc index dbd20da087..1ce63673cb 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/datacatalog/datacatalog.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/datacatalog/datacatalog.pb.cc @@ -200,6 +200,7 @@ class DatasetPropertyFilterDefaultTypeInternal { ::google::protobuf::internal::ArenaStringPtr name_; ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr version_; + ::google::protobuf::internal::ArenaStringPtr org_; } _DatasetPropertyFilter_default_instance_; class PaginationOptionsDefaultTypeInternal { public: @@ -1000,6 +1001,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fdatacatalog_2fdatacatalo PROTOBUF_FIELD_OFFSET(::datacatalog::DatasetID, domain_), PROTOBUF_FIELD_OFFSET(::datacatalog::DatasetID, version_), PROTOBUF_FIELD_OFFSET(::datacatalog::DatasetID, uuid_), + PROTOBUF_FIELD_OFFSET(::datacatalog::DatasetID, org_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::datacatalog::Artifact, _internal_metadata_), ~0u, // no _extensions_ @@ -1096,6 +1098,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fdatacatalog_2fdatacatalo offsetof(::datacatalog::DatasetPropertyFilterDefaultTypeInternal, name_), offsetof(::datacatalog::DatasetPropertyFilterDefaultTypeInternal, domain_), offsetof(::datacatalog::DatasetPropertyFilterDefaultTypeInternal, version_), + offsetof(::datacatalog::DatasetPropertyFilterDefaultTypeInternal, org_), PROTOBUF_FIELD_OFFSET(::datacatalog::DatasetPropertyFilter, property_), ~0u, // no _has_bits_ PROTOBUF_FIELD_OFFSET(::datacatalog::PaginationOptions, _internal_metadata_), @@ -1133,19 +1136,19 
@@ static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SE { 149, -1, sizeof(::datacatalog::Dataset)}, { 157, -1, sizeof(::datacatalog::Partition)}, { 164, -1, sizeof(::datacatalog::DatasetID)}, - { 174, -1, sizeof(::datacatalog::Artifact)}, - { 186, -1, sizeof(::datacatalog::ArtifactData)}, - { 193, -1, sizeof(::datacatalog::Tag)}, - { 201, 208, sizeof(::datacatalog::Metadata_KeyMapEntry_DoNotUse)}, - { 210, -1, sizeof(::datacatalog::Metadata)}, - { 216, -1, sizeof(::datacatalog::FilterExpression)}, - { 222, -1, sizeof(::datacatalog::SinglePropertyFilter)}, - { 233, -1, sizeof(::datacatalog::ArtifactPropertyFilter)}, - { 240, -1, sizeof(::datacatalog::TagPropertyFilter)}, - { 247, -1, sizeof(::datacatalog::PartitionPropertyFilter)}, - { 254, -1, sizeof(::datacatalog::KeyValuePair)}, - { 261, -1, sizeof(::datacatalog::DatasetPropertyFilter)}, - { 271, -1, sizeof(::datacatalog::PaginationOptions)}, + { 175, -1, sizeof(::datacatalog::Artifact)}, + { 187, -1, sizeof(::datacatalog::ArtifactData)}, + { 194, -1, sizeof(::datacatalog::Tag)}, + { 202, 209, sizeof(::datacatalog::Metadata_KeyMapEntry_DoNotUse)}, + { 211, -1, sizeof(::datacatalog::Metadata)}, + { 217, -1, sizeof(::datacatalog::FilterExpression)}, + { 223, -1, sizeof(::datacatalog::SinglePropertyFilter)}, + { 234, -1, sizeof(::datacatalog::ArtifactPropertyFilter)}, + { 241, -1, sizeof(::datacatalog::TagPropertyFilter)}, + { 248, -1, sizeof(::datacatalog::PartitionPropertyFilter)}, + { 255, -1, sizeof(::datacatalog::KeyValuePair)}, + { 262, -1, sizeof(::datacatalog::DatasetPropertyFilter)}, + { 273, -1, sizeof(::datacatalog::PaginationOptions)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -1252,78 +1255,79 @@ const char descriptor_table_protodef_flyteidl_2fdatacatalog_2fdatacatalog_2eprot "datacatalog.DatasetID\022\'\n\010metadata\030\002 \001(\0132" "\025.datacatalog.Metadata\022\025\n\rpartitionKeys\030" "\003 
\003(\t\"\'\n\tPartition\022\013\n\003key\030\001 \001(\t\022\r\n\005value" - "\030\002 \001(\t\"Y\n\tDatasetID\022\017\n\007project\030\001 \001(\t\022\014\n\004" + "\030\002 \001(\t\"f\n\tDatasetID\022\017\n\007project\030\001 \001(\t\022\014\n\004" "name\030\002 \001(\t\022\016\n\006domain\030\003 \001(\t\022\017\n\007version\030\004 " - "\001(\t\022\014\n\004UUID\030\005 \001(\t\"\215\002\n\010Artifact\022\n\n\002id\030\001 \001" - "(\t\022\'\n\007dataset\030\002 \001(\0132\026.datacatalog.Datase" - "tID\022\'\n\004data\030\003 \003(\0132\031.datacatalog.Artifact" - "Data\022\'\n\010metadata\030\004 \001(\0132\025.datacatalog.Met" - "adata\022*\n\npartitions\030\005 \003(\0132\026.datacatalog." - "Partition\022\036\n\004tags\030\006 \003(\0132\020.datacatalog.Ta" - "g\022.\n\ncreated_at\030\007 \001(\0132\032.google.protobuf." - "Timestamp\"C\n\014ArtifactData\022\014\n\004name\030\001 \001(\t\022" - "%\n\005value\030\002 \001(\0132\026.flyteidl.core.Literal\"Q" - "\n\003Tag\022\014\n\004name\030\001 \001(\t\022\023\n\013artifact_id\030\002 \001(\t" - "\022\'\n\007dataset\030\003 \001(\0132\026.datacatalog.DatasetI" - "D\"m\n\010Metadata\0222\n\007key_map\030\001 \003(\0132!.datacat" - "alog.Metadata.KeyMapEntry\032-\n\013KeyMapEntry" - "\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"F\n\020Filt" - "erExpression\0222\n\007filters\030\001 \003(\0132!.datacata" - "log.SinglePropertyFilter\"\211\003\n\024SinglePrope" - "rtyFilter\0224\n\ntag_filter\030\001 \001(\0132\036.datacata" - "log.TagPropertyFilterH\000\022@\n\020partition_fil" - "ter\030\002 \001(\0132$.datacatalog.PartitionPropert" - "yFilterH\000\022>\n\017artifact_filter\030\003 \001(\0132#.dat" - "acatalog.ArtifactPropertyFilterH\000\022<\n\016dat" - "aset_filter\030\004 \001(\0132\".datacatalog.DatasetP" - "ropertyFilterH\000\022F\n\010operator\030\n \001(\01624.data" - "catalog.SinglePropertyFilter.ComparisonO" - "perator\" 
\n\022ComparisonOperator\022\n\n\006EQUALS\020" - "\000B\021\n\017property_filter\";\n\026ArtifactProperty" - "Filter\022\025\n\013artifact_id\030\001 \001(\tH\000B\n\n\010propert" - "y\"3\n\021TagPropertyFilter\022\022\n\010tag_name\030\001 \001(\t" - "H\000B\n\n\010property\"S\n\027PartitionPropertyFilte" - "r\022,\n\007key_val\030\001 \001(\0132\031.datacatalog.KeyValu" - "ePairH\000B\n\n\010property\"*\n\014KeyValuePair\022\013\n\003k" - "ey\030\001 \001(\t\022\r\n\005value\030\002 \001(\t\"k\n\025DatasetProper" - "tyFilter\022\021\n\007project\030\001 \001(\tH\000\022\016\n\004name\030\002 \001(" - "\tH\000\022\020\n\006domain\030\003 \001(\tH\000\022\021\n\007version\030\004 \001(\tH\000" - "B\n\n\010property\"\361\001\n\021PaginationOptions\022\r\n\005li" - "mit\030\001 \001(\r\022\r\n\005token\030\002 \001(\t\0227\n\007sortKey\030\003 \001(" - "\0162&.datacatalog.PaginationOptions.SortKe" - "y\022;\n\tsortOrder\030\004 \001(\0162(.datacatalog.Pagin" - "ationOptions.SortOrder\"*\n\tSortOrder\022\016\n\nD" - "ESCENDING\020\000\022\r\n\tASCENDING\020\001\"\034\n\007SortKey\022\021\n" - "\rCREATION_TIME\020\0002\206\007\n\013DataCatalog\022V\n\rCrea" - "teDataset\022!.datacatalog.CreateDatasetReq" - "uest\032\".datacatalog.CreateDatasetResponse" - "\022M\n\nGetDataset\022\036.datacatalog.GetDatasetR" - "equest\032\037.datacatalog.GetDatasetResponse\022" - "Y\n\016CreateArtifact\022\".datacatalog.CreateAr" - "tifactRequest\032#.datacatalog.CreateArtifa" - "ctResponse\022P\n\013GetArtifact\022\037.datacatalog." 
- "GetArtifactRequest\032 .datacatalog.GetArti" - "factResponse\022A\n\006AddTag\022\032.datacatalog.Add" - "TagRequest\032\033.datacatalog.AddTagResponse\022" - "V\n\rListArtifacts\022!.datacatalog.ListArtif" - "actsRequest\032\".datacatalog.ListArtifactsR" - "esponse\022S\n\014ListDatasets\022 .datacatalog.Li" - "stDatasetsRequest\032!.datacatalog.ListData" - "setsResponse\022Y\n\016UpdateArtifact\022\".datacat" - "alog.UpdateArtifactRequest\032#.datacatalog" - ".UpdateArtifactResponse\022q\n\026GetOrExtendRe" - "servation\022*.datacatalog.GetOrExtendReser" - "vationRequest\032+.datacatalog.GetOrExtendR" - "eservationResponse\022e\n\022ReleaseReservation" - "\022&.datacatalog.ReleaseReservationRequest" - "\032\'.datacatalog.ReleaseReservationRespons" - "eBCZAgithub.com/flyteorg/flyte/flyteidl/" - "gen/pb-go/flyteidl/datacatalogb\006proto3" + "\001(\t\022\014\n\004UUID\030\005 \001(\t\022\013\n\003org\030\006 \001(\t\"\215\002\n\010Artif" + "act\022\n\n\002id\030\001 \001(\t\022\'\n\007dataset\030\002 \001(\0132\026.datac" + "atalog.DatasetID\022\'\n\004data\030\003 \003(\0132\031.datacat" + "alog.ArtifactData\022\'\n\010metadata\030\004 \001(\0132\025.da" + "tacatalog.Metadata\022*\n\npartitions\030\005 \003(\0132\026" + ".datacatalog.Partition\022\036\n\004tags\030\006 \003(\0132\020.d" + "atacatalog.Tag\022.\n\ncreated_at\030\007 \001(\0132\032.goo" + "gle.protobuf.Timestamp\"C\n\014ArtifactData\022\014" + "\n\004name\030\001 \001(\t\022%\n\005value\030\002 \001(\0132\026.flyteidl.c" + "ore.Literal\"Q\n\003Tag\022\014\n\004name\030\001 \001(\t\022\023\n\013arti" + "fact_id\030\002 \001(\t\022\'\n\007dataset\030\003 \001(\0132\026.datacat" + "alog.DatasetID\"m\n\010Metadata\0222\n\007key_map\030\001 " + "\003(\0132!.datacatalog.Metadata.KeyMapEntry\032-" + "\n\013KeyMapEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(" + "\t:\0028\001\"F\n\020FilterExpression\0222\n\007filters\030\001 \003" + 
"(\0132!.datacatalog.SinglePropertyFilter\"\211\003" + "\n\024SinglePropertyFilter\0224\n\ntag_filter\030\001 \001" + "(\0132\036.datacatalog.TagPropertyFilterH\000\022@\n\020" + "partition_filter\030\002 \001(\0132$.datacatalog.Par" + "titionPropertyFilterH\000\022>\n\017artifact_filte" + "r\030\003 \001(\0132#.datacatalog.ArtifactPropertyFi" + "lterH\000\022<\n\016dataset_filter\030\004 \001(\0132\".datacat" + "alog.DatasetPropertyFilterH\000\022F\n\010operator" + "\030\n \001(\01624.datacatalog.SinglePropertyFilte" + "r.ComparisonOperator\" \n\022ComparisonOperat" + "or\022\n\n\006EQUALS\020\000B\021\n\017property_filter\";\n\026Art" + "ifactPropertyFilter\022\025\n\013artifact_id\030\001 \001(\t" + "H\000B\n\n\010property\"3\n\021TagPropertyFilter\022\022\n\010t" + "ag_name\030\001 \001(\tH\000B\n\n\010property\"S\n\027Partition" + "PropertyFilter\022,\n\007key_val\030\001 \001(\0132\031.dataca" + "talog.KeyValuePairH\000B\n\n\010property\"*\n\014KeyV" + "aluePair\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t\"z\n\025" + "DatasetPropertyFilter\022\021\n\007project\030\001 \001(\tH\000" + "\022\016\n\004name\030\002 \001(\tH\000\022\020\n\006domain\030\003 \001(\tH\000\022\021\n\007ve" + "rsion\030\004 \001(\tH\000\022\r\n\003org\030\005 \001(\tH\000B\n\n\010property" + "\"\361\001\n\021PaginationOptions\022\r\n\005limit\030\001 \001(\r\022\r\n" + "\005token\030\002 \001(\t\0227\n\007sortKey\030\003 \001(\0162&.datacata" + "log.PaginationOptions.SortKey\022;\n\tsortOrd" + "er\030\004 \001(\0162(.datacatalog.PaginationOptions" + ".SortOrder\"*\n\tSortOrder\022\016\n\nDESCENDING\020\000\022" + "\r\n\tASCENDING\020\001\"\034\n\007SortKey\022\021\n\rCREATION_TI" + "ME\020\0002\206\007\n\013DataCatalog\022V\n\rCreateDataset\022!." 
+ "datacatalog.CreateDatasetRequest\032\".datac" + "atalog.CreateDatasetResponse\022M\n\nGetDatas" + "et\022\036.datacatalog.GetDatasetRequest\032\037.dat" + "acatalog.GetDatasetResponse\022Y\n\016CreateArt" + "ifact\022\".datacatalog.CreateArtifactReques" + "t\032#.datacatalog.CreateArtifactResponse\022P" + "\n\013GetArtifact\022\037.datacatalog.GetArtifactR" + "equest\032 .datacatalog.GetArtifactResponse" + "\022A\n\006AddTag\022\032.datacatalog.AddTagRequest\032\033" + ".datacatalog.AddTagResponse\022V\n\rListArtif" + "acts\022!.datacatalog.ListArtifactsRequest\032" + "\".datacatalog.ListArtifactsResponse\022S\n\014L" + "istDatasets\022 .datacatalog.ListDatasetsRe" + "quest\032!.datacatalog.ListDatasetsResponse" + "\022Y\n\016UpdateArtifact\022\".datacatalog.UpdateA" + "rtifactRequest\032#.datacatalog.UpdateArtif" + "actResponse\022q\n\026GetOrExtendReservation\022*." + "datacatalog.GetOrExtendReservationReques" + "t\032+.datacatalog.GetOrExtendReservationRe" + "sponse\022e\n\022ReleaseReservation\022&.datacatal" + "og.ReleaseReservationRequest\032\'.datacatal" + "og.ReleaseReservationResponseBCZAgithub." 
+ "com/flyteorg/flyte/flyteidl/gen/pb-go/fl" + "yteidl/datacatalogb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fdatacatalog_2fdatacatalog_2eproto = { false, InitDefaults_flyteidl_2fdatacatalog_2fdatacatalog_2eproto, descriptor_table_protodef_flyteidl_2fdatacatalog_2fdatacatalog_2eproto, - "flyteidl/datacatalog/datacatalog.proto", &assign_descriptors_table_flyteidl_2fdatacatalog_2fdatacatalog_2eproto, 4918, + "flyteidl/datacatalog/datacatalog.proto", &assign_descriptors_table_flyteidl_2fdatacatalog_2fdatacatalog_2eproto, 4946, }; void AddDescriptors_flyteidl_2fdatacatalog_2fdatacatalog_2eproto() { @@ -9658,6 +9662,7 @@ const int DatasetID::kNameFieldNumber; const int DatasetID::kDomainFieldNumber; const int DatasetID::kVersionFieldNumber; const int DatasetID::kUUIDFieldNumber; +const int DatasetID::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 DatasetID::DatasetID() @@ -9689,6 +9694,10 @@ DatasetID::DatasetID(const DatasetID& from) if (from.uuid().size() > 0) { uuid_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.uuid_); } + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + if (from.org().size() > 0) { + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } // @@protoc_insertion_point(copy_constructor:datacatalog.DatasetID) } @@ -9700,6 +9709,7 @@ void DatasetID::SharedCtor() { domain_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); version_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); uuid_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } DatasetID::~DatasetID() { @@ -9713,6 +9723,7 @@ void DatasetID::SharedDtor() { domain_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); 
version_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); uuid_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } void DatasetID::SetCachedSize(int size) const { @@ -9735,6 +9746,7 @@ void DatasetID::Clear() { domain_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); version_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); uuid_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); _internal_metadata_.Clear(); } @@ -9831,6 +9843,22 @@ const char* DatasetID::_InternalParse(const char* begin, const char* end, void* ptr += size; break; } + // string org = 6; + case 6: { + if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("datacatalog.DatasetID.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -9940,6 +9968,21 @@ bool DatasetID::MergePartialFromCodedStream( break; } + // string org = 6; + case 6: { + if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), 
static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "datacatalog.DatasetID.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -10017,6 +10060,16 @@ void DatasetID::SerializeWithCachedSizes( 5, this->uuid(), output); } + // string org = 6; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "datacatalog.DatasetID.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 6, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -10085,6 +10138,17 @@ ::google::protobuf::uint8* DatasetID::InternalSerializeWithCachedSizesToArray( 5, this->uuid(), target); } + // string org = 6; + if (this->org().size() > 0) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "datacatalog.DatasetID.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 6, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -10141,6 +10205,13 @@ size_t DatasetID::ByteSizeLong() const { this->uuid()); } + // string org = 6; + if (this->org().size() > 0) { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + } + int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; @@ -10188,6 +10259,10 @@ void DatasetID::MergeFrom(const DatasetID& from) { 
uuid_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.uuid_); } + if (from.org().size() > 0) { + + org_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.org_); + } } void DatasetID::CopyFrom(const ::google::protobuf::Message& from) { @@ -10225,6 +10300,8 @@ void DatasetID::InternalSwap(DatasetID* other) { GetArenaNoVirtual()); uuid_.Swap(&other->uuid_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); + org_.Swap(&other->org_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), + GetArenaNoVirtual()); } ::google::protobuf::Metadata DatasetID::GetMetadata() const { @@ -14418,6 +14495,8 @@ void DatasetPropertyFilter::InitAsDefaultInstance() { &::google::protobuf::internal::GetEmptyStringAlreadyInited()); ::datacatalog::_DatasetPropertyFilter_default_instance_.version_.UnsafeSetDefault( &::google::protobuf::internal::GetEmptyStringAlreadyInited()); + ::datacatalog::_DatasetPropertyFilter_default_instance_.org_.UnsafeSetDefault( + &::google::protobuf::internal::GetEmptyStringAlreadyInited()); } class DatasetPropertyFilter::HasBitSetters { public: @@ -14428,6 +14507,7 @@ const int DatasetPropertyFilter::kProjectFieldNumber; const int DatasetPropertyFilter::kNameFieldNumber; const int DatasetPropertyFilter::kDomainFieldNumber; const int DatasetPropertyFilter::kVersionFieldNumber; +const int DatasetPropertyFilter::kOrgFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 DatasetPropertyFilter::DatasetPropertyFilter() @@ -14457,6 +14537,10 @@ DatasetPropertyFilter::DatasetPropertyFilter(const DatasetPropertyFilter& from) set_version(from.version()); break; } + case kOrg: { + set_org(from.org()); + break; + } case PROPERTY_NOT_SET: { break; } @@ -14509,6 +14593,10 @@ void DatasetPropertyFilter::clear_property() { property_.version_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); break; } + case kOrg: { + 
property_.org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + break; + } case PROPERTY_NOT_SET: { break; } @@ -14604,6 +14692,22 @@ const char* DatasetPropertyFilter::_InternalParse(const char* begin, const char* ptr += size; break; } + // string org = 5; + case 5: { + if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; + ptr = ::google::protobuf::io::ReadSize(ptr, &size); + GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); + ctx->extra_parse_data().SetFieldName("datacatalog.DatasetPropertyFilter.org"); + object = msg->mutable_org(); + if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { + parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; + goto string_till_end; + } + GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx)); + ::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx); + ptr += size; + break; + } default: { handle_unusual: if ((tag & 7) == 4 || tag == 0) { @@ -14698,6 +14802,21 @@ bool DatasetPropertyFilter::MergePartialFromCodedStream( break; } + // string org = 5; + case 5: { + if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { + DO_(::google::protobuf::internal::WireFormatLite::ReadString( + input, this->mutable_org())); + DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::PARSE, + "datacatalog.DatasetPropertyFilter.org")); + } else { + goto handle_unusual; + } + break; + } + default: { handle_unusual: if (tag == 0) { @@ -14765,6 +14884,16 @@ void DatasetPropertyFilter::SerializeWithCachedSizes( 4, this->version(), output); } + // string org = 5; + if (has_org()) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + 
"datacatalog.DatasetPropertyFilter.org"); + ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( + 5, this->org(), output); + } + if (_internal_metadata_.have_unknown_fields()) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( _internal_metadata_.unknown_fields(), output); @@ -14822,6 +14951,17 @@ ::google::protobuf::uint8* DatasetPropertyFilter::InternalSerializeWithCachedSiz 4, this->version(), target); } + // string org = 5; + if (has_org()) { + ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( + this->org().data(), static_cast(this->org().length()), + ::google::protobuf::internal::WireFormatLite::SERIALIZE, + "datacatalog.DatasetPropertyFilter.org"); + target = + ::google::protobuf::internal::WireFormatLite::WriteStringToArray( + 5, this->org(), target); + } + if (_internal_metadata_.have_unknown_fields()) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( _internal_metadata_.unknown_fields(), target); @@ -14872,6 +15012,13 @@ size_t DatasetPropertyFilter::ByteSizeLong() const { this->version()); break; } + // string org = 5; + case kOrg: { + total_size += 1 + + ::google::protobuf::internal::WireFormatLite::StringSize( + this->org()); + break; + } case PROPERTY_NOT_SET: { break; } @@ -14920,6 +15067,10 @@ void DatasetPropertyFilter::MergeFrom(const DatasetPropertyFilter& from) { set_version(from.version()); break; } + case kOrg: { + set_org(from.org()); + break; + } case PROPERTY_NOT_SET: { break; } diff --git a/flyteidl/gen/pb-cpp/flyteidl/datacatalog/datacatalog.pb.h b/flyteidl/gen/pb-cpp/flyteidl/datacatalog/datacatalog.pb.h index da52d149e1..27a69c19e7 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/datacatalog/datacatalog.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/datacatalog/datacatalog.pb.h @@ -3518,6 +3518,20 @@ class DatasetID final : ::std::string* release_uuid(); void set_allocated_uuid(::std::string* uuid); + // string org = 6; + void clear_org(); + static const int 
kOrgFieldNumber = 6; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + // @@protoc_insertion_point(class_scope:datacatalog.DatasetID) private: class HasBitSetters; @@ -3528,6 +3542,7 @@ class DatasetID final : ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr version_; ::google::protobuf::internal::ArenaStringPtr uuid_; + ::google::protobuf::internal::ArenaStringPtr org_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fdatacatalog_2fdatacatalog_2eproto; }; @@ -5039,6 +5054,7 @@ class DatasetPropertyFilter final : kName = 2, kDomain = 3, kVersion = 4, + kOrg = 5, PROPERTY_NOT_SET = 0, }; @@ -5173,6 +5189,23 @@ class DatasetPropertyFilter final : ::std::string* release_version(); void set_allocated_version(::std::string* version); + // string org = 5; + private: + bool has_org() const; + public: + void clear_org(); + static const int kOrgFieldNumber = 5; + const ::std::string& org() const; + void set_org(const ::std::string& value); + #if LANG_CXX11 + void set_org(::std::string&& value); + #endif + void set_org(const char* value); + void set_org(const char* value, size_t size); + ::std::string* mutable_org(); + ::std::string* release_org(); + void set_allocated_org(::std::string* org); + void clear_property(); PropertyCase property_case() const; // @@protoc_insertion_point(class_scope:datacatalog.DatasetPropertyFilter) @@ -5182,6 +5215,7 @@ class DatasetPropertyFilter final : void set_has_name(); void set_has_domain(); void set_has_version(); + void set_has_org(); inline bool has_property() const; inline void clear_has_property(); @@ -5193,6 +5227,7 @@ class DatasetPropertyFilter 
final : ::google::protobuf::internal::ArenaStringPtr name_; ::google::protobuf::internal::ArenaStringPtr domain_; ::google::protobuf::internal::ArenaStringPtr version_; + ::google::protobuf::internal::ArenaStringPtr org_; } property_; mutable ::google::protobuf::internal::CachedSize _cached_size_; ::google::protobuf::uint32 _oneof_case_[1]; @@ -8044,6 +8079,59 @@ inline void DatasetID::set_allocated_uuid(::std::string* uuid) { // @@protoc_insertion_point(field_set_allocated:datacatalog.DatasetID.UUID) } +// string org = 6; +inline void DatasetID::clear_org() { + org_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline const ::std::string& DatasetID::org() const { + // @@protoc_insertion_point(field_get:datacatalog.DatasetID.org) + return org_.GetNoArena(); +} +inline void DatasetID::set_org(const ::std::string& value) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // @@protoc_insertion_point(field_set:datacatalog.DatasetID.org) +} +#if LANG_CXX11 +inline void DatasetID::set_org(::std::string&& value) { + + org_.SetNoArena( + &::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:datacatalog.DatasetID.org) +} +#endif +inline void DatasetID::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value)); + // @@protoc_insertion_point(field_set_char:datacatalog.DatasetID.org) +} +inline void DatasetID::set_org(const char* value, size_t size) { + + org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:datacatalog.DatasetID.org) +} +inline ::std::string* DatasetID::mutable_org() { + + // @@protoc_insertion_point(field_mutable:datacatalog.DatasetID.org) + return 
org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* DatasetID::release_org() { + // @@protoc_insertion_point(field_release:datacatalog.DatasetID.org) + + return org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline void DatasetID::set_allocated_org(::std::string* org) { + if (org != nullptr) { + + } else { + + } + org_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), org); + // @@protoc_insertion_point(field_set_allocated:datacatalog.DatasetID.org) +} + // ------------------------------------------------------------------- // Artifact @@ -9597,6 +9685,98 @@ inline void DatasetPropertyFilter::set_allocated_version(::std::string* version) // @@protoc_insertion_point(field_set_allocated:datacatalog.DatasetPropertyFilter.version) } +// string org = 5; +inline bool DatasetPropertyFilter::has_org() const { + return property_case() == kOrg; +} +inline void DatasetPropertyFilter::set_has_org() { + _oneof_case_[0] = kOrg; +} +inline void DatasetPropertyFilter::clear_org() { + if (has_org()) { + property_.org_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + clear_has_property(); + } +} +inline const ::std::string& DatasetPropertyFilter::org() const { + // @@protoc_insertion_point(field_get:datacatalog.DatasetPropertyFilter.org) + if (has_org()) { + return property_.org_.GetNoArena(); + } + return *&::google::protobuf::internal::GetEmptyStringAlreadyInited(); +} +inline void DatasetPropertyFilter::set_org(const ::std::string& value) { + // @@protoc_insertion_point(field_set:datacatalog.DatasetPropertyFilter.org) + if (!has_org()) { + clear_property(); + set_has_org(); + property_.org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + property_.org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value); + // 
@@protoc_insertion_point(field_set:datacatalog.DatasetPropertyFilter.org) +} +#if LANG_CXX11 +inline void DatasetPropertyFilter::set_org(::std::string&& value) { + // @@protoc_insertion_point(field_set:datacatalog.DatasetPropertyFilter.org) + if (!has_org()) { + clear_property(); + set_has_org(); + property_.org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + property_.org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value)); + // @@protoc_insertion_point(field_set_rvalue:datacatalog.DatasetPropertyFilter.org) +} +#endif +inline void DatasetPropertyFilter::set_org(const char* value) { + GOOGLE_DCHECK(value != nullptr); + if (!has_org()) { + clear_property(); + set_has_org(); + property_.org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + property_.org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), + ::std::string(value)); + // @@protoc_insertion_point(field_set_char:datacatalog.DatasetPropertyFilter.org) +} +inline void DatasetPropertyFilter::set_org(const char* value, size_t size) { + if (!has_org()) { + clear_property(); + set_has_org(); + property_.org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + property_.org_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string( + reinterpret_cast(value), size)); + // @@protoc_insertion_point(field_set_pointer:datacatalog.DatasetPropertyFilter.org) +} +inline ::std::string* DatasetPropertyFilter::mutable_org() { + if (!has_org()) { + clear_property(); + set_has_org(); + property_.org_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } + // @@protoc_insertion_point(field_mutable:datacatalog.DatasetPropertyFilter.org) + return property_.org_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); +} +inline ::std::string* 
DatasetPropertyFilter::release_org() { + // @@protoc_insertion_point(field_release:datacatalog.DatasetPropertyFilter.org) + if (has_org()) { + clear_has_property(); + return property_.org_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); + } else { + return nullptr; + } +} +inline void DatasetPropertyFilter::set_allocated_org(::std::string* org) { + if (has_property()) { + clear_property(); + } + if (org != nullptr) { + set_has_org(); + property_.org_.UnsafeSetDefault(org); + } + // @@protoc_insertion_point(field_set_allocated:datacatalog.DatasetPropertyFilter.org) +} + inline bool DatasetPropertyFilter::has_property() const { return property_case() != PROPERTY_NOT_SET; } diff --git a/flyteidl/gen/pb-cpp/flyteidl/event/cloudevents.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/event/cloudevents.pb.cc index 5200354c7b..60b397bfa4 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/event/cloudevents.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/event/cloudevents.pb.cc @@ -16,12 +16,11 @@ // @@protoc_insertion_point(includes) #include -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto; +extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fartifact_5fid_2eproto ::google::protobuf::internal::SCCInfo<3> scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fidentifier_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_Identifier_flyteidl_2fcore_2fidentifier_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fidentifier_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_WorkflowExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fidentifier_2eproto ::google::protobuf::internal::SCCInfo<2> scc_info_TaskExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto; extern 
PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2finterface_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_TypedInterface_flyteidl_2fcore_2finterface_2eproto; -extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fcore_2fliterals_2eproto ::google::protobuf::internal::SCCInfo<10> scc_info_Literal_flyteidl_2fcore_2fliterals_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fevent_2fevent_2eproto ::google::protobuf::internal::SCCInfo<4> scc_info_WorkflowExecutionEvent_flyteidl_2fevent_2fevent_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fevent_2fevent_2eproto ::google::protobuf::internal::SCCInfo<8> scc_info_NodeExecutionEvent_flyteidl_2fevent_2fevent_2eproto; extern PROTOBUF_INTERNAL_EXPORT_flyteidl_2fevent_2fevent_2eproto ::google::protobuf::internal::SCCInfo<9> scc_info_TaskExecutionEvent_flyteidl_2fevent_2fevent_2eproto; @@ -56,10 +55,9 @@ static void InitDefaultsCloudEventWorkflowExecution_flyteidl_2fevent_2fcloudeven ::flyteidl::event::CloudEventWorkflowExecution::InitAsDefaultInstance(); } -::google::protobuf::internal::SCCInfo<6> scc_info_CloudEventWorkflowExecution_flyteidl_2fevent_2fcloudevents_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 6, InitDefaultsCloudEventWorkflowExecution_flyteidl_2fevent_2fcloudevents_2eproto}, { +::google::protobuf::internal::SCCInfo<5> scc_info_CloudEventWorkflowExecution_flyteidl_2fevent_2fcloudevents_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 5, InitDefaultsCloudEventWorkflowExecution_flyteidl_2fevent_2fcloudevents_2eproto}, { &scc_info_WorkflowExecutionEvent_flyteidl_2fevent_2fevent_2eproto.base, - &scc_info_Literal_flyteidl_2fcore_2fliterals_2eproto.base, &scc_info_TypedInterface_flyteidl_2fcore_2finterface_2eproto.base, &scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto.base, &scc_info_WorkflowExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto.base, @@ -76,11 +74,10 @@ static void 
InitDefaultsCloudEventNodeExecution_flyteidl_2fevent_2fcloudevents_2 ::flyteidl::event::CloudEventNodeExecution::InitAsDefaultInstance(); } -::google::protobuf::internal::SCCInfo<6> scc_info_CloudEventNodeExecution_flyteidl_2fevent_2fcloudevents_2eproto = - {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 6, InitDefaultsCloudEventNodeExecution_flyteidl_2fevent_2fcloudevents_2eproto}, { +::google::protobuf::internal::SCCInfo<5> scc_info_CloudEventNodeExecution_flyteidl_2fevent_2fcloudevents_2eproto = + {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 5, InitDefaultsCloudEventNodeExecution_flyteidl_2fevent_2fcloudevents_2eproto}, { &scc_info_NodeExecutionEvent_flyteidl_2fevent_2fevent_2eproto.base, &scc_info_TaskExecutionIdentifier_flyteidl_2fcore_2fidentifier_2eproto.base, - &scc_info_Literal_flyteidl_2fcore_2fliterals_2eproto.base, &scc_info_TypedInterface_flyteidl_2fcore_2finterface_2eproto.base, &scc_info_ArtifactID_flyteidl_2fcore_2fartifact_5fid_2eproto.base, &scc_info_Identifier_flyteidl_2fcore_2fidentifier_2eproto.base,}}; @@ -135,9 +132,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fevent_2fcloudevents_2epr ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventWorkflowExecution, raw_event_), - PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventWorkflowExecution, output_data_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventWorkflowExecution, output_interface_), - PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventWorkflowExecution, input_data_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventWorkflowExecution, artifact_ids_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventWorkflowExecution, reference_execution_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventWorkflowExecution, principal_), @@ -149,9 +144,7 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fevent_2fcloudevents_2epr ~0u, // no _weak_field_map_ 
PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventNodeExecution, raw_event_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventNodeExecution, task_exec_id_), - PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventNodeExecution, output_data_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventNodeExecution, output_interface_), - PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventNodeExecution, input_data_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventNodeExecution, artifact_ids_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventNodeExecution, principal_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventNodeExecution, launch_plan_id_), @@ -170,14 +163,14 @@ const ::google::protobuf::uint32 TableStruct_flyteidl_2fevent_2fcloudevents_2epr PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventExecutionStart, launch_plan_id_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventExecutionStart, workflow_id_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventExecutionStart, artifact_ids_), - PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventExecutionStart, artifact_keys_), + PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventExecutionStart, artifact_trackers_), PROTOBUF_FIELD_OFFSET(::flyteidl::event::CloudEventExecutionStart, principal_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::flyteidl::event::CloudEventWorkflowExecution)}, - { 13, -1, sizeof(::flyteidl::event::CloudEventNodeExecution)}, - { 26, -1, sizeof(::flyteidl::event::CloudEventTaskExecution)}, - { 32, -1, sizeof(::flyteidl::event::CloudEventExecutionStart)}, + { 11, -1, sizeof(::flyteidl::event::CloudEventNodeExecution)}, + { 22, -1, sizeof(::flyteidl::event::CloudEventTaskExecution)}, + { 28, -1, sizeof(::flyteidl::event::CloudEventExecutionStart)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { @@ -199,45 +192,40 @@ const char 
descriptor_table_protodef_flyteidl_2fevent_2fcloudevents_2eproto[] = "flyteidl/core/literals.proto\032\035flyteidl/c" "ore/interface.proto\032\037flyteidl/core/artif" "act_id.proto\032\036flyteidl/core/identifier.p" - "roto\032\037google/protobuf/timestamp.proto\"\260\003" + "roto\032\037google/protobuf/timestamp.proto\"\321\002" "\n\033CloudEventWorkflowExecution\0229\n\traw_eve" "nt\030\001 \001(\0132&.flyteidl.event.WorkflowExecut" - "ionEvent\022.\n\013output_data\030\002 \001(\0132\031.flyteidl" - ".core.LiteralMap\0227\n\020output_interface\030\003 \001" - "(\0132\035.flyteidl.core.TypedInterface\022-\n\ninp" - "ut_data\030\004 \001(\0132\031.flyteidl.core.LiteralMap" - "\022/\n\014artifact_ids\030\005 \003(\0132\031.flyteidl.core.A" - "rtifactID\022G\n\023reference_execution\030\006 \001(\0132*" - ".flyteidl.core.WorkflowExecutionIdentifi" - "er\022\021\n\tprincipal\030\007 \001(\t\0221\n\016launch_plan_id\030" - "\010 \001(\0132\031.flyteidl.core.Identifier\"\235\003\n\027Clo" - "udEventNodeExecution\0225\n\traw_event\030\001 \001(\0132" - "\".flyteidl.event.NodeExecutionEvent\022<\n\014t" - "ask_exec_id\030\002 \001(\0132&.flyteidl.core.TaskEx" - "ecutionIdentifier\022.\n\013output_data\030\003 \001(\0132\031" - ".flyteidl.core.LiteralMap\0227\n\020output_inte" - "rface\030\004 \001(\0132\035.flyteidl.core.TypedInterfa" - "ce\022-\n\ninput_data\030\005 \001(\0132\031.flyteidl.core.L" - "iteralMap\022/\n\014artifact_ids\030\006 \003(\0132\031.flytei" - "dl.core.ArtifactID\022\021\n\tprincipal\030\007 \001(\t\0221\n" - "\016launch_plan_id\030\010 \001(\0132\031.flyteidl.core.Id" - "entifier\"P\n\027CloudEventTaskExecution\0225\n\tr" - "aw_event\030\001 \001(\0132\".flyteidl.event.TaskExec" - "utionEvent\"\232\002\n\030CloudEventExecutionStart\022" - "@\n\014execution_id\030\001 \001(\0132*.flyteidl.core.Wo" - "rkflowExecutionIdentifier\0221\n\016launch_plan" - "_id\030\002 \001(\0132\031.flyteidl.core.Identifier\022.\n\013" - "workflow_id\030\003 
\001(\0132\031.flyteidl.core.Identi" - "fier\022/\n\014artifact_ids\030\004 \003(\0132\031.flyteidl.co" - "re.ArtifactID\022\025\n\rartifact_keys\030\005 \003(\t\022\021\n\t" - "principal\030\006 \001(\tB=Z;github.com/flyteorg/f" - "lyte/flyteidl/gen/pb-go/flyteidl/eventb\006" - "proto3" + "ionEvent\0227\n\020output_interface\030\002 \001(\0132\035.fly" + "teidl.core.TypedInterface\022/\n\014artifact_id" + "s\030\003 \003(\0132\031.flyteidl.core.ArtifactID\022G\n\023re" + "ference_execution\030\004 \001(\0132*.flyteidl.core." + "WorkflowExecutionIdentifier\022\021\n\tprincipal" + "\030\005 \001(\t\0221\n\016launch_plan_id\030\006 \001(\0132\031.flyteid" + "l.core.Identifier\"\276\002\n\027CloudEventNodeExec" + "ution\0225\n\traw_event\030\001 \001(\0132\".flyteidl.even" + "t.NodeExecutionEvent\022<\n\014task_exec_id\030\002 \001" + "(\0132&.flyteidl.core.TaskExecutionIdentifi" + "er\0227\n\020output_interface\030\003 \001(\0132\035.flyteidl." + "core.TypedInterface\022/\n\014artifact_ids\030\004 \003(" + "\0132\031.flyteidl.core.ArtifactID\022\021\n\tprincipa" + "l\030\005 \001(\t\0221\n\016launch_plan_id\030\006 \001(\0132\031.flytei" + "dl.core.Identifier\"P\n\027CloudEventTaskExec" + "ution\0225\n\traw_event\030\001 \001(\0132\".flyteidl.even" + "t.TaskExecutionEvent\"\236\002\n\030CloudEventExecu" + "tionStart\022@\n\014execution_id\030\001 \001(\0132*.flytei" + "dl.core.WorkflowExecutionIdentifier\0221\n\016l" + "aunch_plan_id\030\002 \001(\0132\031.flyteidl.core.Iden" + "tifier\022.\n\013workflow_id\030\003 \001(\0132\031.flyteidl.c" + "ore.Identifier\022/\n\014artifact_ids\030\004 \003(\0132\031.f" + "lyteidl.core.ArtifactID\022\031\n\021artifact_trac" + "kers\030\005 \003(\t\022\021\n\tprincipal\030\006 \001(\tB=Z;github." 
+ "com/flyteorg/flyte/flyteidl/gen/pb-go/fl" + "yteidl/eventb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fevent_2fcloudevents_2eproto = { false, InitDefaults_flyteidl_2fevent_2fcloudevents_2eproto, descriptor_table_protodef_flyteidl_2fevent_2fcloudevents_2eproto, - "flyteidl/event/cloudevents.proto", &assign_descriptors_table_flyteidl_2fevent_2fcloudevents_2eproto, 1526, + "flyteidl/event/cloudevents.proto", &assign_descriptors_table_flyteidl_2fevent_2fcloudevents_2eproto, 1340, }; void AddDescriptors_flyteidl_2fevent_2fcloudevents_2eproto() { @@ -263,12 +251,8 @@ namespace event { void CloudEventWorkflowExecution::InitAsDefaultInstance() { ::flyteidl::event::_CloudEventWorkflowExecution_default_instance_._instance.get_mutable()->raw_event_ = const_cast< ::flyteidl::event::WorkflowExecutionEvent*>( ::flyteidl::event::WorkflowExecutionEvent::internal_default_instance()); - ::flyteidl::event::_CloudEventWorkflowExecution_default_instance_._instance.get_mutable()->output_data_ = const_cast< ::flyteidl::core::LiteralMap*>( - ::flyteidl::core::LiteralMap::internal_default_instance()); ::flyteidl::event::_CloudEventWorkflowExecution_default_instance_._instance.get_mutable()->output_interface_ = const_cast< ::flyteidl::core::TypedInterface*>( ::flyteidl::core::TypedInterface::internal_default_instance()); - ::flyteidl::event::_CloudEventWorkflowExecution_default_instance_._instance.get_mutable()->input_data_ = const_cast< ::flyteidl::core::LiteralMap*>( - ::flyteidl::core::LiteralMap::internal_default_instance()); ::flyteidl::event::_CloudEventWorkflowExecution_default_instance_._instance.get_mutable()->reference_execution_ = const_cast< ::flyteidl::core::WorkflowExecutionIdentifier*>( ::flyteidl::core::WorkflowExecutionIdentifier::internal_default_instance()); ::flyteidl::event::_CloudEventWorkflowExecution_default_instance_._instance.get_mutable()->launch_plan_id_ = const_cast< ::flyteidl::core::Identifier*>( @@ -277,9 +261,7 
@@ void CloudEventWorkflowExecution::InitAsDefaultInstance() { class CloudEventWorkflowExecution::HasBitSetters { public: static const ::flyteidl::event::WorkflowExecutionEvent& raw_event(const CloudEventWorkflowExecution* msg); - static const ::flyteidl::core::LiteralMap& output_data(const CloudEventWorkflowExecution* msg); static const ::flyteidl::core::TypedInterface& output_interface(const CloudEventWorkflowExecution* msg); - static const ::flyteidl::core::LiteralMap& input_data(const CloudEventWorkflowExecution* msg); static const ::flyteidl::core::WorkflowExecutionIdentifier& reference_execution(const CloudEventWorkflowExecution* msg); static const ::flyteidl::core::Identifier& launch_plan_id(const CloudEventWorkflowExecution* msg); }; @@ -288,18 +270,10 @@ const ::flyteidl::event::WorkflowExecutionEvent& CloudEventWorkflowExecution::HasBitSetters::raw_event(const CloudEventWorkflowExecution* msg) { return *msg->raw_event_; } -const ::flyteidl::core::LiteralMap& -CloudEventWorkflowExecution::HasBitSetters::output_data(const CloudEventWorkflowExecution* msg) { - return *msg->output_data_; -} const ::flyteidl::core::TypedInterface& CloudEventWorkflowExecution::HasBitSetters::output_interface(const CloudEventWorkflowExecution* msg) { return *msg->output_interface_; } -const ::flyteidl::core::LiteralMap& -CloudEventWorkflowExecution::HasBitSetters::input_data(const CloudEventWorkflowExecution* msg) { - return *msg->input_data_; -} const ::flyteidl::core::WorkflowExecutionIdentifier& CloudEventWorkflowExecution::HasBitSetters::reference_execution(const CloudEventWorkflowExecution* msg) { return *msg->reference_execution_; @@ -314,24 +288,12 @@ void CloudEventWorkflowExecution::clear_raw_event() { } raw_event_ = nullptr; } -void CloudEventWorkflowExecution::clear_output_data() { - if (GetArenaNoVirtual() == nullptr && output_data_ != nullptr) { - delete output_data_; - } - output_data_ = nullptr; -} void CloudEventWorkflowExecution::clear_output_interface() { if 
(GetArenaNoVirtual() == nullptr && output_interface_ != nullptr) { delete output_interface_; } output_interface_ = nullptr; } -void CloudEventWorkflowExecution::clear_input_data() { - if (GetArenaNoVirtual() == nullptr && input_data_ != nullptr) { - delete input_data_; - } - input_data_ = nullptr; -} void CloudEventWorkflowExecution::clear_artifact_ids() { artifact_ids_.Clear(); } @@ -349,9 +311,7 @@ void CloudEventWorkflowExecution::clear_launch_plan_id() { } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int CloudEventWorkflowExecution::kRawEventFieldNumber; -const int CloudEventWorkflowExecution::kOutputDataFieldNumber; const int CloudEventWorkflowExecution::kOutputInterfaceFieldNumber; -const int CloudEventWorkflowExecution::kInputDataFieldNumber; const int CloudEventWorkflowExecution::kArtifactIdsFieldNumber; const int CloudEventWorkflowExecution::kReferenceExecutionFieldNumber; const int CloudEventWorkflowExecution::kPrincipalFieldNumber; @@ -377,21 +337,11 @@ CloudEventWorkflowExecution::CloudEventWorkflowExecution(const CloudEventWorkflo } else { raw_event_ = nullptr; } - if (from.has_output_data()) { - output_data_ = new ::flyteidl::core::LiteralMap(*from.output_data_); - } else { - output_data_ = nullptr; - } if (from.has_output_interface()) { output_interface_ = new ::flyteidl::core::TypedInterface(*from.output_interface_); } else { output_interface_ = nullptr; } - if (from.has_input_data()) { - input_data_ = new ::flyteidl::core::LiteralMap(*from.input_data_); - } else { - input_data_ = nullptr; - } if (from.has_reference_execution()) { reference_execution_ = new ::flyteidl::core::WorkflowExecutionIdentifier(*from.reference_execution_); } else { @@ -422,9 +372,7 @@ CloudEventWorkflowExecution::~CloudEventWorkflowExecution() { void CloudEventWorkflowExecution::SharedDtor() { principal_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete raw_event_; - if (this != 
internal_default_instance()) delete output_data_; if (this != internal_default_instance()) delete output_interface_; - if (this != internal_default_instance()) delete input_data_; if (this != internal_default_instance()) delete reference_execution_; if (this != internal_default_instance()) delete launch_plan_id_; } @@ -450,18 +398,10 @@ void CloudEventWorkflowExecution::Clear() { delete raw_event_; } raw_event_ = nullptr; - if (GetArenaNoVirtual() == nullptr && output_data_ != nullptr) { - delete output_data_; - } - output_data_ = nullptr; if (GetArenaNoVirtual() == nullptr && output_interface_ != nullptr) { delete output_interface_; } output_interface_ = nullptr; - if (GetArenaNoVirtual() == nullptr && input_data_ != nullptr) { - delete input_data_; - } - input_data_ = nullptr; if (GetArenaNoVirtual() == nullptr && reference_execution_ != nullptr) { delete reference_execution_; } @@ -499,24 +439,11 @@ const char* CloudEventWorkflowExecution::_InternalParse(const char* begin, const {parser_till_end, object}, ptr - size, ptr)); break; } - // .flyteidl.core.LiteralMap output_data = 2; + // .flyteidl.core.TypedInterface output_interface = 2; case 2: { if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::LiteralMap::_InternalParse; - object = msg->mutable_output_data(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // .flyteidl.core.TypedInterface output_interface = 3; - case 3: { - if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); parser_till_end = ::flyteidl::core::TypedInterface::_InternalParse; object = msg->mutable_output_interface(); if (size > end - ptr) goto 
len_delim_till_end; @@ -525,22 +452,9 @@ const char* CloudEventWorkflowExecution::_InternalParse(const char* begin, const {parser_till_end, object}, ptr - size, ptr)); break; } - // .flyteidl.core.LiteralMap input_data = 4; - case 4: { - if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::LiteralMap::_InternalParse; - object = msg->mutable_input_data(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // repeated .flyteidl.core.ArtifactID artifact_ids = 5; - case 5: { - if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; + // repeated .flyteidl.core.ArtifactID artifact_ids = 3; + case 3: { + if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; do { ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); @@ -551,12 +465,12 @@ const char* CloudEventWorkflowExecution::_InternalParse(const char* begin, const GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( {parser_till_end, object}, ptr - size, ptr)); if (ptr >= end) break; - } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 42 && (ptr += 1)); + } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 26 && (ptr += 1)); break; } - // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; - case 6: { - if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; + // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; + case 4: { + if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); parser_till_end = 
::flyteidl::core::WorkflowExecutionIdentifier::_InternalParse; @@ -567,9 +481,9 @@ const char* CloudEventWorkflowExecution::_InternalParse(const char* begin, const {parser_till_end, object}, ptr - size, ptr)); break; } - // string principal = 7; - case 7: { - if (static_cast<::google::protobuf::uint8>(tag) != 58) goto handle_unusual; + // string principal = 5; + case 5: { + if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); ctx->extra_parse_data().SetFieldName("flyteidl.event.CloudEventWorkflowExecution.principal"); @@ -583,9 +497,9 @@ const char* CloudEventWorkflowExecution::_InternalParse(const char* begin, const ptr += size; break; } - // .flyteidl.core.Identifier launch_plan_id = 8; - case 8: { - if (static_cast<::google::protobuf::uint8>(tag) != 66) goto handle_unusual; + // .flyteidl.core.Identifier launch_plan_id = 6; + case 6: { + if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); parser_till_end = ::flyteidl::core::Identifier::_InternalParse; @@ -641,64 +555,42 @@ bool CloudEventWorkflowExecution::MergePartialFromCodedStream( break; } - // .flyteidl.core.LiteralMap output_data = 2; + // .flyteidl.core.TypedInterface output_interface = 2; case 2: { if (static_cast< ::google::protobuf::uint8>(tag) == (18 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_output_data())); + input, mutable_output_interface())); } else { goto handle_unusual; } break; } - // .flyteidl.core.TypedInterface output_interface = 3; + // repeated .flyteidl.core.ArtifactID artifact_ids = 3; case 3: { if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_output_interface())); + input, add_artifact_ids())); } else { goto handle_unusual; 
} break; } - // .flyteidl.core.LiteralMap input_data = 4; + // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; case 4: { if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_input_data())); + input, mutable_reference_execution())); } else { goto handle_unusual; } break; } - // repeated .flyteidl.core.ArtifactID artifact_ids = 5; + // string principal = 5; case 5: { if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, add_artifact_ids())); - } else { - goto handle_unusual; - } - break; - } - - // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; - case 6: { - if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_reference_execution())); - } else { - goto handle_unusual; - } - break; - } - - // string principal = 7; - case 7: { - if (static_cast< ::google::protobuf::uint8>(tag) == (58 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadString( input, this->mutable_principal())); DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( @@ -711,9 +603,9 @@ bool CloudEventWorkflowExecution::MergePartialFromCodedStream( break; } - // .flyteidl.core.Identifier launch_plan_id = 8; - case 8: { - if (static_cast< ::google::protobuf::uint8>(tag) == (66 & 0xFF)) { + // .flyteidl.core.Identifier launch_plan_id = 6; + case 6: { + if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( input, mutable_launch_plan_id())); } else { @@ -755,53 +647,41 @@ void CloudEventWorkflowExecution::SerializeWithCachedSizes( 1, HasBitSetters::raw_event(this), output); } - // .flyteidl.core.LiteralMap output_data = 2; - if (this->has_output_data()) { - 
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 2, HasBitSetters::output_data(this), output); - } - - // .flyteidl.core.TypedInterface output_interface = 3; + // .flyteidl.core.TypedInterface output_interface = 2; if (this->has_output_interface()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 3, HasBitSetters::output_interface(this), output); - } - - // .flyteidl.core.LiteralMap input_data = 4; - if (this->has_input_data()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 4, HasBitSetters::input_data(this), output); + 2, HasBitSetters::output_interface(this), output); } - // repeated .flyteidl.core.ArtifactID artifact_ids = 5; + // repeated .flyteidl.core.ArtifactID artifact_ids = 3; for (unsigned int i = 0, n = static_cast(this->artifact_ids_size()); i < n; i++) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 5, + 3, this->artifact_ids(static_cast(i)), output); } - // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; if (this->has_reference_execution()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 6, HasBitSetters::reference_execution(this), output); + 4, HasBitSetters::reference_execution(this), output); } - // string principal = 7; + // string principal = 5; if (this->principal().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->principal().data(), static_cast(this->principal().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, "flyteidl.event.CloudEventWorkflowExecution.principal"); ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 7, this->principal(), output); + 5, this->principal(), output); } - // .flyteidl.core.Identifier launch_plan_id = 8; + // .flyteidl.core.Identifier launch_plan_id = 6; if (this->has_launch_plan_id()) { 
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 8, HasBitSetters::launch_plan_id(this), output); + 6, HasBitSetters::launch_plan_id(this), output); } if (_internal_metadata_.have_unknown_fields()) { @@ -824,43 +704,29 @@ ::google::protobuf::uint8* CloudEventWorkflowExecution::InternalSerializeWithCac 1, HasBitSetters::raw_event(this), target); } - // .flyteidl.core.LiteralMap output_data = 2; - if (this->has_output_data()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 2, HasBitSetters::output_data(this), target); - } - - // .flyteidl.core.TypedInterface output_interface = 3; + // .flyteidl.core.TypedInterface output_interface = 2; if (this->has_output_interface()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 3, HasBitSetters::output_interface(this), target); - } - - // .flyteidl.core.LiteralMap input_data = 4; - if (this->has_input_data()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 4, HasBitSetters::input_data(this), target); + 2, HasBitSetters::output_interface(this), target); } - // repeated .flyteidl.core.ArtifactID artifact_ids = 5; + // repeated .flyteidl.core.ArtifactID artifact_ids = 3; for (unsigned int i = 0, n = static_cast(this->artifact_ids_size()); i < n; i++) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 5, this->artifact_ids(static_cast(i)), target); + 3, this->artifact_ids(static_cast(i)), target); } - // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; if (this->has_reference_execution()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 6, HasBitSetters::reference_execution(this), target); + 4, HasBitSetters::reference_execution(this), target); } - // string principal = 7; + // string principal = 5; if 
(this->principal().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->principal().data(), static_cast(this->principal().length()), @@ -868,14 +734,14 @@ ::google::protobuf::uint8* CloudEventWorkflowExecution::InternalSerializeWithCac "flyteidl.event.CloudEventWorkflowExecution.principal"); target = ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 7, this->principal(), target); + 5, this->principal(), target); } - // .flyteidl.core.Identifier launch_plan_id = 8; + // .flyteidl.core.Identifier launch_plan_id = 6; if (this->has_launch_plan_id()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 8, HasBitSetters::launch_plan_id(this), target); + 6, HasBitSetters::launch_plan_id(this), target); } if (_internal_metadata_.have_unknown_fields()) { @@ -899,7 +765,7 @@ size_t CloudEventWorkflowExecution::ByteSizeLong() const { // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; - // repeated .flyteidl.core.ArtifactID artifact_ids = 5; + // repeated .flyteidl.core.ArtifactID artifact_ids = 3; { unsigned int count = static_cast(this->artifact_ids_size()); total_size += 1UL * count; @@ -910,7 +776,7 @@ size_t CloudEventWorkflowExecution::ByteSizeLong() const { } } - // string principal = 7; + // string principal = 5; if (this->principal().size() > 0) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize( @@ -924,35 +790,21 @@ size_t CloudEventWorkflowExecution::ByteSizeLong() const { *raw_event_); } - // .flyteidl.core.LiteralMap output_data = 2; - if (this->has_output_data()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *output_data_); - } - - // .flyteidl.core.TypedInterface output_interface = 3; + // .flyteidl.core.TypedInterface output_interface = 2; if (this->has_output_interface()) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( *output_interface_); } - 
// .flyteidl.core.LiteralMap input_data = 4; - if (this->has_input_data()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *input_data_); - } - - // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; if (this->has_reference_execution()) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( *reference_execution_); } - // .flyteidl.core.Identifier launch_plan_id = 8; + // .flyteidl.core.Identifier launch_plan_id = 6; if (this->has_launch_plan_id()) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( @@ -994,15 +846,9 @@ void CloudEventWorkflowExecution::MergeFrom(const CloudEventWorkflowExecution& f if (from.has_raw_event()) { mutable_raw_event()->::flyteidl::event::WorkflowExecutionEvent::MergeFrom(from.raw_event()); } - if (from.has_output_data()) { - mutable_output_data()->::flyteidl::core::LiteralMap::MergeFrom(from.output_data()); - } if (from.has_output_interface()) { mutable_output_interface()->::flyteidl::core::TypedInterface::MergeFrom(from.output_interface()); } - if (from.has_input_data()) { - mutable_input_data()->::flyteidl::core::LiteralMap::MergeFrom(from.input_data()); - } if (from.has_reference_execution()) { mutable_reference_execution()->::flyteidl::core::WorkflowExecutionIdentifier::MergeFrom(from.reference_execution()); } @@ -1040,9 +886,7 @@ void CloudEventWorkflowExecution::InternalSwap(CloudEventWorkflowExecution* othe principal_.Swap(&other->principal_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); swap(raw_event_, other->raw_event_); - swap(output_data_, other->output_data_); swap(output_interface_, other->output_interface_); - swap(input_data_, other->input_data_); swap(reference_execution_, other->reference_execution_); swap(launch_plan_id_, other->launch_plan_id_); } @@ -1060,12 +904,8 @@ void 
CloudEventNodeExecution::InitAsDefaultInstance() { ::flyteidl::event::NodeExecutionEvent::internal_default_instance()); ::flyteidl::event::_CloudEventNodeExecution_default_instance_._instance.get_mutable()->task_exec_id_ = const_cast< ::flyteidl::core::TaskExecutionIdentifier*>( ::flyteidl::core::TaskExecutionIdentifier::internal_default_instance()); - ::flyteidl::event::_CloudEventNodeExecution_default_instance_._instance.get_mutable()->output_data_ = const_cast< ::flyteidl::core::LiteralMap*>( - ::flyteidl::core::LiteralMap::internal_default_instance()); ::flyteidl::event::_CloudEventNodeExecution_default_instance_._instance.get_mutable()->output_interface_ = const_cast< ::flyteidl::core::TypedInterface*>( ::flyteidl::core::TypedInterface::internal_default_instance()); - ::flyteidl::event::_CloudEventNodeExecution_default_instance_._instance.get_mutable()->input_data_ = const_cast< ::flyteidl::core::LiteralMap*>( - ::flyteidl::core::LiteralMap::internal_default_instance()); ::flyteidl::event::_CloudEventNodeExecution_default_instance_._instance.get_mutable()->launch_plan_id_ = const_cast< ::flyteidl::core::Identifier*>( ::flyteidl::core::Identifier::internal_default_instance()); } @@ -1073,9 +913,7 @@ class CloudEventNodeExecution::HasBitSetters { public: static const ::flyteidl::event::NodeExecutionEvent& raw_event(const CloudEventNodeExecution* msg); static const ::flyteidl::core::TaskExecutionIdentifier& task_exec_id(const CloudEventNodeExecution* msg); - static const ::flyteidl::core::LiteralMap& output_data(const CloudEventNodeExecution* msg); static const ::flyteidl::core::TypedInterface& output_interface(const CloudEventNodeExecution* msg); - static const ::flyteidl::core::LiteralMap& input_data(const CloudEventNodeExecution* msg); static const ::flyteidl::core::Identifier& launch_plan_id(const CloudEventNodeExecution* msg); }; @@ -1087,18 +925,10 @@ const ::flyteidl::core::TaskExecutionIdentifier& CloudEventNodeExecution::HasBitSetters::task_exec_id(const 
CloudEventNodeExecution* msg) { return *msg->task_exec_id_; } -const ::flyteidl::core::LiteralMap& -CloudEventNodeExecution::HasBitSetters::output_data(const CloudEventNodeExecution* msg) { - return *msg->output_data_; -} const ::flyteidl::core::TypedInterface& CloudEventNodeExecution::HasBitSetters::output_interface(const CloudEventNodeExecution* msg) { return *msg->output_interface_; } -const ::flyteidl::core::LiteralMap& -CloudEventNodeExecution::HasBitSetters::input_data(const CloudEventNodeExecution* msg) { - return *msg->input_data_; -} const ::flyteidl::core::Identifier& CloudEventNodeExecution::HasBitSetters::launch_plan_id(const CloudEventNodeExecution* msg) { return *msg->launch_plan_id_; @@ -1115,24 +945,12 @@ void CloudEventNodeExecution::clear_task_exec_id() { } task_exec_id_ = nullptr; } -void CloudEventNodeExecution::clear_output_data() { - if (GetArenaNoVirtual() == nullptr && output_data_ != nullptr) { - delete output_data_; - } - output_data_ = nullptr; -} void CloudEventNodeExecution::clear_output_interface() { if (GetArenaNoVirtual() == nullptr && output_interface_ != nullptr) { delete output_interface_; } output_interface_ = nullptr; } -void CloudEventNodeExecution::clear_input_data() { - if (GetArenaNoVirtual() == nullptr && input_data_ != nullptr) { - delete input_data_; - } - input_data_ = nullptr; -} void CloudEventNodeExecution::clear_artifact_ids() { artifact_ids_.Clear(); } @@ -1145,9 +963,7 @@ void CloudEventNodeExecution::clear_launch_plan_id() { #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int CloudEventNodeExecution::kRawEventFieldNumber; const int CloudEventNodeExecution::kTaskExecIdFieldNumber; -const int CloudEventNodeExecution::kOutputDataFieldNumber; const int CloudEventNodeExecution::kOutputInterfaceFieldNumber; -const int CloudEventNodeExecution::kInputDataFieldNumber; const int CloudEventNodeExecution::kArtifactIdsFieldNumber; const int CloudEventNodeExecution::kPrincipalFieldNumber; const int 
CloudEventNodeExecution::kLaunchPlanIdFieldNumber; @@ -1177,21 +993,11 @@ CloudEventNodeExecution::CloudEventNodeExecution(const CloudEventNodeExecution& } else { task_exec_id_ = nullptr; } - if (from.has_output_data()) { - output_data_ = new ::flyteidl::core::LiteralMap(*from.output_data_); - } else { - output_data_ = nullptr; - } if (from.has_output_interface()) { output_interface_ = new ::flyteidl::core::TypedInterface(*from.output_interface_); } else { output_interface_ = nullptr; } - if (from.has_input_data()) { - input_data_ = new ::flyteidl::core::LiteralMap(*from.input_data_); - } else { - input_data_ = nullptr; - } if (from.has_launch_plan_id()) { launch_plan_id_ = new ::flyteidl::core::Identifier(*from.launch_plan_id_); } else { @@ -1218,9 +1024,7 @@ void CloudEventNodeExecution::SharedDtor() { principal_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete raw_event_; if (this != internal_default_instance()) delete task_exec_id_; - if (this != internal_default_instance()) delete output_data_; if (this != internal_default_instance()) delete output_interface_; - if (this != internal_default_instance()) delete input_data_; if (this != internal_default_instance()) delete launch_plan_id_; } @@ -1249,18 +1053,10 @@ void CloudEventNodeExecution::Clear() { delete task_exec_id_; } task_exec_id_ = nullptr; - if (GetArenaNoVirtual() == nullptr && output_data_ != nullptr) { - delete output_data_; - } - output_data_ = nullptr; if (GetArenaNoVirtual() == nullptr && output_interface_ != nullptr) { delete output_interface_; } output_interface_ = nullptr; - if (GetArenaNoVirtual() == nullptr && input_data_ != nullptr) { - delete input_data_; - } - input_data_ = nullptr; if (GetArenaNoVirtual() == nullptr && launch_plan_id_ != nullptr) { delete launch_plan_id_; } @@ -1307,24 +1103,11 @@ const char* CloudEventNodeExecution::_InternalParse(const char* begin, const cha {parser_till_end, object}, ptr - 
size, ptr)); break; } - // .flyteidl.core.LiteralMap output_data = 3; + // .flyteidl.core.TypedInterface output_interface = 3; case 3: { if (static_cast<::google::protobuf::uint8>(tag) != 26) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::LiteralMap::_InternalParse; - object = msg->mutable_output_data(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // .flyteidl.core.TypedInterface output_interface = 4; - case 4: { - if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); parser_till_end = ::flyteidl::core::TypedInterface::_InternalParse; object = msg->mutable_output_interface(); if (size > end - ptr) goto len_delim_till_end; @@ -1333,22 +1116,9 @@ const char* CloudEventNodeExecution::_InternalParse(const char* begin, const cha {parser_till_end, object}, ptr - size, ptr)); break; } - // .flyteidl.core.LiteralMap input_data = 5; - case 5: { - if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; - ptr = ::google::protobuf::io::ReadSize(ptr, &size); - GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - parser_till_end = ::flyteidl::core::LiteralMap::_InternalParse; - object = msg->mutable_input_data(); - if (size > end - ptr) goto len_delim_till_end; - ptr += size; - GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( - {parser_till_end, object}, ptr - size, ptr)); - break; - } - // repeated .flyteidl.core.ArtifactID artifact_ids = 6; - case 6: { - if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; + // repeated .flyteidl.core.ArtifactID artifact_ids = 4; + case 4: { + if (static_cast<::google::protobuf::uint8>(tag) != 34) goto handle_unusual; do { ptr = ::google::protobuf::io::ReadSize(ptr, 
&size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); @@ -1359,12 +1129,12 @@ const char* CloudEventNodeExecution::_InternalParse(const char* begin, const cha GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->ParseExactRange( {parser_till_end, object}, ptr - size, ptr)); if (ptr >= end) break; - } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 50 && (ptr += 1)); + } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 34 && (ptr += 1)); break; } - // string principal = 7; - case 7: { - if (static_cast<::google::protobuf::uint8>(tag) != 58) goto handle_unusual; + // string principal = 5; + case 5: { + if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); ctx->extra_parse_data().SetFieldName("flyteidl.event.CloudEventNodeExecution.principal"); @@ -1378,9 +1148,9 @@ const char* CloudEventNodeExecution::_InternalParse(const char* begin, const cha ptr += size; break; } - // .flyteidl.core.Identifier launch_plan_id = 8; - case 8: { - if (static_cast<::google::protobuf::uint8>(tag) != 66) goto handle_unusual; + // .flyteidl.core.Identifier launch_plan_id = 6; + case 6: { + if (static_cast<::google::protobuf::uint8>(tag) != 50) goto handle_unusual; ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); parser_till_end = ::flyteidl::core::Identifier::_InternalParse; @@ -1447,53 +1217,31 @@ bool CloudEventNodeExecution::MergePartialFromCodedStream( break; } - // .flyteidl.core.LiteralMap output_data = 3; + // .flyteidl.core.TypedInterface output_interface = 3; case 3: { if (static_cast< ::google::protobuf::uint8>(tag) == (26 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_output_data())); + input, mutable_output_interface())); } else { goto handle_unusual; } break; } - // .flyteidl.core.TypedInterface output_interface = 4; + // repeated 
.flyteidl.core.ArtifactID artifact_ids = 4; case 4: { if (static_cast< ::google::protobuf::uint8>(tag) == (34 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_output_interface())); + input, add_artifact_ids())); } else { goto handle_unusual; } break; } - // .flyteidl.core.LiteralMap input_data = 5; + // string principal = 5; case 5: { if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, mutable_input_data())); - } else { - goto handle_unusual; - } - break; - } - - // repeated .flyteidl.core.ArtifactID artifact_ids = 6; - case 6: { - if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { - DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( - input, add_artifact_ids())); - } else { - goto handle_unusual; - } - break; - } - - // string principal = 7; - case 7: { - if (static_cast< ::google::protobuf::uint8>(tag) == (58 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadString( input, this->mutable_principal())); DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( @@ -1506,9 +1254,9 @@ bool CloudEventNodeExecution::MergePartialFromCodedStream( break; } - // .flyteidl.core.Identifier launch_plan_id = 8; - case 8: { - if (static_cast< ::google::protobuf::uint8>(tag) == (66 & 0xFF)) { + // .flyteidl.core.Identifier launch_plan_id = 6; + case 6: { + if (static_cast< ::google::protobuf::uint8>(tag) == (50 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( input, mutable_launch_plan_id())); } else { @@ -1556,47 +1304,35 @@ void CloudEventNodeExecution::SerializeWithCachedSizes( 2, HasBitSetters::task_exec_id(this), output); } - // .flyteidl.core.LiteralMap output_data = 3; - if (this->has_output_data()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 3, HasBitSetters::output_data(this), output); - } - - // .flyteidl.core.TypedInterface 
output_interface = 4; + // .flyteidl.core.TypedInterface output_interface = 3; if (this->has_output_interface()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 4, HasBitSetters::output_interface(this), output); - } - - // .flyteidl.core.LiteralMap input_data = 5; - if (this->has_input_data()) { - ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 5, HasBitSetters::input_data(this), output); + 3, HasBitSetters::output_interface(this), output); } - // repeated .flyteidl.core.ArtifactID artifact_ids = 6; + // repeated .flyteidl.core.ArtifactID artifact_ids = 4; for (unsigned int i = 0, n = static_cast(this->artifact_ids_size()); i < n; i++) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 6, + 4, this->artifact_ids(static_cast(i)), output); } - // string principal = 7; + // string principal = 5; if (this->principal().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->principal().data(), static_cast(this->principal().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, "flyteidl.event.CloudEventNodeExecution.principal"); ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( - 7, this->principal(), output); + 5, this->principal(), output); } - // .flyteidl.core.Identifier launch_plan_id = 8; + // .flyteidl.core.Identifier launch_plan_id = 6; if (this->has_launch_plan_id()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( - 8, HasBitSetters::launch_plan_id(this), output); + 6, HasBitSetters::launch_plan_id(this), output); } if (_internal_metadata_.have_unknown_fields()) { @@ -1626,36 +1362,22 @@ ::google::protobuf::uint8* CloudEventNodeExecution::InternalSerializeWithCachedS 2, HasBitSetters::task_exec_id(this), target); } - // .flyteidl.core.LiteralMap output_data = 3; - if (this->has_output_data()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 3, 
HasBitSetters::output_data(this), target); - } - - // .flyteidl.core.TypedInterface output_interface = 4; + // .flyteidl.core.TypedInterface output_interface = 3; if (this->has_output_interface()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 4, HasBitSetters::output_interface(this), target); - } - - // .flyteidl.core.LiteralMap input_data = 5; - if (this->has_input_data()) { - target = ::google::protobuf::internal::WireFormatLite:: - InternalWriteMessageToArray( - 5, HasBitSetters::input_data(this), target); + 3, HasBitSetters::output_interface(this), target); } - // repeated .flyteidl.core.ArtifactID artifact_ids = 6; + // repeated .flyteidl.core.ArtifactID artifact_ids = 4; for (unsigned int i = 0, n = static_cast(this->artifact_ids_size()); i < n; i++) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 6, this->artifact_ids(static_cast(i)), target); + 4, this->artifact_ids(static_cast(i)), target); } - // string principal = 7; + // string principal = 5; if (this->principal().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->principal().data(), static_cast(this->principal().length()), @@ -1663,14 +1385,14 @@ ::google::protobuf::uint8* CloudEventNodeExecution::InternalSerializeWithCachedS "flyteidl.event.CloudEventNodeExecution.principal"); target = ::google::protobuf::internal::WireFormatLite::WriteStringToArray( - 7, this->principal(), target); + 5, this->principal(), target); } - // .flyteidl.core.Identifier launch_plan_id = 8; + // .flyteidl.core.Identifier launch_plan_id = 6; if (this->has_launch_plan_id()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( - 8, HasBitSetters::launch_plan_id(this), target); + 6, HasBitSetters::launch_plan_id(this), target); } if (_internal_metadata_.have_unknown_fields()) { @@ -1694,7 +1416,7 @@ size_t CloudEventNodeExecution::ByteSizeLong() const { // Prevent compiler 
warnings about cached_has_bits being unused (void) cached_has_bits; - // repeated .flyteidl.core.ArtifactID artifact_ids = 6; + // repeated .flyteidl.core.ArtifactID artifact_ids = 4; { unsigned int count = static_cast(this->artifact_ids_size()); total_size += 1UL * count; @@ -1705,7 +1427,7 @@ size_t CloudEventNodeExecution::ByteSizeLong() const { } } - // string principal = 7; + // string principal = 5; if (this->principal().size() > 0) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize( @@ -1726,28 +1448,14 @@ size_t CloudEventNodeExecution::ByteSizeLong() const { *task_exec_id_); } - // .flyteidl.core.LiteralMap output_data = 3; - if (this->has_output_data()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *output_data_); - } - - // .flyteidl.core.TypedInterface output_interface = 4; + // .flyteidl.core.TypedInterface output_interface = 3; if (this->has_output_interface()) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( *output_interface_); } - // .flyteidl.core.LiteralMap input_data = 5; - if (this->has_input_data()) { - total_size += 1 + - ::google::protobuf::internal::WireFormatLite::MessageSize( - *input_data_); - } - - // .flyteidl.core.Identifier launch_plan_id = 8; + // .flyteidl.core.Identifier launch_plan_id = 6; if (this->has_launch_plan_id()) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( @@ -1792,15 +1500,9 @@ void CloudEventNodeExecution::MergeFrom(const CloudEventNodeExecution& from) { if (from.has_task_exec_id()) { mutable_task_exec_id()->::flyteidl::core::TaskExecutionIdentifier::MergeFrom(from.task_exec_id()); } - if (from.has_output_data()) { - mutable_output_data()->::flyteidl::core::LiteralMap::MergeFrom(from.output_data()); - } if (from.has_output_interface()) { mutable_output_interface()->::flyteidl::core::TypedInterface::MergeFrom(from.output_interface()); } - if (from.has_input_data()) { - 
mutable_input_data()->::flyteidl::core::LiteralMap::MergeFrom(from.input_data()); - } if (from.has_launch_plan_id()) { mutable_launch_plan_id()->::flyteidl::core::Identifier::MergeFrom(from.launch_plan_id()); } @@ -1836,9 +1538,7 @@ void CloudEventNodeExecution::InternalSwap(CloudEventNodeExecution* other) { GetArenaNoVirtual()); swap(raw_event_, other->raw_event_); swap(task_exec_id_, other->task_exec_id_); - swap(output_data_, other->output_data_); swap(output_interface_, other->output_interface_); - swap(input_data_, other->input_data_); swap(launch_plan_id_, other->launch_plan_id_); } @@ -2196,7 +1896,7 @@ const int CloudEventExecutionStart::kExecutionIdFieldNumber; const int CloudEventExecutionStart::kLaunchPlanIdFieldNumber; const int CloudEventExecutionStart::kWorkflowIdFieldNumber; const int CloudEventExecutionStart::kArtifactIdsFieldNumber; -const int CloudEventExecutionStart::kArtifactKeysFieldNumber; +const int CloudEventExecutionStart::kArtifactTrackersFieldNumber; const int CloudEventExecutionStart::kPrincipalFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 @@ -2209,7 +1909,7 @@ CloudEventExecutionStart::CloudEventExecutionStart(const CloudEventExecutionStar : ::google::protobuf::Message(), _internal_metadata_(nullptr), artifact_ids_(from.artifact_ids_), - artifact_keys_(from.artifact_keys_) { + artifact_trackers_(from.artifact_trackers_) { _internal_metadata_.MergeFrom(from._internal_metadata_); principal_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (from.principal().size() > 0) { @@ -2270,7 +1970,7 @@ void CloudEventExecutionStart::Clear() { (void) cached_has_bits; artifact_ids_.Clear(); - artifact_keys_.Clear(); + artifact_trackers_.Clear(); principal_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == nullptr && execution_id_ != nullptr) { delete execution_id_; @@ -2355,14 +2055,14 @@ const char* 
CloudEventExecutionStart::_InternalParse(const char* begin, const ch } while ((::google::protobuf::io::UnalignedLoad<::google::protobuf::uint64>(ptr) & 255) == 34 && (ptr += 1)); break; } - // repeated string artifact_keys = 5; + // repeated string artifact_trackers = 5; case 5: { if (static_cast<::google::protobuf::uint8>(tag) != 42) goto handle_unusual; do { ptr = ::google::protobuf::io::ReadSize(ptr, &size); GOOGLE_PROTOBUF_PARSER_ASSERT(ptr); - ctx->extra_parse_data().SetFieldName("flyteidl.event.CloudEventExecutionStart.artifact_keys"); - object = msg->add_artifact_keys(); + ctx->extra_parse_data().SetFieldName("flyteidl.event.CloudEventExecutionStart.artifact_trackers"); + object = msg->add_artifact_trackers(); if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) { parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8; goto string_till_end; @@ -2468,16 +2168,16 @@ bool CloudEventExecutionStart::MergePartialFromCodedStream( break; } - // repeated string artifact_keys = 5; + // repeated string artifact_trackers = 5; case 5: { if (static_cast< ::google::protobuf::uint8>(tag) == (42 & 0xFF)) { DO_(::google::protobuf::internal::WireFormatLite::ReadString( - input, this->add_artifact_keys())); + input, this->add_artifact_trackers())); DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->artifact_keys(this->artifact_keys_size() - 1).data(), - static_cast(this->artifact_keys(this->artifact_keys_size() - 1).length()), + this->artifact_trackers(this->artifact_trackers_size() - 1).data(), + static_cast(this->artifact_trackers(this->artifact_trackers_size() - 1).length()), ::google::protobuf::internal::WireFormatLite::PARSE, - "flyteidl.event.CloudEventExecutionStart.artifact_keys")); + "flyteidl.event.CloudEventExecutionStart.artifact_trackers")); } else { goto handle_unusual; } @@ -2553,14 +2253,14 @@ void CloudEventExecutionStart::SerializeWithCachedSizes( output); } - // repeated string artifact_keys = 
5; - for (int i = 0, n = this->artifact_keys_size(); i < n; i++) { + // repeated string artifact_trackers = 5; + for (int i = 0, n = this->artifact_trackers_size(); i < n; i++) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->artifact_keys(i).data(), static_cast(this->artifact_keys(i).length()), + this->artifact_trackers(i).data(), static_cast(this->artifact_trackers(i).length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.event.CloudEventExecutionStart.artifact_keys"); + "flyteidl.event.CloudEventExecutionStart.artifact_trackers"); ::google::protobuf::internal::WireFormatLite::WriteString( - 5, this->artifact_keys(i), output); + 5, this->artifact_trackers(i), output); } // string principal = 6; @@ -2615,14 +2315,14 @@ ::google::protobuf::uint8* CloudEventExecutionStart::InternalSerializeWithCached 4, this->artifact_ids(static_cast(i)), target); } - // repeated string artifact_keys = 5; - for (int i = 0, n = this->artifact_keys_size(); i < n; i++) { + // repeated string artifact_trackers = 5; + for (int i = 0, n = this->artifact_trackers_size(); i < n; i++) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( - this->artifact_keys(i).data(), static_cast(this->artifact_keys(i).length()), + this->artifact_trackers(i).data(), static_cast(this->artifact_trackers(i).length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, - "flyteidl.event.CloudEventExecutionStart.artifact_keys"); + "flyteidl.event.CloudEventExecutionStart.artifact_trackers"); target = ::google::protobuf::internal::WireFormatLite:: - WriteStringToArray(5, this->artifact_keys(i), target); + WriteStringToArray(5, this->artifact_trackers(i), target); } // string principal = 6; @@ -2668,12 +2368,12 @@ size_t CloudEventExecutionStart::ByteSizeLong() const { } } - // repeated string artifact_keys = 5; + // repeated string artifact_trackers = 5; total_size += 1 * - ::google::protobuf::internal::FromIntSize(this->artifact_keys_size()); - 
for (int i = 0, n = this->artifact_keys_size(); i < n; i++) { + ::google::protobuf::internal::FromIntSize(this->artifact_trackers_size()); + for (int i = 0, n = this->artifact_trackers_size(); i < n; i++) { total_size += ::google::protobuf::internal::WireFormatLite::StringSize( - this->artifact_keys(i)); + this->artifact_trackers(i)); } // string principal = 6; @@ -2732,7 +2432,7 @@ void CloudEventExecutionStart::MergeFrom(const CloudEventExecutionStart& from) { (void) cached_has_bits; artifact_ids_.MergeFrom(from.artifact_ids_); - artifact_keys_.MergeFrom(from.artifact_keys_); + artifact_trackers_.MergeFrom(from.artifact_trackers_); if (from.principal().size() > 0) { principal_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.principal_); @@ -2774,7 +2474,7 @@ void CloudEventExecutionStart::InternalSwap(CloudEventExecutionStart* other) { using std::swap; _internal_metadata_.Swap(&other->_internal_metadata_); CastToBase(&artifact_ids_)->InternalSwap(CastToBase(&other->artifact_ids_)); - artifact_keys_.InternalSwap(CastToBase(&other->artifact_keys_)); + artifact_trackers_.InternalSwap(CastToBase(&other->artifact_trackers_)); principal_.Swap(&other->principal_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); swap(execution_id_, other->execution_id_); diff --git a/flyteidl/gen/pb-cpp/flyteidl/event/cloudevents.pb.h b/flyteidl/gen/pb-cpp/flyteidl/event/cloudevents.pb.h index 999d5a1137..ac0a1cc959 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/event/cloudevents.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/event/cloudevents.pb.h @@ -178,10 +178,10 @@ class CloudEventWorkflowExecution final : // accessors ------------------------------------------------------- - // repeated .flyteidl.core.ArtifactID artifact_ids = 5; + // repeated .flyteidl.core.ArtifactID artifact_ids = 3; int artifact_ids_size() const; void clear_artifact_ids(); - static const int kArtifactIdsFieldNumber = 5; + static const int 
kArtifactIdsFieldNumber = 3; ::flyteidl::core::ArtifactID* mutable_artifact_ids(int index); ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >* mutable_artifact_ids(); @@ -190,9 +190,9 @@ class CloudEventWorkflowExecution final : const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >& artifact_ids() const; - // string principal = 7; + // string principal = 5; void clear_principal(); - static const int kPrincipalFieldNumber = 7; + static const int kPrincipalFieldNumber = 5; const ::std::string& principal() const; void set_principal(const ::std::string& value); #if LANG_CXX11 @@ -213,46 +213,28 @@ class CloudEventWorkflowExecution final : ::flyteidl::event::WorkflowExecutionEvent* mutable_raw_event(); void set_allocated_raw_event(::flyteidl::event::WorkflowExecutionEvent* raw_event); - // .flyteidl.core.LiteralMap output_data = 2; - bool has_output_data() const; - void clear_output_data(); - static const int kOutputDataFieldNumber = 2; - const ::flyteidl::core::LiteralMap& output_data() const; - ::flyteidl::core::LiteralMap* release_output_data(); - ::flyteidl::core::LiteralMap* mutable_output_data(); - void set_allocated_output_data(::flyteidl::core::LiteralMap* output_data); - - // .flyteidl.core.TypedInterface output_interface = 3; + // .flyteidl.core.TypedInterface output_interface = 2; bool has_output_interface() const; void clear_output_interface(); - static const int kOutputInterfaceFieldNumber = 3; + static const int kOutputInterfaceFieldNumber = 2; const ::flyteidl::core::TypedInterface& output_interface() const; ::flyteidl::core::TypedInterface* release_output_interface(); ::flyteidl::core::TypedInterface* mutable_output_interface(); void set_allocated_output_interface(::flyteidl::core::TypedInterface* output_interface); - // .flyteidl.core.LiteralMap input_data = 4; - bool has_input_data() const; - void clear_input_data(); - static const int kInputDataFieldNumber = 4; - const ::flyteidl::core::LiteralMap& input_data() 
const; - ::flyteidl::core::LiteralMap* release_input_data(); - ::flyteidl::core::LiteralMap* mutable_input_data(); - void set_allocated_input_data(::flyteidl::core::LiteralMap* input_data); - - // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + // .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; bool has_reference_execution() const; void clear_reference_execution(); - static const int kReferenceExecutionFieldNumber = 6; + static const int kReferenceExecutionFieldNumber = 4; const ::flyteidl::core::WorkflowExecutionIdentifier& reference_execution() const; ::flyteidl::core::WorkflowExecutionIdentifier* release_reference_execution(); ::flyteidl::core::WorkflowExecutionIdentifier* mutable_reference_execution(); void set_allocated_reference_execution(::flyteidl::core::WorkflowExecutionIdentifier* reference_execution); - // .flyteidl.core.Identifier launch_plan_id = 8; + // .flyteidl.core.Identifier launch_plan_id = 6; bool has_launch_plan_id() const; void clear_launch_plan_id(); - static const int kLaunchPlanIdFieldNumber = 8; + static const int kLaunchPlanIdFieldNumber = 6; const ::flyteidl::core::Identifier& launch_plan_id() const; ::flyteidl::core::Identifier* release_launch_plan_id(); ::flyteidl::core::Identifier* mutable_launch_plan_id(); @@ -266,9 +248,7 @@ class CloudEventWorkflowExecution final : ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID > artifact_ids_; ::google::protobuf::internal::ArenaStringPtr principal_; ::flyteidl::event::WorkflowExecutionEvent* raw_event_; - ::flyteidl::core::LiteralMap* output_data_; ::flyteidl::core::TypedInterface* output_interface_; - ::flyteidl::core::LiteralMap* input_data_; ::flyteidl::core::WorkflowExecutionIdentifier* reference_execution_; ::flyteidl::core::Identifier* launch_plan_id_; mutable ::google::protobuf::internal::CachedSize _cached_size_; @@ -371,10 +351,10 @@ class CloudEventNodeExecution final : // accessors 
------------------------------------------------------- - // repeated .flyteidl.core.ArtifactID artifact_ids = 6; + // repeated .flyteidl.core.ArtifactID artifact_ids = 4; int artifact_ids_size() const; void clear_artifact_ids(); - static const int kArtifactIdsFieldNumber = 6; + static const int kArtifactIdsFieldNumber = 4; ::flyteidl::core::ArtifactID* mutable_artifact_ids(int index); ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >* mutable_artifact_ids(); @@ -383,9 +363,9 @@ class CloudEventNodeExecution final : const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >& artifact_ids() const; - // string principal = 7; + // string principal = 5; void clear_principal(); - static const int kPrincipalFieldNumber = 7; + static const int kPrincipalFieldNumber = 5; const ::std::string& principal() const; void set_principal(const ::std::string& value); #if LANG_CXX11 @@ -415,37 +395,19 @@ class CloudEventNodeExecution final : ::flyteidl::core::TaskExecutionIdentifier* mutable_task_exec_id(); void set_allocated_task_exec_id(::flyteidl::core::TaskExecutionIdentifier* task_exec_id); - // .flyteidl.core.LiteralMap output_data = 3; - bool has_output_data() const; - void clear_output_data(); - static const int kOutputDataFieldNumber = 3; - const ::flyteidl::core::LiteralMap& output_data() const; - ::flyteidl::core::LiteralMap* release_output_data(); - ::flyteidl::core::LiteralMap* mutable_output_data(); - void set_allocated_output_data(::flyteidl::core::LiteralMap* output_data); - - // .flyteidl.core.TypedInterface output_interface = 4; + // .flyteidl.core.TypedInterface output_interface = 3; bool has_output_interface() const; void clear_output_interface(); - static const int kOutputInterfaceFieldNumber = 4; + static const int kOutputInterfaceFieldNumber = 3; const ::flyteidl::core::TypedInterface& output_interface() const; ::flyteidl::core::TypedInterface* release_output_interface(); ::flyteidl::core::TypedInterface* 
mutable_output_interface(); void set_allocated_output_interface(::flyteidl::core::TypedInterface* output_interface); - // .flyteidl.core.LiteralMap input_data = 5; - bool has_input_data() const; - void clear_input_data(); - static const int kInputDataFieldNumber = 5; - const ::flyteidl::core::LiteralMap& input_data() const; - ::flyteidl::core::LiteralMap* release_input_data(); - ::flyteidl::core::LiteralMap* mutable_input_data(); - void set_allocated_input_data(::flyteidl::core::LiteralMap* input_data); - - // .flyteidl.core.Identifier launch_plan_id = 8; + // .flyteidl.core.Identifier launch_plan_id = 6; bool has_launch_plan_id() const; void clear_launch_plan_id(); - static const int kLaunchPlanIdFieldNumber = 8; + static const int kLaunchPlanIdFieldNumber = 6; const ::flyteidl::core::Identifier& launch_plan_id() const; ::flyteidl::core::Identifier* release_launch_plan_id(); ::flyteidl::core::Identifier* mutable_launch_plan_id(); @@ -460,9 +422,7 @@ class CloudEventNodeExecution final : ::google::protobuf::internal::ArenaStringPtr principal_; ::flyteidl::event::NodeExecutionEvent* raw_event_; ::flyteidl::core::TaskExecutionIdentifier* task_exec_id_; - ::flyteidl::core::LiteralMap* output_data_; ::flyteidl::core::TypedInterface* output_interface_; - ::flyteidl::core::LiteralMap* input_data_; ::flyteidl::core::Identifier* launch_plan_id_; mutable ::google::protobuf::internal::CachedSize _cached_size_; friend struct ::TableStruct_flyteidl_2fevent_2fcloudevents_2eproto; @@ -691,27 +651,27 @@ class CloudEventExecutionStart final : const ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID >& artifact_ids() const; - // repeated string artifact_keys = 5; - int artifact_keys_size() const; - void clear_artifact_keys(); - static const int kArtifactKeysFieldNumber = 5; - const ::std::string& artifact_keys(int index) const; - ::std::string* mutable_artifact_keys(int index); - void set_artifact_keys(int index, const ::std::string& value); + // repeated string 
artifact_trackers = 5; + int artifact_trackers_size() const; + void clear_artifact_trackers(); + static const int kArtifactTrackersFieldNumber = 5; + const ::std::string& artifact_trackers(int index) const; + ::std::string* mutable_artifact_trackers(int index); + void set_artifact_trackers(int index, const ::std::string& value); #if LANG_CXX11 - void set_artifact_keys(int index, ::std::string&& value); + void set_artifact_trackers(int index, ::std::string&& value); #endif - void set_artifact_keys(int index, const char* value); - void set_artifact_keys(int index, const char* value, size_t size); - ::std::string* add_artifact_keys(); - void add_artifact_keys(const ::std::string& value); + void set_artifact_trackers(int index, const char* value); + void set_artifact_trackers(int index, const char* value, size_t size); + ::std::string* add_artifact_trackers(); + void add_artifact_trackers(const ::std::string& value); #if LANG_CXX11 - void add_artifact_keys(::std::string&& value); + void add_artifact_trackers(::std::string&& value); #endif - void add_artifact_keys(const char* value); - void add_artifact_keys(const char* value, size_t size); - const ::google::protobuf::RepeatedPtrField<::std::string>& artifact_keys() const; - ::google::protobuf::RepeatedPtrField<::std::string>* mutable_artifact_keys(); + void add_artifact_trackers(const char* value); + void add_artifact_trackers(const char* value, size_t size); + const ::google::protobuf::RepeatedPtrField<::std::string>& artifact_trackers() const; + ::google::protobuf::RepeatedPtrField<::std::string>* mutable_artifact_trackers(); // string principal = 6; void clear_principal(); @@ -760,7 +720,7 @@ class CloudEventExecutionStart final : ::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_; ::google::protobuf::RepeatedPtrField< ::flyteidl::core::ArtifactID > artifact_ids_; - ::google::protobuf::RepeatedPtrField<::std::string> artifact_keys_; + ::google::protobuf::RepeatedPtrField<::std::string> 
artifact_trackers_; ::google::protobuf::internal::ArenaStringPtr principal_; ::flyteidl::core::WorkflowExecutionIdentifier* execution_id_; ::flyteidl::core::Identifier* launch_plan_id_; @@ -824,52 +784,7 @@ inline void CloudEventWorkflowExecution::set_allocated_raw_event(::flyteidl::eve // @@protoc_insertion_point(field_set_allocated:flyteidl.event.CloudEventWorkflowExecution.raw_event) } -// .flyteidl.core.LiteralMap output_data = 2; -inline bool CloudEventWorkflowExecution::has_output_data() const { - return this != internal_default_instance() && output_data_ != nullptr; -} -inline const ::flyteidl::core::LiteralMap& CloudEventWorkflowExecution::output_data() const { - const ::flyteidl::core::LiteralMap* p = output_data_; - // @@protoc_insertion_point(field_get:flyteidl.event.CloudEventWorkflowExecution.output_data) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_LiteralMap_default_instance_); -} -inline ::flyteidl::core::LiteralMap* CloudEventWorkflowExecution::release_output_data() { - // @@protoc_insertion_point(field_release:flyteidl.event.CloudEventWorkflowExecution.output_data) - - ::flyteidl::core::LiteralMap* temp = output_data_; - output_data_ = nullptr; - return temp; -} -inline ::flyteidl::core::LiteralMap* CloudEventWorkflowExecution::mutable_output_data() { - - if (output_data_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::LiteralMap>(GetArenaNoVirtual()); - output_data_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.event.CloudEventWorkflowExecution.output_data) - return output_data_; -} -inline void CloudEventWorkflowExecution::set_allocated_output_data(::flyteidl::core::LiteralMap* output_data) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(output_data_); - } - if (output_data) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - 
output_data = ::google::protobuf::internal::GetOwnedMessage( - message_arena, output_data, submessage_arena); - } - - } else { - - } - output_data_ = output_data; - // @@protoc_insertion_point(field_set_allocated:flyteidl.event.CloudEventWorkflowExecution.output_data) -} - -// .flyteidl.core.TypedInterface output_interface = 3; +// .flyteidl.core.TypedInterface output_interface = 2; inline bool CloudEventWorkflowExecution::has_output_interface() const { return this != internal_default_instance() && output_interface_ != nullptr; } @@ -914,52 +829,7 @@ inline void CloudEventWorkflowExecution::set_allocated_output_interface(::flytei // @@protoc_insertion_point(field_set_allocated:flyteidl.event.CloudEventWorkflowExecution.output_interface) } -// .flyteidl.core.LiteralMap input_data = 4; -inline bool CloudEventWorkflowExecution::has_input_data() const { - return this != internal_default_instance() && input_data_ != nullptr; -} -inline const ::flyteidl::core::LiteralMap& CloudEventWorkflowExecution::input_data() const { - const ::flyteidl::core::LiteralMap* p = input_data_; - // @@protoc_insertion_point(field_get:flyteidl.event.CloudEventWorkflowExecution.input_data) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_LiteralMap_default_instance_); -} -inline ::flyteidl::core::LiteralMap* CloudEventWorkflowExecution::release_input_data() { - // @@protoc_insertion_point(field_release:flyteidl.event.CloudEventWorkflowExecution.input_data) - - ::flyteidl::core::LiteralMap* temp = input_data_; - input_data_ = nullptr; - return temp; -} -inline ::flyteidl::core::LiteralMap* CloudEventWorkflowExecution::mutable_input_data() { - - if (input_data_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::LiteralMap>(GetArenaNoVirtual()); - input_data_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.event.CloudEventWorkflowExecution.input_data) - return input_data_; -} -inline void CloudEventWorkflowExecution::set_allocated_input_data(::flyteidl::core::LiteralMap* input_data) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(input_data_); - } - if (input_data) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - input_data = ::google::protobuf::internal::GetOwnedMessage( - message_arena, input_data, submessage_arena); - } - - } else { - - } - input_data_ = input_data; - // @@protoc_insertion_point(field_set_allocated:flyteidl.event.CloudEventWorkflowExecution.input_data) -} - -// repeated .flyteidl.core.ArtifactID artifact_ids = 5; +// repeated .flyteidl.core.ArtifactID artifact_ids = 3; inline int CloudEventWorkflowExecution::artifact_ids_size() const { return artifact_ids_.size(); } @@ -986,7 +856,7 @@ CloudEventWorkflowExecution::artifact_ids() const { return artifact_ids_; } -// .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; +// .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; inline bool CloudEventWorkflowExecution::has_reference_execution() const { return this != internal_default_instance() && reference_execution_ != 
nullptr; } @@ -1031,7 +901,7 @@ inline void CloudEventWorkflowExecution::set_allocated_reference_execution(::fly // @@protoc_insertion_point(field_set_allocated:flyteidl.event.CloudEventWorkflowExecution.reference_execution) } -// string principal = 7; +// string principal = 5; inline void CloudEventWorkflowExecution::clear_principal() { principal_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } @@ -1084,7 +954,7 @@ inline void CloudEventWorkflowExecution::set_allocated_principal(::std::string* // @@protoc_insertion_point(field_set_allocated:flyteidl.event.CloudEventWorkflowExecution.principal) } -// .flyteidl.core.Identifier launch_plan_id = 8; +// .flyteidl.core.Identifier launch_plan_id = 6; inline bool CloudEventWorkflowExecution::has_launch_plan_id() const { return this != internal_default_instance() && launch_plan_id_ != nullptr; } @@ -1223,52 +1093,7 @@ inline void CloudEventNodeExecution::set_allocated_task_exec_id(::flyteidl::core // @@protoc_insertion_point(field_set_allocated:flyteidl.event.CloudEventNodeExecution.task_exec_id) } -// .flyteidl.core.LiteralMap output_data = 3; -inline bool CloudEventNodeExecution::has_output_data() const { - return this != internal_default_instance() && output_data_ != nullptr; -} -inline const ::flyteidl::core::LiteralMap& CloudEventNodeExecution::output_data() const { - const ::flyteidl::core::LiteralMap* p = output_data_; - // @@protoc_insertion_point(field_get:flyteidl.event.CloudEventNodeExecution.output_data) - return p != nullptr ? 
*p : *reinterpret_cast( - &::flyteidl::core::_LiteralMap_default_instance_); -} -inline ::flyteidl::core::LiteralMap* CloudEventNodeExecution::release_output_data() { - // @@protoc_insertion_point(field_release:flyteidl.event.CloudEventNodeExecution.output_data) - - ::flyteidl::core::LiteralMap* temp = output_data_; - output_data_ = nullptr; - return temp; -} -inline ::flyteidl::core::LiteralMap* CloudEventNodeExecution::mutable_output_data() { - - if (output_data_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::LiteralMap>(GetArenaNoVirtual()); - output_data_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.event.CloudEventNodeExecution.output_data) - return output_data_; -} -inline void CloudEventNodeExecution::set_allocated_output_data(::flyteidl::core::LiteralMap* output_data) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(output_data_); - } - if (output_data) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - output_data = ::google::protobuf::internal::GetOwnedMessage( - message_arena, output_data, submessage_arena); - } - - } else { - - } - output_data_ = output_data; - // @@protoc_insertion_point(field_set_allocated:flyteidl.event.CloudEventNodeExecution.output_data) -} - -// .flyteidl.core.TypedInterface output_interface = 4; +// .flyteidl.core.TypedInterface output_interface = 3; inline bool CloudEventNodeExecution::has_output_interface() const { return this != internal_default_instance() && output_interface_ != nullptr; } @@ -1313,52 +1138,7 @@ inline void CloudEventNodeExecution::set_allocated_output_interface(::flyteidl:: // @@protoc_insertion_point(field_set_allocated:flyteidl.event.CloudEventNodeExecution.output_interface) } -// .flyteidl.core.LiteralMap input_data = 5; -inline bool CloudEventNodeExecution::has_input_data() const { - return this != 
internal_default_instance() && input_data_ != nullptr; -} -inline const ::flyteidl::core::LiteralMap& CloudEventNodeExecution::input_data() const { - const ::flyteidl::core::LiteralMap* p = input_data_; - // @@protoc_insertion_point(field_get:flyteidl.event.CloudEventNodeExecution.input_data) - return p != nullptr ? *p : *reinterpret_cast( - &::flyteidl::core::_LiteralMap_default_instance_); -} -inline ::flyteidl::core::LiteralMap* CloudEventNodeExecution::release_input_data() { - // @@protoc_insertion_point(field_release:flyteidl.event.CloudEventNodeExecution.input_data) - - ::flyteidl::core::LiteralMap* temp = input_data_; - input_data_ = nullptr; - return temp; -} -inline ::flyteidl::core::LiteralMap* CloudEventNodeExecution::mutable_input_data() { - - if (input_data_ == nullptr) { - auto* p = CreateMaybeMessage<::flyteidl::core::LiteralMap>(GetArenaNoVirtual()); - input_data_ = p; - } - // @@protoc_insertion_point(field_mutable:flyteidl.event.CloudEventNodeExecution.input_data) - return input_data_; -} -inline void CloudEventNodeExecution::set_allocated_input_data(::flyteidl::core::LiteralMap* input_data) { - ::google::protobuf::Arena* message_arena = GetArenaNoVirtual(); - if (message_arena == nullptr) { - delete reinterpret_cast< ::google::protobuf::MessageLite*>(input_data_); - } - if (input_data) { - ::google::protobuf::Arena* submessage_arena = nullptr; - if (message_arena != submessage_arena) { - input_data = ::google::protobuf::internal::GetOwnedMessage( - message_arena, input_data, submessage_arena); - } - - } else { - - } - input_data_ = input_data; - // @@protoc_insertion_point(field_set_allocated:flyteidl.event.CloudEventNodeExecution.input_data) -} - -// repeated .flyteidl.core.ArtifactID artifact_ids = 6; +// repeated .flyteidl.core.ArtifactID artifact_ids = 4; inline int CloudEventNodeExecution::artifact_ids_size() const { return artifact_ids_.size(); } @@ -1385,7 +1165,7 @@ CloudEventNodeExecution::artifact_ids() const { return artifact_ids_; } 
-// string principal = 7; +// string principal = 5; inline void CloudEventNodeExecution::clear_principal() { principal_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); } @@ -1438,7 +1218,7 @@ inline void CloudEventNodeExecution::set_allocated_principal(::std::string* prin // @@protoc_insertion_point(field_set_allocated:flyteidl.event.CloudEventNodeExecution.principal) } -// .flyteidl.core.Identifier launch_plan_id = 8; +// .flyteidl.core.Identifier launch_plan_id = 6; inline bool CloudEventNodeExecution::has_launch_plan_id() const { return this != internal_default_instance() && launch_plan_id_ != nullptr; } @@ -1698,73 +1478,73 @@ CloudEventExecutionStart::artifact_ids() const { return artifact_ids_; } -// repeated string artifact_keys = 5; -inline int CloudEventExecutionStart::artifact_keys_size() const { - return artifact_keys_.size(); +// repeated string artifact_trackers = 5; +inline int CloudEventExecutionStart::artifact_trackers_size() const { + return artifact_trackers_.size(); } -inline void CloudEventExecutionStart::clear_artifact_keys() { - artifact_keys_.Clear(); +inline void CloudEventExecutionStart::clear_artifact_trackers() { + artifact_trackers_.Clear(); } -inline const ::std::string& CloudEventExecutionStart::artifact_keys(int index) const { - // @@protoc_insertion_point(field_get:flyteidl.event.CloudEventExecutionStart.artifact_keys) - return artifact_keys_.Get(index); +inline const ::std::string& CloudEventExecutionStart::artifact_trackers(int index) const { + // @@protoc_insertion_point(field_get:flyteidl.event.CloudEventExecutionStart.artifact_trackers) + return artifact_trackers_.Get(index); } -inline ::std::string* CloudEventExecutionStart::mutable_artifact_keys(int index) { - // @@protoc_insertion_point(field_mutable:flyteidl.event.CloudEventExecutionStart.artifact_keys) - return artifact_keys_.Mutable(index); +inline ::std::string* CloudEventExecutionStart::mutable_artifact_trackers(int index) { + // 
@@protoc_insertion_point(field_mutable:flyteidl.event.CloudEventExecutionStart.artifact_trackers) + return artifact_trackers_.Mutable(index); } -inline void CloudEventExecutionStart::set_artifact_keys(int index, const ::std::string& value) { - // @@protoc_insertion_point(field_set:flyteidl.event.CloudEventExecutionStart.artifact_keys) - artifact_keys_.Mutable(index)->assign(value); +inline void CloudEventExecutionStart::set_artifact_trackers(int index, const ::std::string& value) { + // @@protoc_insertion_point(field_set:flyteidl.event.CloudEventExecutionStart.artifact_trackers) + artifact_trackers_.Mutable(index)->assign(value); } #if LANG_CXX11 -inline void CloudEventExecutionStart::set_artifact_keys(int index, ::std::string&& value) { - // @@protoc_insertion_point(field_set:flyteidl.event.CloudEventExecutionStart.artifact_keys) - artifact_keys_.Mutable(index)->assign(std::move(value)); +inline void CloudEventExecutionStart::set_artifact_trackers(int index, ::std::string&& value) { + // @@protoc_insertion_point(field_set:flyteidl.event.CloudEventExecutionStart.artifact_trackers) + artifact_trackers_.Mutable(index)->assign(std::move(value)); } #endif -inline void CloudEventExecutionStart::set_artifact_keys(int index, const char* value) { +inline void CloudEventExecutionStart::set_artifact_trackers(int index, const char* value) { GOOGLE_DCHECK(value != nullptr); - artifact_keys_.Mutable(index)->assign(value); - // @@protoc_insertion_point(field_set_char:flyteidl.event.CloudEventExecutionStart.artifact_keys) + artifact_trackers_.Mutable(index)->assign(value); + // @@protoc_insertion_point(field_set_char:flyteidl.event.CloudEventExecutionStart.artifact_trackers) } -inline void CloudEventExecutionStart::set_artifact_keys(int index, const char* value, size_t size) { - artifact_keys_.Mutable(index)->assign( +inline void CloudEventExecutionStart::set_artifact_trackers(int index, const char* value, size_t size) { + artifact_trackers_.Mutable(index)->assign( 
reinterpret_cast(value), size); - // @@protoc_insertion_point(field_set_pointer:flyteidl.event.CloudEventExecutionStart.artifact_keys) + // @@protoc_insertion_point(field_set_pointer:flyteidl.event.CloudEventExecutionStart.artifact_trackers) } -inline ::std::string* CloudEventExecutionStart::add_artifact_keys() { - // @@protoc_insertion_point(field_add_mutable:flyteidl.event.CloudEventExecutionStart.artifact_keys) - return artifact_keys_.Add(); +inline ::std::string* CloudEventExecutionStart::add_artifact_trackers() { + // @@protoc_insertion_point(field_add_mutable:flyteidl.event.CloudEventExecutionStart.artifact_trackers) + return artifact_trackers_.Add(); } -inline void CloudEventExecutionStart::add_artifact_keys(const ::std::string& value) { - artifact_keys_.Add()->assign(value); - // @@protoc_insertion_point(field_add:flyteidl.event.CloudEventExecutionStart.artifact_keys) +inline void CloudEventExecutionStart::add_artifact_trackers(const ::std::string& value) { + artifact_trackers_.Add()->assign(value); + // @@protoc_insertion_point(field_add:flyteidl.event.CloudEventExecutionStart.artifact_trackers) } #if LANG_CXX11 -inline void CloudEventExecutionStart::add_artifact_keys(::std::string&& value) { - artifact_keys_.Add(std::move(value)); - // @@protoc_insertion_point(field_add:flyteidl.event.CloudEventExecutionStart.artifact_keys) +inline void CloudEventExecutionStart::add_artifact_trackers(::std::string&& value) { + artifact_trackers_.Add(std::move(value)); + // @@protoc_insertion_point(field_add:flyteidl.event.CloudEventExecutionStart.artifact_trackers) } #endif -inline void CloudEventExecutionStart::add_artifact_keys(const char* value) { +inline void CloudEventExecutionStart::add_artifact_trackers(const char* value) { GOOGLE_DCHECK(value != nullptr); - artifact_keys_.Add()->assign(value); - // @@protoc_insertion_point(field_add_char:flyteidl.event.CloudEventExecutionStart.artifact_keys) + artifact_trackers_.Add()->assign(value); + // 
@@protoc_insertion_point(field_add_char:flyteidl.event.CloudEventExecutionStart.artifact_trackers) } -inline void CloudEventExecutionStart::add_artifact_keys(const char* value, size_t size) { - artifact_keys_.Add()->assign(reinterpret_cast(value), size); - // @@protoc_insertion_point(field_add_pointer:flyteidl.event.CloudEventExecutionStart.artifact_keys) +inline void CloudEventExecutionStart::add_artifact_trackers(const char* value, size_t size) { + artifact_trackers_.Add()->assign(reinterpret_cast(value), size); + // @@protoc_insertion_point(field_add_pointer:flyteidl.event.CloudEventExecutionStart.artifact_trackers) } inline const ::google::protobuf::RepeatedPtrField<::std::string>& -CloudEventExecutionStart::artifact_keys() const { - // @@protoc_insertion_point(field_list:flyteidl.event.CloudEventExecutionStart.artifact_keys) - return artifact_keys_; +CloudEventExecutionStart::artifact_trackers() const { + // @@protoc_insertion_point(field_list:flyteidl.event.CloudEventExecutionStart.artifact_trackers) + return artifact_trackers_; } inline ::google::protobuf::RepeatedPtrField<::std::string>* -CloudEventExecutionStart::mutable_artifact_keys() { - // @@protoc_insertion_point(field_mutable_list:flyteidl.event.CloudEventExecutionStart.artifact_keys) - return &artifact_keys_; +CloudEventExecutionStart::mutable_artifact_trackers() { + // @@protoc_insertion_point(field_mutable_list:flyteidl.event.CloudEventExecutionStart.artifact_trackers) + return &artifact_trackers_; } // string principal = 6; diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/admin.grpc.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/service/admin.grpc.pb.cc index cf8c19b8a1..2af2441047 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/admin.grpc.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/service/admin.grpc.pb.cc @@ -44,6 +44,7 @@ static const char* AdminService_method_names[] = { "/flyteidl.service.AdminService/ListExecutions", "/flyteidl.service.AdminService/TerminateExecution", 
"/flyteidl.service.AdminService/GetNodeExecution", + "/flyteidl.service.AdminService/GetDynamicNodeWorkflow", "/flyteidl.service.AdminService/ListNodeExecutions", "/flyteidl.service.AdminService/ListNodeExecutionsForTask", "/flyteidl.service.AdminService/GetNodeExecutionData", @@ -106,35 +107,36 @@ AdminService::Stub::Stub(const std::shared_ptr< ::grpc::ChannelInterface>& chann , rpcmethod_ListExecutions_(AdminService_method_names[21], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) , rpcmethod_TerminateExecution_(AdminService_method_names[22], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) , rpcmethod_GetNodeExecution_(AdminService_method_names[23], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_ListNodeExecutions_(AdminService_method_names[24], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_ListNodeExecutionsForTask_(AdminService_method_names[25], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_GetNodeExecutionData_(AdminService_method_names[26], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_RegisterProject_(AdminService_method_names[27], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_UpdateProject_(AdminService_method_names[28], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_ListProjects_(AdminService_method_names[29], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_CreateWorkflowEvent_(AdminService_method_names[30], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_CreateNodeEvent_(AdminService_method_names[31], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_CreateTaskEvent_(AdminService_method_names[32], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_GetTaskExecution_(AdminService_method_names[33], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_ListTaskExecutions_(AdminService_method_names[34], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , 
rpcmethod_GetTaskExecutionData_(AdminService_method_names[35], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_UpdateProjectDomainAttributes_(AdminService_method_names[36], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_GetProjectDomainAttributes_(AdminService_method_names[37], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_DeleteProjectDomainAttributes_(AdminService_method_names[38], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_UpdateProjectAttributes_(AdminService_method_names[39], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_GetProjectAttributes_(AdminService_method_names[40], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_DeleteProjectAttributes_(AdminService_method_names[41], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_UpdateWorkflowAttributes_(AdminService_method_names[42], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_GetWorkflowAttributes_(AdminService_method_names[43], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_DeleteWorkflowAttributes_(AdminService_method_names[44], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_ListMatchableAttributes_(AdminService_method_names[45], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_ListNamedEntities_(AdminService_method_names[46], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_GetNamedEntity_(AdminService_method_names[47], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_UpdateNamedEntity_(AdminService_method_names[48], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_GetVersion_(AdminService_method_names[49], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_GetDescriptionEntity_(AdminService_method_names[50], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) - , rpcmethod_ListDescriptionEntities_(AdminService_method_names[51], ::grpc::internal::RpcMethod::NORMAL_RPC, 
channel) - , rpcmethod_GetExecutionMetrics_(AdminService_method_names[52], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetDynamicNodeWorkflow_(AdminService_method_names[24], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_ListNodeExecutions_(AdminService_method_names[25], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_ListNodeExecutionsForTask_(AdminService_method_names[26], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetNodeExecutionData_(AdminService_method_names[27], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_RegisterProject_(AdminService_method_names[28], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_UpdateProject_(AdminService_method_names[29], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_ListProjects_(AdminService_method_names[30], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_CreateWorkflowEvent_(AdminService_method_names[31], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_CreateNodeEvent_(AdminService_method_names[32], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_CreateTaskEvent_(AdminService_method_names[33], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetTaskExecution_(AdminService_method_names[34], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_ListTaskExecutions_(AdminService_method_names[35], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetTaskExecutionData_(AdminService_method_names[36], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_UpdateProjectDomainAttributes_(AdminService_method_names[37], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetProjectDomainAttributes_(AdminService_method_names[38], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_DeleteProjectDomainAttributes_(AdminService_method_names[39], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , 
rpcmethod_UpdateProjectAttributes_(AdminService_method_names[40], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetProjectAttributes_(AdminService_method_names[41], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_DeleteProjectAttributes_(AdminService_method_names[42], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_UpdateWorkflowAttributes_(AdminService_method_names[43], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetWorkflowAttributes_(AdminService_method_names[44], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_DeleteWorkflowAttributes_(AdminService_method_names[45], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_ListMatchableAttributes_(AdminService_method_names[46], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_ListNamedEntities_(AdminService_method_names[47], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetNamedEntity_(AdminService_method_names[48], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_UpdateNamedEntity_(AdminService_method_names[49], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetVersion_(AdminService_method_names[50], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetDescriptionEntity_(AdminService_method_names[51], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_ListDescriptionEntities_(AdminService_method_names[52], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetExecutionMetrics_(AdminService_method_names[53], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) {} ::grpc::Status AdminService::Stub::CreateTask(::grpc::ClientContext* context, const ::flyteidl::admin::TaskCreateRequest& request, ::flyteidl::admin::TaskCreateResponse* response) { @@ -809,6 +811,34 @@ ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::NodeExecution>* AdminServi return ::grpc::internal::ClientAsyncResponseReaderFactory< 
::flyteidl::admin::NodeExecution>::Create(channel_.get(), cq, rpcmethod_GetNodeExecution_, context, request, false); } +::grpc::Status AdminService::Stub::GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response) { + return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_GetDynamicNodeWorkflow_, context, request, response); +} + +void AdminService::Stub::experimental_async::GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, std::function f) { + ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_GetDynamicNodeWorkflow_, context, request, response, std::move(f)); +} + +void AdminService::Stub::experimental_async::GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, std::function f) { + ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_GetDynamicNodeWorkflow_, context, request, response, std::move(f)); +} + +void AdminService::Stub::experimental_async::GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { + ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_GetDynamicNodeWorkflow_, context, request, response, reactor); +} + +void AdminService::Stub::experimental_async::GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { + ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), 
stub_->rpcmethod_GetDynamicNodeWorkflow_, context, request, response, reactor); +} + +::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DynamicNodeWorkflowResponse>* AdminService::Stub::AsyncGetDynamicNodeWorkflowRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::grpc::CompletionQueue* cq) { + return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::admin::DynamicNodeWorkflowResponse>::Create(channel_.get(), cq, rpcmethod_GetDynamicNodeWorkflow_, context, request, true); +} + +::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DynamicNodeWorkflowResponse>* AdminService::Stub::PrepareAsyncGetDynamicNodeWorkflowRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::grpc::CompletionQueue* cq) { + return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::admin::DynamicNodeWorkflowResponse>::Create(channel_.get(), cq, rpcmethod_GetDynamicNodeWorkflow_, context, request, false); +} + ::grpc::Status AdminService::Stub::ListNodeExecutions(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest& request, ::flyteidl::admin::NodeExecutionList* response) { return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_ListNodeExecutions_, context, request, response); } @@ -1745,145 +1775,150 @@ AdminService::Service::Service() { AddMethod(new ::grpc::internal::RpcServiceMethod( AdminService_method_names[24], ::grpc::internal::RpcMethod::NORMAL_RPC, + new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::GetDynamicNodeWorkflowRequest, ::flyteidl::admin::DynamicNodeWorkflowResponse>( + std::mem_fn(&AdminService::Service::GetDynamicNodeWorkflow), this))); + AddMethod(new ::grpc::internal::RpcServiceMethod( + AdminService_method_names[25], + ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::NodeExecutionListRequest, 
::flyteidl::admin::NodeExecutionList>( std::mem_fn(&AdminService::Service::ListNodeExecutions), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[25], + AdminService_method_names[26], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::NodeExecutionForTaskListRequest, ::flyteidl::admin::NodeExecutionList>( std::mem_fn(&AdminService::Service::ListNodeExecutionsForTask), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[26], + AdminService_method_names[27], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::NodeExecutionGetDataRequest, ::flyteidl::admin::NodeExecutionGetDataResponse>( std::mem_fn(&AdminService::Service::GetNodeExecutionData), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[27], + AdminService_method_names[28], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::ProjectRegisterRequest, ::flyteidl::admin::ProjectRegisterResponse>( std::mem_fn(&AdminService::Service::RegisterProject), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[28], + AdminService_method_names[29], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::Project, ::flyteidl::admin::ProjectUpdateResponse>( std::mem_fn(&AdminService::Service::UpdateProject), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[29], + AdminService_method_names[30], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::ProjectListRequest, ::flyteidl::admin::Projects>( std::mem_fn(&AdminService::Service::ListProjects), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - 
AdminService_method_names[30], + AdminService_method_names[31], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::WorkflowExecutionEventRequest, ::flyteidl::admin::WorkflowExecutionEventResponse>( std::mem_fn(&AdminService::Service::CreateWorkflowEvent), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[31], + AdminService_method_names[32], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::NodeExecutionEventRequest, ::flyteidl::admin::NodeExecutionEventResponse>( std::mem_fn(&AdminService::Service::CreateNodeEvent), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[32], + AdminService_method_names[33], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::TaskExecutionEventRequest, ::flyteidl::admin::TaskExecutionEventResponse>( std::mem_fn(&AdminService::Service::CreateTaskEvent), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[33], + AdminService_method_names[34], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::TaskExecutionGetRequest, ::flyteidl::admin::TaskExecution>( std::mem_fn(&AdminService::Service::GetTaskExecution), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[34], + AdminService_method_names[35], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::TaskExecutionListRequest, ::flyteidl::admin::TaskExecutionList>( std::mem_fn(&AdminService::Service::ListTaskExecutions), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[35], + AdminService_method_names[36], ::grpc::internal::RpcMethod::NORMAL_RPC, new 
::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::TaskExecutionGetDataRequest, ::flyteidl::admin::TaskExecutionGetDataResponse>( std::mem_fn(&AdminService::Service::GetTaskExecutionData), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[36], + AdminService_method_names[37], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::ProjectDomainAttributesUpdateRequest, ::flyteidl::admin::ProjectDomainAttributesUpdateResponse>( std::mem_fn(&AdminService::Service::UpdateProjectDomainAttributes), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[37], + AdminService_method_names[38], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::ProjectDomainAttributesGetRequest, ::flyteidl::admin::ProjectDomainAttributesGetResponse>( std::mem_fn(&AdminService::Service::GetProjectDomainAttributes), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[38], + AdminService_method_names[39], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::ProjectDomainAttributesDeleteRequest, ::flyteidl::admin::ProjectDomainAttributesDeleteResponse>( std::mem_fn(&AdminService::Service::DeleteProjectDomainAttributes), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[39], + AdminService_method_names[40], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::ProjectAttributesUpdateRequest, ::flyteidl::admin::ProjectAttributesUpdateResponse>( std::mem_fn(&AdminService::Service::UpdateProjectAttributes), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[40], + AdminService_method_names[41], 
::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::ProjectAttributesGetRequest, ::flyteidl::admin::ProjectAttributesGetResponse>( std::mem_fn(&AdminService::Service::GetProjectAttributes), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[41], + AdminService_method_names[42], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::ProjectAttributesDeleteRequest, ::flyteidl::admin::ProjectAttributesDeleteResponse>( std::mem_fn(&AdminService::Service::DeleteProjectAttributes), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[42], + AdminService_method_names[43], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::WorkflowAttributesUpdateRequest, ::flyteidl::admin::WorkflowAttributesUpdateResponse>( std::mem_fn(&AdminService::Service::UpdateWorkflowAttributes), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[43], + AdminService_method_names[44], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::WorkflowAttributesGetRequest, ::flyteidl::admin::WorkflowAttributesGetResponse>( std::mem_fn(&AdminService::Service::GetWorkflowAttributes), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[44], + AdminService_method_names[45], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::WorkflowAttributesDeleteRequest, ::flyteidl::admin::WorkflowAttributesDeleteResponse>( std::mem_fn(&AdminService::Service::DeleteWorkflowAttributes), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[45], + AdminService_method_names[46], 
::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::ListMatchableAttributesRequest, ::flyteidl::admin::ListMatchableAttributesResponse>( std::mem_fn(&AdminService::Service::ListMatchableAttributes), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[46], + AdminService_method_names[47], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::NamedEntityListRequest, ::flyteidl::admin::NamedEntityList>( std::mem_fn(&AdminService::Service::ListNamedEntities), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[47], + AdminService_method_names[48], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::NamedEntityGetRequest, ::flyteidl::admin::NamedEntity>( std::mem_fn(&AdminService::Service::GetNamedEntity), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[48], + AdminService_method_names[49], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::NamedEntityUpdateRequest, ::flyteidl::admin::NamedEntityUpdateResponse>( std::mem_fn(&AdminService::Service::UpdateNamedEntity), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[49], + AdminService_method_names[50], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::GetVersionRequest, ::flyteidl::admin::GetVersionResponse>( std::mem_fn(&AdminService::Service::GetVersion), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[50], + AdminService_method_names[51], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::ObjectGetRequest, 
::flyteidl::admin::DescriptionEntity>( std::mem_fn(&AdminService::Service::GetDescriptionEntity), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[51], + AdminService_method_names[52], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::DescriptionEntityListRequest, ::flyteidl::admin::DescriptionEntityList>( std::mem_fn(&AdminService::Service::ListDescriptionEntities), this))); AddMethod(new ::grpc::internal::RpcServiceMethod( - AdminService_method_names[52], + AdminService_method_names[53], ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AdminService::Service, ::flyteidl::admin::WorkflowExecutionGetMetricsRequest, ::flyteidl::admin::WorkflowExecutionGetMetricsResponse>( std::mem_fn(&AdminService::Service::GetExecutionMetrics), this))); @@ -2060,6 +2095,13 @@ ::grpc::Status AdminService::Service::GetNodeExecution(::grpc::ServerContext* co return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } +::grpc::Status AdminService::Service::GetDynamicNodeWorkflow(::grpc::ServerContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response) { + (void) context; + (void) request; + (void) response; + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); +} + ::grpc::Status AdminService::Service::ListNodeExecutions(::grpc::ServerContext* context, const ::flyteidl::admin::NodeExecutionListRequest* request, ::flyteidl::admin::NodeExecutionList* response) { (void) context; (void) request; diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/admin.grpc.pb.h b/flyteidl/gen/pb-cpp/flyteidl/service/admin.grpc.pb.h index 77f3dc1487..102f1003b4 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/admin.grpc.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/service/admin.grpc.pb.h @@ -66,7 +66,7 @@ class AdminService final { std::unique_ptr< 
::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::Task>> PrepareAsyncGetTask(::grpc::ClientContext* context, const ::flyteidl::admin::ObjectGetRequest& request, ::grpc::CompletionQueue* cq) { return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::Task>>(PrepareAsyncGetTaskRaw(context, request, cq)); } - // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. + // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. virtual ::grpc::Status ListTaskIds(::grpc::ClientContext* context, const ::flyteidl::admin::NamedEntityIdentifierListRequest& request, ::flyteidl::admin::NamedEntityIdentifierList* response) = 0; std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::NamedEntityIdentifierList>> AsyncListTaskIds(::grpc::ClientContext* context, const ::flyteidl::admin::NamedEntityIdentifierListRequest& request, ::grpc::CompletionQueue* cq) { return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::NamedEntityIdentifierList>>(AsyncListTaskIdsRaw(context, request, cq)); @@ -246,6 +246,14 @@ class AdminService final { std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::NodeExecution>> PrepareAsyncGetNodeExecution(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionGetRequest& request, ::grpc::CompletionQueue* cq) { return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::NodeExecution>>(PrepareAsyncGetNodeExecutionRaw(context, request, cq)); } + // Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. 
+ virtual ::grpc::Status GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response) = 0; + std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::DynamicNodeWorkflowResponse>> AsyncGetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::DynamicNodeWorkflowResponse>>(AsyncGetDynamicNodeWorkflowRaw(context, request, cq)); + } + std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::DynamicNodeWorkflowResponse>> PrepareAsyncGetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::DynamicNodeWorkflowResponse>>(PrepareAsyncGetDynamicNodeWorkflowRaw(context, request, cq)); + } // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. 
virtual ::grpc::Status ListNodeExecutions(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest& request, ::flyteidl::admin::NodeExecutionList* response) = 0; std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::NodeExecutionList>> AsyncListNodeExecutions(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest& request, ::grpc::CompletionQueue* cq) { @@ -278,7 +286,7 @@ class AdminService final { std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::ProjectRegisterResponse>> PrepareAsyncRegisterProject(::grpc::ClientContext* context, const ::flyteidl::admin::ProjectRegisterRequest& request, ::grpc::CompletionQueue* cq) { return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::ProjectRegisterResponse>>(PrepareAsyncRegisterProjectRaw(context, request, cq)); } - // Updates an existing :ref:`ref_flyteidl.admin.Project` + // Updates an existing :ref:`ref_flyteidl.admin.Project` // flyteidl.admin.Project should be passed but the domains property should be empty; // it will be ignored in the handler as domains cannot be updated via this API. 
virtual ::grpc::Status UpdateProject(::grpc::ClientContext* context, const ::flyteidl::admin::Project& request, ::flyteidl::admin::ProjectUpdateResponse* response) = 0; @@ -288,7 +296,7 @@ class AdminService final { std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::ProjectUpdateResponse>> PrepareAsyncUpdateProject(::grpc::ClientContext* context, const ::flyteidl::admin::Project& request, ::grpc::CompletionQueue* cq) { return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::ProjectUpdateResponse>>(PrepareAsyncUpdateProjectRaw(context, request, cq)); } - // Fetches a list of :ref:`ref_flyteidl.admin.Project` + // Fetches a list of :ref:`ref_flyteidl.admin.Project` virtual ::grpc::Status ListProjects(::grpc::ClientContext* context, const ::flyteidl::admin::ProjectListRequest& request, ::flyteidl::admin::Projects* response) = 0; std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::Projects>> AsyncListProjects(::grpc::ClientContext* context, const ::flyteidl::admin::ProjectListRequest& request, ::grpc::CompletionQueue* cq) { return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::Projects>>(AsyncListProjectsRaw(context, request, cq)); @@ -492,7 +500,7 @@ class AdminService final { virtual void GetTask(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::Task* response, std::function) = 0; virtual void GetTask(::grpc::ClientContext* context, const ::flyteidl::admin::ObjectGetRequest* request, ::flyteidl::admin::Task* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; virtual void GetTask(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::Task* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. 
+ // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. virtual void ListTaskIds(::grpc::ClientContext* context, const ::flyteidl::admin::NamedEntityIdentifierListRequest* request, ::flyteidl::admin::NamedEntityIdentifierList* response, std::function) = 0; virtual void ListTaskIds(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::NamedEntityIdentifierList* response, std::function) = 0; virtual void ListTaskIds(::grpc::ClientContext* context, const ::flyteidl::admin::NamedEntityIdentifierListRequest* request, ::flyteidl::admin::NamedEntityIdentifierList* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; @@ -606,6 +614,11 @@ class AdminService final { virtual void GetNodeExecution(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::NodeExecution* response, std::function) = 0; virtual void GetNodeExecution(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionGetRequest* request, ::flyteidl::admin::NodeExecution* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; virtual void GetNodeExecution(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::NodeExecution* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; + // Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. 
+ virtual void GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, std::function) = 0; + virtual void GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, std::function) = 0; + virtual void GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; + virtual void GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. virtual void ListNodeExecutions(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest* request, ::flyteidl::admin::NodeExecutionList* response, std::function) = 0; virtual void ListNodeExecutions(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::NodeExecutionList* response, std::function) = 0; @@ -626,14 +639,14 @@ class AdminService final { virtual void RegisterProject(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::ProjectRegisterResponse* response, std::function) = 0; virtual void RegisterProject(::grpc::ClientContext* context, const ::flyteidl::admin::ProjectRegisterRequest* request, ::flyteidl::admin::ProjectRegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; virtual void RegisterProject(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::ProjectRegisterResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - // Updates an existing :ref:`ref_flyteidl.admin.Project` + 
// Updates an existing :ref:`ref_flyteidl.admin.Project` // flyteidl.admin.Project should be passed but the domains property should be empty; // it will be ignored in the handler as domains cannot be updated via this API. virtual void UpdateProject(::grpc::ClientContext* context, const ::flyteidl::admin::Project* request, ::flyteidl::admin::ProjectUpdateResponse* response, std::function) = 0; virtual void UpdateProject(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::ProjectUpdateResponse* response, std::function) = 0; virtual void UpdateProject(::grpc::ClientContext* context, const ::flyteidl::admin::Project* request, ::flyteidl::admin::ProjectUpdateResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; virtual void UpdateProject(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::ProjectUpdateResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; - // Fetches a list of :ref:`ref_flyteidl.admin.Project` + // Fetches a list of :ref:`ref_flyteidl.admin.Project` virtual void ListProjects(::grpc::ClientContext* context, const ::flyteidl::admin::ProjectListRequest* request, ::flyteidl::admin::Projects* response, std::function) = 0; virtual void ListProjects(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::Projects* response, std::function) = 0; virtual void ListProjects(::grpc::ClientContext* context, const ::flyteidl::admin::ProjectListRequest* request, ::flyteidl::admin::Projects* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; @@ -803,6 +816,8 @@ class AdminService final { virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::ExecutionTerminateResponse>* PrepareAsyncTerminateExecutionRaw(::grpc::ClientContext* context, const ::flyteidl::admin::ExecutionTerminateRequest& request, ::grpc::CompletionQueue* cq) = 0; virtual ::grpc::ClientAsyncResponseReaderInterface< 
::flyteidl::admin::NodeExecution>* AsyncGetNodeExecutionRaw(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionGetRequest& request, ::grpc::CompletionQueue* cq) = 0; virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::NodeExecution>* PrepareAsyncGetNodeExecutionRaw(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionGetRequest& request, ::grpc::CompletionQueue* cq) = 0; + virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::DynamicNodeWorkflowResponse>* AsyncGetDynamicNodeWorkflowRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::grpc::CompletionQueue* cq) = 0; + virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::DynamicNodeWorkflowResponse>* PrepareAsyncGetDynamicNodeWorkflowRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::grpc::CompletionQueue* cq) = 0; virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::NodeExecutionList>* AsyncListNodeExecutionsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest& request, ::grpc::CompletionQueue* cq) = 0; virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::NodeExecutionList>* PrepareAsyncListNodeExecutionsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest& request, ::grpc::CompletionQueue* cq) = 0; virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::NodeExecutionList>* AsyncListNodeExecutionsForTaskRaw(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionForTaskListRequest& request, ::grpc::CompletionQueue* cq) = 0; @@ -1033,6 +1048,13 @@ class AdminService final { std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::NodeExecution>> PrepareAsyncGetNodeExecution(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionGetRequest& request, 
::grpc::CompletionQueue* cq) { return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::NodeExecution>>(PrepareAsyncGetNodeExecutionRaw(context, request, cq)); } + ::grpc::Status GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response) override; + std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DynamicNodeWorkflowResponse>> AsyncGetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DynamicNodeWorkflowResponse>>(AsyncGetDynamicNodeWorkflowRaw(context, request, cq)); + } + std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DynamicNodeWorkflowResponse>> PrepareAsyncGetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DynamicNodeWorkflowResponse>>(PrepareAsyncGetDynamicNodeWorkflowRaw(context, request, cq)); + } ::grpc::Status ListNodeExecutions(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest& request, ::flyteidl::admin::NodeExecutionList* response) override; std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::NodeExecutionList>> AsyncListNodeExecutions(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest& request, ::grpc::CompletionQueue* cq) { return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::NodeExecutionList>>(AsyncListNodeExecutionsRaw(context, request, cq)); @@ -1335,6 +1357,10 @@ class AdminService final { void GetNodeExecution(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, 
::flyteidl::admin::NodeExecution* response, std::function) override; void GetNodeExecution(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionGetRequest* request, ::flyteidl::admin::NodeExecution* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; void GetNodeExecution(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::NodeExecution* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; + void GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, std::function) override; + void GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, std::function) override; + void GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; + void GetDynamicNodeWorkflow(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; void ListNodeExecutions(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest* request, ::flyteidl::admin::NodeExecutionList* response, std::function) override; void ListNodeExecutions(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::NodeExecutionList* response, std::function) override; void ListNodeExecutions(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest* request, ::flyteidl::admin::NodeExecutionList* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; @@ -1510,6 +1536,8 @@ class AdminService final { ::grpc::ClientAsyncResponseReader< 
::flyteidl::admin::ExecutionTerminateResponse>* PrepareAsyncTerminateExecutionRaw(::grpc::ClientContext* context, const ::flyteidl::admin::ExecutionTerminateRequest& request, ::grpc::CompletionQueue* cq) override; ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::NodeExecution>* AsyncGetNodeExecutionRaw(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionGetRequest& request, ::grpc::CompletionQueue* cq) override; ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::NodeExecution>* PrepareAsyncGetNodeExecutionRaw(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionGetRequest& request, ::grpc::CompletionQueue* cq) override; + ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DynamicNodeWorkflowResponse>* AsyncGetDynamicNodeWorkflowRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::grpc::CompletionQueue* cq) override; + ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DynamicNodeWorkflowResponse>* PrepareAsyncGetDynamicNodeWorkflowRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest& request, ::grpc::CompletionQueue* cq) override; ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::NodeExecutionList>* AsyncListNodeExecutionsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest& request, ::grpc::CompletionQueue* cq) override; ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::NodeExecutionList>* PrepareAsyncListNodeExecutionsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionListRequest& request, ::grpc::CompletionQueue* cq) override; ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::NodeExecutionList>* AsyncListNodeExecutionsForTaskRaw(::grpc::ClientContext* context, const ::flyteidl::admin::NodeExecutionForTaskListRequest& request, ::grpc::CompletionQueue* cq) override; @@ -1592,6 +1620,7 @@ class AdminService final { const ::grpc::internal::RpcMethod 
rpcmethod_ListExecutions_; const ::grpc::internal::RpcMethod rpcmethod_TerminateExecution_; const ::grpc::internal::RpcMethod rpcmethod_GetNodeExecution_; + const ::grpc::internal::RpcMethod rpcmethod_GetDynamicNodeWorkflow_; const ::grpc::internal::RpcMethod rpcmethod_ListNodeExecutions_; const ::grpc::internal::RpcMethod rpcmethod_ListNodeExecutionsForTask_; const ::grpc::internal::RpcMethod rpcmethod_GetNodeExecutionData_; @@ -1632,7 +1661,7 @@ class AdminService final { virtual ::grpc::Status CreateTask(::grpc::ServerContext* context, const ::flyteidl::admin::TaskCreateRequest* request, ::flyteidl::admin::TaskCreateResponse* response); // Fetch a :ref:`ref_flyteidl.admin.Task` definition. virtual ::grpc::Status GetTask(::grpc::ServerContext* context, const ::flyteidl::admin::ObjectGetRequest* request, ::flyteidl::admin::Task* response); - // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. + // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. virtual ::grpc::Status ListTaskIds(::grpc::ServerContext* context, const ::flyteidl::admin::NamedEntityIdentifierListRequest* request, ::flyteidl::admin::NamedEntityIdentifierList* response); // Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. virtual ::grpc::Status ListTasks(::grpc::ServerContext* context, const ::flyteidl::admin::ResourceListRequest* request, ::flyteidl::admin::TaskList* response); @@ -1680,6 +1709,8 @@ class AdminService final { virtual ::grpc::Status TerminateExecution(::grpc::ServerContext* context, const ::flyteidl::admin::ExecutionTerminateRequest* request, ::flyteidl::admin::ExecutionTerminateResponse* response); // Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. virtual ::grpc::Status GetNodeExecution(::grpc::ServerContext* context, const ::flyteidl::admin::NodeExecutionGetRequest* request, ::flyteidl::admin::NodeExecution* response); + // Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. 
+ virtual ::grpc::Status GetDynamicNodeWorkflow(::grpc::ServerContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response); // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. virtual ::grpc::Status ListNodeExecutions(::grpc::ServerContext* context, const ::flyteidl::admin::NodeExecutionListRequest* request, ::flyteidl::admin::NodeExecutionList* response); // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. @@ -1688,11 +1719,11 @@ class AdminService final { virtual ::grpc::Status GetNodeExecutionData(::grpc::ServerContext* context, const ::flyteidl::admin::NodeExecutionGetDataRequest* request, ::flyteidl::admin::NodeExecutionGetDataResponse* response); // Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. virtual ::grpc::Status RegisterProject(::grpc::ServerContext* context, const ::flyteidl::admin::ProjectRegisterRequest* request, ::flyteidl::admin::ProjectRegisterResponse* response); - // Updates an existing :ref:`ref_flyteidl.admin.Project` + // Updates an existing :ref:`ref_flyteidl.admin.Project` // flyteidl.admin.Project should be passed but the domains property should be empty; // it will be ignored in the handler as domains cannot be updated via this API. virtual ::grpc::Status UpdateProject(::grpc::ServerContext* context, const ::flyteidl::admin::Project* request, ::flyteidl::admin::ProjectUpdateResponse* response); - // Fetches a list of :ref:`ref_flyteidl.admin.Project` + // Fetches a list of :ref:`ref_flyteidl.admin.Project` virtual ::grpc::Status ListProjects(::grpc::ServerContext* context, const ::flyteidl::admin::ProjectListRequest* request, ::flyteidl::admin::Projects* response); // Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. 
virtual ::grpc::Status CreateWorkflowEvent(::grpc::ServerContext* context, const ::flyteidl::admin::WorkflowExecutionEventRequest* request, ::flyteidl::admin::WorkflowExecutionEventResponse* response); @@ -2221,12 +2252,32 @@ class AdminService final { } }; template + class WithAsyncMethod_GetDynamicNodeWorkflow : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithAsyncMethod_GetDynamicNodeWorkflow() { + ::grpc::Service::MarkMethodAsync(24); + } + ~WithAsyncMethod_GetDynamicNodeWorkflow() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetDynamicNodeWorkflow(::grpc::ServerContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + void RequestGetDynamicNodeWorkflow(::grpc::ServerContext* context, ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::DynamicNodeWorkflowResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { + ::grpc::Service::RequestAsyncUnary(24, context, request, response, new_call_cq, notification_cq, tag); + } + }; + template class WithAsyncMethod_ListNodeExecutions : public BaseClass { private: void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_ListNodeExecutions() { - ::grpc::Service::MarkMethodAsync(24); + ::grpc::Service::MarkMethodAsync(25); } ~WithAsyncMethod_ListNodeExecutions() override { BaseClassMustBeDerivedFromService(this); @@ -2237,7 +2288,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListNodeExecutions(::grpc::ServerContext* context, ::flyteidl::admin::NodeExecutionListRequest* request, 
::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::NodeExecutionList>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(24, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(25, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2246,7 +2297,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_ListNodeExecutionsForTask() { - ::grpc::Service::MarkMethodAsync(25); + ::grpc::Service::MarkMethodAsync(26); } ~WithAsyncMethod_ListNodeExecutionsForTask() override { BaseClassMustBeDerivedFromService(this); @@ -2257,7 +2308,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListNodeExecutionsForTask(::grpc::ServerContext* context, ::flyteidl::admin::NodeExecutionForTaskListRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::NodeExecutionList>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(25, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(26, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2266,7 +2317,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_GetNodeExecutionData() { - ::grpc::Service::MarkMethodAsync(26); + ::grpc::Service::MarkMethodAsync(27); } ~WithAsyncMethod_GetNodeExecutionData() override { BaseClassMustBeDerivedFromService(this); @@ -2277,7 +2328,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetNodeExecutionData(::grpc::ServerContext* context, ::flyteidl::admin::NodeExecutionGetDataRequest* request, 
::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::NodeExecutionGetDataResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(26, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(27, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2286,7 +2337,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_RegisterProject() { - ::grpc::Service::MarkMethodAsync(27); + ::grpc::Service::MarkMethodAsync(28); } ~WithAsyncMethod_RegisterProject() override { BaseClassMustBeDerivedFromService(this); @@ -2297,7 +2348,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestRegisterProject(::grpc::ServerContext* context, ::flyteidl::admin::ProjectRegisterRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::ProjectRegisterResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(27, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(28, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2306,7 +2357,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_UpdateProject() { - ::grpc::Service::MarkMethodAsync(28); + ::grpc::Service::MarkMethodAsync(29); } ~WithAsyncMethod_UpdateProject() override { BaseClassMustBeDerivedFromService(this); @@ -2317,7 +2368,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestUpdateProject(::grpc::ServerContext* context, ::flyteidl::admin::Project* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::ProjectUpdateResponse>* 
response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(28, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(29, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2326,7 +2377,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_ListProjects() { - ::grpc::Service::MarkMethodAsync(29); + ::grpc::Service::MarkMethodAsync(30); } ~WithAsyncMethod_ListProjects() override { BaseClassMustBeDerivedFromService(this); @@ -2337,7 +2388,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListProjects(::grpc::ServerContext* context, ::flyteidl::admin::ProjectListRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::Projects>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(29, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(30, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2346,7 +2397,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_CreateWorkflowEvent() { - ::grpc::Service::MarkMethodAsync(30); + ::grpc::Service::MarkMethodAsync(31); } ~WithAsyncMethod_CreateWorkflowEvent() override { BaseClassMustBeDerivedFromService(this); @@ -2357,7 +2408,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestCreateWorkflowEvent(::grpc::ServerContext* context, ::flyteidl::admin::WorkflowExecutionEventRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::WorkflowExecutionEventResponse>* response, ::grpc::CompletionQueue* new_call_cq, 
::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(30, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(31, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2366,7 +2417,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_CreateNodeEvent() { - ::grpc::Service::MarkMethodAsync(31); + ::grpc::Service::MarkMethodAsync(32); } ~WithAsyncMethod_CreateNodeEvent() override { BaseClassMustBeDerivedFromService(this); @@ -2377,7 +2428,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestCreateNodeEvent(::grpc::ServerContext* context, ::flyteidl::admin::NodeExecutionEventRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::NodeExecutionEventResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(31, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(32, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2386,7 +2437,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_CreateTaskEvent() { - ::grpc::Service::MarkMethodAsync(32); + ::grpc::Service::MarkMethodAsync(33); } ~WithAsyncMethod_CreateTaskEvent() override { BaseClassMustBeDerivedFromService(this); @@ -2397,7 +2448,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestCreateTaskEvent(::grpc::ServerContext* context, ::flyteidl::admin::TaskExecutionEventRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::TaskExecutionEventResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void 
*tag) { - ::grpc::Service::RequestAsyncUnary(32, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(33, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2406,7 +2457,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_GetTaskExecution() { - ::grpc::Service::MarkMethodAsync(33); + ::grpc::Service::MarkMethodAsync(34); } ~WithAsyncMethod_GetTaskExecution() override { BaseClassMustBeDerivedFromService(this); @@ -2417,7 +2468,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetTaskExecution(::grpc::ServerContext* context, ::flyteidl::admin::TaskExecutionGetRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::TaskExecution>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(33, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(34, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2426,7 +2477,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_ListTaskExecutions() { - ::grpc::Service::MarkMethodAsync(34); + ::grpc::Service::MarkMethodAsync(35); } ~WithAsyncMethod_ListTaskExecutions() override { BaseClassMustBeDerivedFromService(this); @@ -2437,7 +2488,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListTaskExecutions(::grpc::ServerContext* context, ::flyteidl::admin::TaskExecutionListRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::TaskExecutionList>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(34, context, request, 
response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(35, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2446,7 +2497,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_GetTaskExecutionData() { - ::grpc::Service::MarkMethodAsync(35); + ::grpc::Service::MarkMethodAsync(36); } ~WithAsyncMethod_GetTaskExecutionData() override { BaseClassMustBeDerivedFromService(this); @@ -2457,7 +2508,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetTaskExecutionData(::grpc::ServerContext* context, ::flyteidl::admin::TaskExecutionGetDataRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::TaskExecutionGetDataResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(35, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(36, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2466,7 +2517,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_UpdateProjectDomainAttributes() { - ::grpc::Service::MarkMethodAsync(36); + ::grpc::Service::MarkMethodAsync(37); } ~WithAsyncMethod_UpdateProjectDomainAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -2477,7 +2528,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestUpdateProjectDomainAttributes(::grpc::ServerContext* context, ::flyteidl::admin::ProjectDomainAttributesUpdateRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::ProjectDomainAttributesUpdateResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - 
::grpc::Service::RequestAsyncUnary(36, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(37, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2486,7 +2537,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_GetProjectDomainAttributes() { - ::grpc::Service::MarkMethodAsync(37); + ::grpc::Service::MarkMethodAsync(38); } ~WithAsyncMethod_GetProjectDomainAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -2497,7 +2548,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetProjectDomainAttributes(::grpc::ServerContext* context, ::flyteidl::admin::ProjectDomainAttributesGetRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::ProjectDomainAttributesGetResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(37, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(38, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2506,7 +2557,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_DeleteProjectDomainAttributes() { - ::grpc::Service::MarkMethodAsync(38); + ::grpc::Service::MarkMethodAsync(39); } ~WithAsyncMethod_DeleteProjectDomainAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -2517,7 +2568,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestDeleteProjectDomainAttributes(::grpc::ServerContext* context, ::flyteidl::admin::ProjectDomainAttributesDeleteRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::ProjectDomainAttributesDeleteResponse>* response, ::grpc::CompletionQueue* new_call_cq, 
::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(38, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(39, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2526,7 +2577,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_UpdateProjectAttributes() { - ::grpc::Service::MarkMethodAsync(39); + ::grpc::Service::MarkMethodAsync(40); } ~WithAsyncMethod_UpdateProjectAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -2537,7 +2588,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestUpdateProjectAttributes(::grpc::ServerContext* context, ::flyteidl::admin::ProjectAttributesUpdateRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::ProjectAttributesUpdateResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(39, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(40, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2546,7 +2597,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_GetProjectAttributes() { - ::grpc::Service::MarkMethodAsync(40); + ::grpc::Service::MarkMethodAsync(41); } ~WithAsyncMethod_GetProjectAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -2557,7 +2608,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetProjectAttributes(::grpc::ServerContext* context, ::flyteidl::admin::ProjectAttributesGetRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::ProjectAttributesGetResponse>* response, ::grpc::CompletionQueue* new_call_cq, 
::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(40, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(41, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2566,7 +2617,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_DeleteProjectAttributes() { - ::grpc::Service::MarkMethodAsync(41); + ::grpc::Service::MarkMethodAsync(42); } ~WithAsyncMethod_DeleteProjectAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -2577,7 +2628,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestDeleteProjectAttributes(::grpc::ServerContext* context, ::flyteidl::admin::ProjectAttributesDeleteRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::ProjectAttributesDeleteResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(41, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(42, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2586,7 +2637,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_UpdateWorkflowAttributes() { - ::grpc::Service::MarkMethodAsync(42); + ::grpc::Service::MarkMethodAsync(43); } ~WithAsyncMethod_UpdateWorkflowAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -2597,7 +2648,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestUpdateWorkflowAttributes(::grpc::ServerContext* context, ::flyteidl::admin::WorkflowAttributesUpdateRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::WorkflowAttributesUpdateResponse>* response, 
::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(42, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(43, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2606,7 +2657,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_GetWorkflowAttributes() { - ::grpc::Service::MarkMethodAsync(43); + ::grpc::Service::MarkMethodAsync(44); } ~WithAsyncMethod_GetWorkflowAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -2617,7 +2668,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetWorkflowAttributes(::grpc::ServerContext* context, ::flyteidl::admin::WorkflowAttributesGetRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::WorkflowAttributesGetResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(43, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(44, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2626,7 +2677,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_DeleteWorkflowAttributes() { - ::grpc::Service::MarkMethodAsync(44); + ::grpc::Service::MarkMethodAsync(45); } ~WithAsyncMethod_DeleteWorkflowAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -2637,7 +2688,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestDeleteWorkflowAttributes(::grpc::ServerContext* context, ::flyteidl::admin::WorkflowAttributesDeleteRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::WorkflowAttributesDeleteResponse>* 
response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(44, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(45, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2646,7 +2697,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_ListMatchableAttributes() { - ::grpc::Service::MarkMethodAsync(45); + ::grpc::Service::MarkMethodAsync(46); } ~WithAsyncMethod_ListMatchableAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -2657,7 +2708,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListMatchableAttributes(::grpc::ServerContext* context, ::flyteidl::admin::ListMatchableAttributesRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::ListMatchableAttributesResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(45, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(46, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2666,7 +2717,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_ListNamedEntities() { - ::grpc::Service::MarkMethodAsync(46); + ::grpc::Service::MarkMethodAsync(47); } ~WithAsyncMethod_ListNamedEntities() override { BaseClassMustBeDerivedFromService(this); @@ -2677,7 +2728,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListNamedEntities(::grpc::ServerContext* context, ::flyteidl::admin::NamedEntityListRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::NamedEntityList>* response, 
::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(46, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(47, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2686,7 +2737,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_GetNamedEntity() { - ::grpc::Service::MarkMethodAsync(47); + ::grpc::Service::MarkMethodAsync(48); } ~WithAsyncMethod_GetNamedEntity() override { BaseClassMustBeDerivedFromService(this); @@ -2697,7 +2748,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetNamedEntity(::grpc::ServerContext* context, ::flyteidl::admin::NamedEntityGetRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::NamedEntity>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(47, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(48, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2706,7 +2757,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_UpdateNamedEntity() { - ::grpc::Service::MarkMethodAsync(48); + ::grpc::Service::MarkMethodAsync(49); } ~WithAsyncMethod_UpdateNamedEntity() override { BaseClassMustBeDerivedFromService(this); @@ -2717,7 +2768,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestUpdateNamedEntity(::grpc::ServerContext* context, ::flyteidl::admin::NamedEntityUpdateRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::NamedEntityUpdateResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* 
notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(48, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(49, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2726,7 +2777,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_GetVersion() { - ::grpc::Service::MarkMethodAsync(49); + ::grpc::Service::MarkMethodAsync(50); } ~WithAsyncMethod_GetVersion() override { BaseClassMustBeDerivedFromService(this); @@ -2737,7 +2788,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetVersion(::grpc::ServerContext* context, ::flyteidl::admin::GetVersionRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::GetVersionResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(49, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(50, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2746,7 +2797,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_GetDescriptionEntity() { - ::grpc::Service::MarkMethodAsync(50); + ::grpc::Service::MarkMethodAsync(51); } ~WithAsyncMethod_GetDescriptionEntity() override { BaseClassMustBeDerivedFromService(this); @@ -2757,7 +2808,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetDescriptionEntity(::grpc::ServerContext* context, ::flyteidl::admin::ObjectGetRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::DescriptionEntity>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(50, context, request, 
response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(51, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2766,7 +2817,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_ListDescriptionEntities() { - ::grpc::Service::MarkMethodAsync(51); + ::grpc::Service::MarkMethodAsync(52); } ~WithAsyncMethod_ListDescriptionEntities() override { BaseClassMustBeDerivedFromService(this); @@ -2777,7 +2828,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListDescriptionEntities(::grpc::ServerContext* context, ::flyteidl::admin::DescriptionEntityListRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::DescriptionEntityList>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(51, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(52, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -2786,7 +2837,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithAsyncMethod_GetExecutionMetrics() { - ::grpc::Service::MarkMethodAsync(52); + ::grpc::Service::MarkMethodAsync(53); } ~WithAsyncMethod_GetExecutionMetrics() override { BaseClassMustBeDerivedFromService(this); @@ -2797,10 +2848,10 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetExecutionMetrics(::grpc::ServerContext* context, ::flyteidl::admin::WorkflowExecutionGetMetricsRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::WorkflowExecutionGetMetricsResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(52, context, request, 
response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(53, context, request, response, new_call_cq, notification_cq, tag); } }; - typedef WithAsyncMethod_CreateTask > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > AsyncService; + typedef WithAsyncMethod_CreateTask > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > AsyncService; template class ExperimentalWithCallbackMethod_CreateTask : public BaseClass { private: @@ -3546,12 +3597,43 @@ class AdminService final { virtual void GetNodeExecution(::grpc::ServerContext* context, const ::flyteidl::admin::NodeExecutionGetRequest* request, ::flyteidl::admin::NodeExecution* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } }; template + class ExperimentalWithCallbackMethod_GetDynamicNodeWorkflow : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + ExperimentalWithCallbackMethod_GetDynamicNodeWorkflow() { + ::grpc::Service::experimental().MarkMethodCallback(24, + new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::GetDynamicNodeWorkflowRequest, ::flyteidl::admin::DynamicNodeWorkflowResponse>( + [this](::grpc::ServerContext* context, + const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, + ::flyteidl::admin::DynamicNodeWorkflowResponse* response, + ::grpc::experimental::ServerCallbackRpcController* controller) { + return this->GetDynamicNodeWorkflow(context, request, response, controller); + })); + } + void SetMessageAllocatorFor_GetDynamicNodeWorkflow( + ::grpc::experimental::MessageAllocator< ::flyteidl::admin::GetDynamicNodeWorkflowRequest, ::flyteidl::admin::DynamicNodeWorkflowResponse>* allocator) { + static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::GetDynamicNodeWorkflowRequest, 
::flyteidl::admin::DynamicNodeWorkflowResponse>*>( + ::grpc::Service::experimental().GetHandler(24)) + ->SetMessageAllocator(allocator); + } + ~ExperimentalWithCallbackMethod_GetDynamicNodeWorkflow() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetDynamicNodeWorkflow(::grpc::ServerContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + virtual void GetDynamicNodeWorkflow(::grpc::ServerContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } + }; + template class ExperimentalWithCallbackMethod_ListNodeExecutions : public BaseClass { private: void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_ListNodeExecutions() { - ::grpc::Service::experimental().MarkMethodCallback(24, + ::grpc::Service::experimental().MarkMethodCallback(25, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NodeExecutionListRequest, ::flyteidl::admin::NodeExecutionList>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::NodeExecutionListRequest* request, @@ -3563,7 +3645,7 @@ class AdminService final { void SetMessageAllocatorFor_ListNodeExecutions( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::NodeExecutionListRequest, ::flyteidl::admin::NodeExecutionList>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NodeExecutionListRequest, ::flyteidl::admin::NodeExecutionList>*>( - ::grpc::Service::experimental().GetHandler(24)) + ::grpc::Service::experimental().GetHandler(25)) 
->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_ListNodeExecutions() override { @@ -3582,7 +3664,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_ListNodeExecutionsForTask() { - ::grpc::Service::experimental().MarkMethodCallback(25, + ::grpc::Service::experimental().MarkMethodCallback(26, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NodeExecutionForTaskListRequest, ::flyteidl::admin::NodeExecutionList>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::NodeExecutionForTaskListRequest* request, @@ -3594,7 +3676,7 @@ class AdminService final { void SetMessageAllocatorFor_ListNodeExecutionsForTask( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::NodeExecutionForTaskListRequest, ::flyteidl::admin::NodeExecutionList>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NodeExecutionForTaskListRequest, ::flyteidl::admin::NodeExecutionList>*>( - ::grpc::Service::experimental().GetHandler(25)) + ::grpc::Service::experimental().GetHandler(26)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_ListNodeExecutionsForTask() override { @@ -3613,7 +3695,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_GetNodeExecutionData() { - ::grpc::Service::experimental().MarkMethodCallback(26, + ::grpc::Service::experimental().MarkMethodCallback(27, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NodeExecutionGetDataRequest, ::flyteidl::admin::NodeExecutionGetDataResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::NodeExecutionGetDataRequest* request, @@ -3625,7 +3707,7 @@ class AdminService final { void SetMessageAllocatorFor_GetNodeExecutionData( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::NodeExecutionGetDataRequest, 
::flyteidl::admin::NodeExecutionGetDataResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NodeExecutionGetDataRequest, ::flyteidl::admin::NodeExecutionGetDataResponse>*>( - ::grpc::Service::experimental().GetHandler(26)) + ::grpc::Service::experimental().GetHandler(27)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_GetNodeExecutionData() override { @@ -3644,7 +3726,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_RegisterProject() { - ::grpc::Service::experimental().MarkMethodCallback(27, + ::grpc::Service::experimental().MarkMethodCallback(28, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectRegisterRequest, ::flyteidl::admin::ProjectRegisterResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::ProjectRegisterRequest* request, @@ -3656,7 +3738,7 @@ class AdminService final { void SetMessageAllocatorFor_RegisterProject( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::ProjectRegisterRequest, ::flyteidl::admin::ProjectRegisterResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectRegisterRequest, ::flyteidl::admin::ProjectRegisterResponse>*>( - ::grpc::Service::experimental().GetHandler(27)) + ::grpc::Service::experimental().GetHandler(28)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_RegisterProject() override { @@ -3675,7 +3757,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_UpdateProject() { - ::grpc::Service::experimental().MarkMethodCallback(28, + ::grpc::Service::experimental().MarkMethodCallback(29, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::Project, ::flyteidl::admin::ProjectUpdateResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::Project* request, @@ 
-3687,7 +3769,7 @@ class AdminService final { void SetMessageAllocatorFor_UpdateProject( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::Project, ::flyteidl::admin::ProjectUpdateResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::Project, ::flyteidl::admin::ProjectUpdateResponse>*>( - ::grpc::Service::experimental().GetHandler(28)) + ::grpc::Service::experimental().GetHandler(29)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_UpdateProject() override { @@ -3706,7 +3788,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_ListProjects() { - ::grpc::Service::experimental().MarkMethodCallback(29, + ::grpc::Service::experimental().MarkMethodCallback(30, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectListRequest, ::flyteidl::admin::Projects>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::ProjectListRequest* request, @@ -3718,7 +3800,7 @@ class AdminService final { void SetMessageAllocatorFor_ListProjects( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::ProjectListRequest, ::flyteidl::admin::Projects>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectListRequest, ::flyteidl::admin::Projects>*>( - ::grpc::Service::experimental().GetHandler(29)) + ::grpc::Service::experimental().GetHandler(30)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_ListProjects() override { @@ -3737,7 +3819,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_CreateWorkflowEvent() { - ::grpc::Service::experimental().MarkMethodCallback(30, + ::grpc::Service::experimental().MarkMethodCallback(31, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::WorkflowExecutionEventRequest, ::flyteidl::admin::WorkflowExecutionEventResponse>( 
[this](::grpc::ServerContext* context, const ::flyteidl::admin::WorkflowExecutionEventRequest* request, @@ -3749,7 +3831,7 @@ class AdminService final { void SetMessageAllocatorFor_CreateWorkflowEvent( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::WorkflowExecutionEventRequest, ::flyteidl::admin::WorkflowExecutionEventResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::WorkflowExecutionEventRequest, ::flyteidl::admin::WorkflowExecutionEventResponse>*>( - ::grpc::Service::experimental().GetHandler(30)) + ::grpc::Service::experimental().GetHandler(31)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_CreateWorkflowEvent() override { @@ -3768,7 +3850,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_CreateNodeEvent() { - ::grpc::Service::experimental().MarkMethodCallback(31, + ::grpc::Service::experimental().MarkMethodCallback(32, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NodeExecutionEventRequest, ::flyteidl::admin::NodeExecutionEventResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::NodeExecutionEventRequest* request, @@ -3780,7 +3862,7 @@ class AdminService final { void SetMessageAllocatorFor_CreateNodeEvent( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::NodeExecutionEventRequest, ::flyteidl::admin::NodeExecutionEventResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NodeExecutionEventRequest, ::flyteidl::admin::NodeExecutionEventResponse>*>( - ::grpc::Service::experimental().GetHandler(31)) + ::grpc::Service::experimental().GetHandler(32)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_CreateNodeEvent() override { @@ -3799,7 +3881,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_CreateTaskEvent() { 
- ::grpc::Service::experimental().MarkMethodCallback(32, + ::grpc::Service::experimental().MarkMethodCallback(33, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::TaskExecutionEventRequest, ::flyteidl::admin::TaskExecutionEventResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::TaskExecutionEventRequest* request, @@ -3811,7 +3893,7 @@ class AdminService final { void SetMessageAllocatorFor_CreateTaskEvent( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::TaskExecutionEventRequest, ::flyteidl::admin::TaskExecutionEventResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::TaskExecutionEventRequest, ::flyteidl::admin::TaskExecutionEventResponse>*>( - ::grpc::Service::experimental().GetHandler(32)) + ::grpc::Service::experimental().GetHandler(33)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_CreateTaskEvent() override { @@ -3830,7 +3912,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_GetTaskExecution() { - ::grpc::Service::experimental().MarkMethodCallback(33, + ::grpc::Service::experimental().MarkMethodCallback(34, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::TaskExecutionGetRequest, ::flyteidl::admin::TaskExecution>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::TaskExecutionGetRequest* request, @@ -3842,7 +3924,7 @@ class AdminService final { void SetMessageAllocatorFor_GetTaskExecution( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::TaskExecutionGetRequest, ::flyteidl::admin::TaskExecution>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::TaskExecutionGetRequest, ::flyteidl::admin::TaskExecution>*>( - ::grpc::Service::experimental().GetHandler(33)) + ::grpc::Service::experimental().GetHandler(34)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_GetTaskExecution() 
override { @@ -3861,7 +3943,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_ListTaskExecutions() { - ::grpc::Service::experimental().MarkMethodCallback(34, + ::grpc::Service::experimental().MarkMethodCallback(35, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::TaskExecutionListRequest, ::flyteidl::admin::TaskExecutionList>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::TaskExecutionListRequest* request, @@ -3873,7 +3955,7 @@ class AdminService final { void SetMessageAllocatorFor_ListTaskExecutions( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::TaskExecutionListRequest, ::flyteidl::admin::TaskExecutionList>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::TaskExecutionListRequest, ::flyteidl::admin::TaskExecutionList>*>( - ::grpc::Service::experimental().GetHandler(34)) + ::grpc::Service::experimental().GetHandler(35)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_ListTaskExecutions() override { @@ -3892,7 +3974,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_GetTaskExecutionData() { - ::grpc::Service::experimental().MarkMethodCallback(35, + ::grpc::Service::experimental().MarkMethodCallback(36, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::TaskExecutionGetDataRequest, ::flyteidl::admin::TaskExecutionGetDataResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::TaskExecutionGetDataRequest* request, @@ -3904,7 +3986,7 @@ class AdminService final { void SetMessageAllocatorFor_GetTaskExecutionData( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::TaskExecutionGetDataRequest, ::flyteidl::admin::TaskExecutionGetDataResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::TaskExecutionGetDataRequest, 
::flyteidl::admin::TaskExecutionGetDataResponse>*>( - ::grpc::Service::experimental().GetHandler(35)) + ::grpc::Service::experimental().GetHandler(36)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_GetTaskExecutionData() override { @@ -3923,7 +4005,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_UpdateProjectDomainAttributes() { - ::grpc::Service::experimental().MarkMethodCallback(36, + ::grpc::Service::experimental().MarkMethodCallback(37, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectDomainAttributesUpdateRequest, ::flyteidl::admin::ProjectDomainAttributesUpdateResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::ProjectDomainAttributesUpdateRequest* request, @@ -3935,7 +4017,7 @@ class AdminService final { void SetMessageAllocatorFor_UpdateProjectDomainAttributes( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::ProjectDomainAttributesUpdateRequest, ::flyteidl::admin::ProjectDomainAttributesUpdateResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectDomainAttributesUpdateRequest, ::flyteidl::admin::ProjectDomainAttributesUpdateResponse>*>( - ::grpc::Service::experimental().GetHandler(36)) + ::grpc::Service::experimental().GetHandler(37)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_UpdateProjectDomainAttributes() override { @@ -3954,7 +4036,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_GetProjectDomainAttributes() { - ::grpc::Service::experimental().MarkMethodCallback(37, + ::grpc::Service::experimental().MarkMethodCallback(38, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectDomainAttributesGetRequest, ::flyteidl::admin::ProjectDomainAttributesGetResponse>( [this](::grpc::ServerContext* context, const 
::flyteidl::admin::ProjectDomainAttributesGetRequest* request, @@ -3966,7 +4048,7 @@ class AdminService final { void SetMessageAllocatorFor_GetProjectDomainAttributes( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::ProjectDomainAttributesGetRequest, ::flyteidl::admin::ProjectDomainAttributesGetResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectDomainAttributesGetRequest, ::flyteidl::admin::ProjectDomainAttributesGetResponse>*>( - ::grpc::Service::experimental().GetHandler(37)) + ::grpc::Service::experimental().GetHandler(38)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_GetProjectDomainAttributes() override { @@ -3985,7 +4067,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_DeleteProjectDomainAttributes() { - ::grpc::Service::experimental().MarkMethodCallback(38, + ::grpc::Service::experimental().MarkMethodCallback(39, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectDomainAttributesDeleteRequest, ::flyteidl::admin::ProjectDomainAttributesDeleteResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::ProjectDomainAttributesDeleteRequest* request, @@ -3997,7 +4079,7 @@ class AdminService final { void SetMessageAllocatorFor_DeleteProjectDomainAttributes( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::ProjectDomainAttributesDeleteRequest, ::flyteidl::admin::ProjectDomainAttributesDeleteResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectDomainAttributesDeleteRequest, ::flyteidl::admin::ProjectDomainAttributesDeleteResponse>*>( - ::grpc::Service::experimental().GetHandler(38)) + ::grpc::Service::experimental().GetHandler(39)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_DeleteProjectDomainAttributes() override { @@ -4016,7 +4098,7 @@ class AdminService final { void 
BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_UpdateProjectAttributes() { - ::grpc::Service::experimental().MarkMethodCallback(39, + ::grpc::Service::experimental().MarkMethodCallback(40, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectAttributesUpdateRequest, ::flyteidl::admin::ProjectAttributesUpdateResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::ProjectAttributesUpdateRequest* request, @@ -4028,7 +4110,7 @@ class AdminService final { void SetMessageAllocatorFor_UpdateProjectAttributes( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::ProjectAttributesUpdateRequest, ::flyteidl::admin::ProjectAttributesUpdateResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectAttributesUpdateRequest, ::flyteidl::admin::ProjectAttributesUpdateResponse>*>( - ::grpc::Service::experimental().GetHandler(39)) + ::grpc::Service::experimental().GetHandler(40)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_UpdateProjectAttributes() override { @@ -4047,7 +4129,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_GetProjectAttributes() { - ::grpc::Service::experimental().MarkMethodCallback(40, + ::grpc::Service::experimental().MarkMethodCallback(41, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectAttributesGetRequest, ::flyteidl::admin::ProjectAttributesGetResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::ProjectAttributesGetRequest* request, @@ -4059,7 +4141,7 @@ class AdminService final { void SetMessageAllocatorFor_GetProjectAttributes( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::ProjectAttributesGetRequest, ::flyteidl::admin::ProjectAttributesGetResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< 
::flyteidl::admin::ProjectAttributesGetRequest, ::flyteidl::admin::ProjectAttributesGetResponse>*>( - ::grpc::Service::experimental().GetHandler(40)) + ::grpc::Service::experimental().GetHandler(41)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_GetProjectAttributes() override { @@ -4078,7 +4160,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_DeleteProjectAttributes() { - ::grpc::Service::experimental().MarkMethodCallback(41, + ::grpc::Service::experimental().MarkMethodCallback(42, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectAttributesDeleteRequest, ::flyteidl::admin::ProjectAttributesDeleteResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::ProjectAttributesDeleteRequest* request, @@ -4090,7 +4172,7 @@ class AdminService final { void SetMessageAllocatorFor_DeleteProjectAttributes( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::ProjectAttributesDeleteRequest, ::flyteidl::admin::ProjectAttributesDeleteResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ProjectAttributesDeleteRequest, ::flyteidl::admin::ProjectAttributesDeleteResponse>*>( - ::grpc::Service::experimental().GetHandler(41)) + ::grpc::Service::experimental().GetHandler(42)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_DeleteProjectAttributes() override { @@ -4109,7 +4191,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_UpdateWorkflowAttributes() { - ::grpc::Service::experimental().MarkMethodCallback(42, + ::grpc::Service::experimental().MarkMethodCallback(43, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::WorkflowAttributesUpdateRequest, ::flyteidl::admin::WorkflowAttributesUpdateResponse>( [this](::grpc::ServerContext* context, const 
::flyteidl::admin::WorkflowAttributesUpdateRequest* request, @@ -4121,7 +4203,7 @@ class AdminService final { void SetMessageAllocatorFor_UpdateWorkflowAttributes( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::WorkflowAttributesUpdateRequest, ::flyteidl::admin::WorkflowAttributesUpdateResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::WorkflowAttributesUpdateRequest, ::flyteidl::admin::WorkflowAttributesUpdateResponse>*>( - ::grpc::Service::experimental().GetHandler(42)) + ::grpc::Service::experimental().GetHandler(43)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_UpdateWorkflowAttributes() override { @@ -4140,7 +4222,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_GetWorkflowAttributes() { - ::grpc::Service::experimental().MarkMethodCallback(43, + ::grpc::Service::experimental().MarkMethodCallback(44, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::WorkflowAttributesGetRequest, ::flyteidl::admin::WorkflowAttributesGetResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::WorkflowAttributesGetRequest* request, @@ -4152,7 +4234,7 @@ class AdminService final { void SetMessageAllocatorFor_GetWorkflowAttributes( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::WorkflowAttributesGetRequest, ::flyteidl::admin::WorkflowAttributesGetResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::WorkflowAttributesGetRequest, ::flyteidl::admin::WorkflowAttributesGetResponse>*>( - ::grpc::Service::experimental().GetHandler(43)) + ::grpc::Service::experimental().GetHandler(44)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_GetWorkflowAttributes() override { @@ -4171,7 +4253,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: 
ExperimentalWithCallbackMethod_DeleteWorkflowAttributes() { - ::grpc::Service::experimental().MarkMethodCallback(44, + ::grpc::Service::experimental().MarkMethodCallback(45, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::WorkflowAttributesDeleteRequest, ::flyteidl::admin::WorkflowAttributesDeleteResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::WorkflowAttributesDeleteRequest* request, @@ -4183,7 +4265,7 @@ class AdminService final { void SetMessageAllocatorFor_DeleteWorkflowAttributes( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::WorkflowAttributesDeleteRequest, ::flyteidl::admin::WorkflowAttributesDeleteResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::WorkflowAttributesDeleteRequest, ::flyteidl::admin::WorkflowAttributesDeleteResponse>*>( - ::grpc::Service::experimental().GetHandler(44)) + ::grpc::Service::experimental().GetHandler(45)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_DeleteWorkflowAttributes() override { @@ -4202,7 +4284,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_ListMatchableAttributes() { - ::grpc::Service::experimental().MarkMethodCallback(45, + ::grpc::Service::experimental().MarkMethodCallback(46, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ListMatchableAttributesRequest, ::flyteidl::admin::ListMatchableAttributesResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::ListMatchableAttributesRequest* request, @@ -4214,7 +4296,7 @@ class AdminService final { void SetMessageAllocatorFor_ListMatchableAttributes( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::ListMatchableAttributesRequest, ::flyteidl::admin::ListMatchableAttributesResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ListMatchableAttributesRequest, 
::flyteidl::admin::ListMatchableAttributesResponse>*>( - ::grpc::Service::experimental().GetHandler(45)) + ::grpc::Service::experimental().GetHandler(46)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_ListMatchableAttributes() override { @@ -4233,7 +4315,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_ListNamedEntities() { - ::grpc::Service::experimental().MarkMethodCallback(46, + ::grpc::Service::experimental().MarkMethodCallback(47, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NamedEntityListRequest, ::flyteidl::admin::NamedEntityList>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::NamedEntityListRequest* request, @@ -4245,7 +4327,7 @@ class AdminService final { void SetMessageAllocatorFor_ListNamedEntities( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::NamedEntityListRequest, ::flyteidl::admin::NamedEntityList>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NamedEntityListRequest, ::flyteidl::admin::NamedEntityList>*>( - ::grpc::Service::experimental().GetHandler(46)) + ::grpc::Service::experimental().GetHandler(47)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_ListNamedEntities() override { @@ -4264,7 +4346,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_GetNamedEntity() { - ::grpc::Service::experimental().MarkMethodCallback(47, + ::grpc::Service::experimental().MarkMethodCallback(48, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NamedEntityGetRequest, ::flyteidl::admin::NamedEntity>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::NamedEntityGetRequest* request, @@ -4276,7 +4358,7 @@ class AdminService final { void SetMessageAllocatorFor_GetNamedEntity( ::grpc::experimental::MessageAllocator< 
::flyteidl::admin::NamedEntityGetRequest, ::flyteidl::admin::NamedEntity>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NamedEntityGetRequest, ::flyteidl::admin::NamedEntity>*>( - ::grpc::Service::experimental().GetHandler(47)) + ::grpc::Service::experimental().GetHandler(48)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_GetNamedEntity() override { @@ -4295,7 +4377,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_UpdateNamedEntity() { - ::grpc::Service::experimental().MarkMethodCallback(48, + ::grpc::Service::experimental().MarkMethodCallback(49, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NamedEntityUpdateRequest, ::flyteidl::admin::NamedEntityUpdateResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::NamedEntityUpdateRequest* request, @@ -4307,7 +4389,7 @@ class AdminService final { void SetMessageAllocatorFor_UpdateNamedEntity( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::NamedEntityUpdateRequest, ::flyteidl::admin::NamedEntityUpdateResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::NamedEntityUpdateRequest, ::flyteidl::admin::NamedEntityUpdateResponse>*>( - ::grpc::Service::experimental().GetHandler(48)) + ::grpc::Service::experimental().GetHandler(49)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_UpdateNamedEntity() override { @@ -4326,7 +4408,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_GetVersion() { - ::grpc::Service::experimental().MarkMethodCallback(49, + ::grpc::Service::experimental().MarkMethodCallback(50, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::GetVersionRequest, ::flyteidl::admin::GetVersionResponse>( [this](::grpc::ServerContext* context, const 
::flyteidl::admin::GetVersionRequest* request, @@ -4338,7 +4420,7 @@ class AdminService final { void SetMessageAllocatorFor_GetVersion( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::GetVersionRequest, ::flyteidl::admin::GetVersionResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::GetVersionRequest, ::flyteidl::admin::GetVersionResponse>*>( - ::grpc::Service::experimental().GetHandler(49)) + ::grpc::Service::experimental().GetHandler(50)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_GetVersion() override { @@ -4357,7 +4439,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_GetDescriptionEntity() { - ::grpc::Service::experimental().MarkMethodCallback(50, + ::grpc::Service::experimental().MarkMethodCallback(51, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ObjectGetRequest, ::flyteidl::admin::DescriptionEntity>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::ObjectGetRequest* request, @@ -4369,7 +4451,7 @@ class AdminService final { void SetMessageAllocatorFor_GetDescriptionEntity( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::ObjectGetRequest, ::flyteidl::admin::DescriptionEntity>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::ObjectGetRequest, ::flyteidl::admin::DescriptionEntity>*>( - ::grpc::Service::experimental().GetHandler(50)) + ::grpc::Service::experimental().GetHandler(51)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_GetDescriptionEntity() override { @@ -4388,7 +4470,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_ListDescriptionEntities() { - ::grpc::Service::experimental().MarkMethodCallback(51, + ::grpc::Service::experimental().MarkMethodCallback(52, new ::grpc::internal::CallbackUnaryHandler< 
::flyteidl::admin::DescriptionEntityListRequest, ::flyteidl::admin::DescriptionEntityList>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::DescriptionEntityListRequest* request, @@ -4400,7 +4482,7 @@ class AdminService final { void SetMessageAllocatorFor_ListDescriptionEntities( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::DescriptionEntityListRequest, ::flyteidl::admin::DescriptionEntityList>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::DescriptionEntityListRequest, ::flyteidl::admin::DescriptionEntityList>*>( - ::grpc::Service::experimental().GetHandler(51)) + ::grpc::Service::experimental().GetHandler(52)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_ListDescriptionEntities() override { @@ -4419,7 +4501,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithCallbackMethod_GetExecutionMetrics() { - ::grpc::Service::experimental().MarkMethodCallback(52, + ::grpc::Service::experimental().MarkMethodCallback(53, new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::WorkflowExecutionGetMetricsRequest, ::flyteidl::admin::WorkflowExecutionGetMetricsResponse>( [this](::grpc::ServerContext* context, const ::flyteidl::admin::WorkflowExecutionGetMetricsRequest* request, @@ -4431,7 +4513,7 @@ class AdminService final { void SetMessageAllocatorFor_GetExecutionMetrics( ::grpc::experimental::MessageAllocator< ::flyteidl::admin::WorkflowExecutionGetMetricsRequest, ::flyteidl::admin::WorkflowExecutionGetMetricsResponse>* allocator) { static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::WorkflowExecutionGetMetricsRequest, ::flyteidl::admin::WorkflowExecutionGetMetricsResponse>*>( - ::grpc::Service::experimental().GetHandler(52)) + ::grpc::Service::experimental().GetHandler(53)) ->SetMessageAllocator(allocator); } ~ExperimentalWithCallbackMethod_GetExecutionMetrics() override { @@ -4444,7 +4526,7 
@@ class AdminService final { } virtual void GetExecutionMetrics(::grpc::ServerContext* context, const ::flyteidl::admin::WorkflowExecutionGetMetricsRequest* request, ::flyteidl::admin::WorkflowExecutionGetMetricsResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } }; - typedef ExperimentalWithCallbackMethod_CreateTask > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > ExperimentalCallbackService; + typedef ExperimentalWithCallbackMethod_CreateTask > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > ExperimentalCallbackService; template class WithGenericMethod_CreateTask : public BaseClass { private: @@ -4854,12 +4936,29 @@ class AdminService final { } }; template + class WithGenericMethod_GetDynamicNodeWorkflow : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithGenericMethod_GetDynamicNodeWorkflow() { + ::grpc::Service::MarkMethodGeneric(24); + } + ~WithGenericMethod_GetDynamicNodeWorkflow() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetDynamicNodeWorkflow(::grpc::ServerContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + }; + template class WithGenericMethod_ListNodeExecutions : public BaseClass { private: void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_ListNodeExecutions() { - ::grpc::Service::MarkMethodGeneric(24); + ::grpc::Service::MarkMethodGeneric(25); } ~WithGenericMethod_ListNodeExecutions() override { BaseClassMustBeDerivedFromService(this); @@ -4876,7 +4975,7 @@ class 
AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_ListNodeExecutionsForTask() { - ::grpc::Service::MarkMethodGeneric(25); + ::grpc::Service::MarkMethodGeneric(26); } ~WithGenericMethod_ListNodeExecutionsForTask() override { BaseClassMustBeDerivedFromService(this); @@ -4893,7 +4992,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_GetNodeExecutionData() { - ::grpc::Service::MarkMethodGeneric(26); + ::grpc::Service::MarkMethodGeneric(27); } ~WithGenericMethod_GetNodeExecutionData() override { BaseClassMustBeDerivedFromService(this); @@ -4910,7 +5009,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_RegisterProject() { - ::grpc::Service::MarkMethodGeneric(27); + ::grpc::Service::MarkMethodGeneric(28); } ~WithGenericMethod_RegisterProject() override { BaseClassMustBeDerivedFromService(this); @@ -4927,7 +5026,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_UpdateProject() { - ::grpc::Service::MarkMethodGeneric(28); + ::grpc::Service::MarkMethodGeneric(29); } ~WithGenericMethod_UpdateProject() override { BaseClassMustBeDerivedFromService(this); @@ -4944,7 +5043,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_ListProjects() { - ::grpc::Service::MarkMethodGeneric(29); + ::grpc::Service::MarkMethodGeneric(30); } ~WithGenericMethod_ListProjects() override { BaseClassMustBeDerivedFromService(this); @@ -4961,7 +5060,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_CreateWorkflowEvent() { - ::grpc::Service::MarkMethodGeneric(30); + ::grpc::Service::MarkMethodGeneric(31); } ~WithGenericMethod_CreateWorkflowEvent() override { 
BaseClassMustBeDerivedFromService(this); @@ -4978,7 +5077,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_CreateNodeEvent() { - ::grpc::Service::MarkMethodGeneric(31); + ::grpc::Service::MarkMethodGeneric(32); } ~WithGenericMethod_CreateNodeEvent() override { BaseClassMustBeDerivedFromService(this); @@ -4995,7 +5094,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_CreateTaskEvent() { - ::grpc::Service::MarkMethodGeneric(32); + ::grpc::Service::MarkMethodGeneric(33); } ~WithGenericMethod_CreateTaskEvent() override { BaseClassMustBeDerivedFromService(this); @@ -5012,7 +5111,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_GetTaskExecution() { - ::grpc::Service::MarkMethodGeneric(33); + ::grpc::Service::MarkMethodGeneric(34); } ~WithGenericMethod_GetTaskExecution() override { BaseClassMustBeDerivedFromService(this); @@ -5029,7 +5128,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_ListTaskExecutions() { - ::grpc::Service::MarkMethodGeneric(34); + ::grpc::Service::MarkMethodGeneric(35); } ~WithGenericMethod_ListTaskExecutions() override { BaseClassMustBeDerivedFromService(this); @@ -5046,7 +5145,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_GetTaskExecutionData() { - ::grpc::Service::MarkMethodGeneric(35); + ::grpc::Service::MarkMethodGeneric(36); } ~WithGenericMethod_GetTaskExecutionData() override { BaseClassMustBeDerivedFromService(this); @@ -5063,7 +5162,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_UpdateProjectDomainAttributes() { - ::grpc::Service::MarkMethodGeneric(36); + ::grpc::Service::MarkMethodGeneric(37); } 
~WithGenericMethod_UpdateProjectDomainAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -5080,7 +5179,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_GetProjectDomainAttributes() { - ::grpc::Service::MarkMethodGeneric(37); + ::grpc::Service::MarkMethodGeneric(38); } ~WithGenericMethod_GetProjectDomainAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -5097,7 +5196,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_DeleteProjectDomainAttributes() { - ::grpc::Service::MarkMethodGeneric(38); + ::grpc::Service::MarkMethodGeneric(39); } ~WithGenericMethod_DeleteProjectDomainAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -5114,7 +5213,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_UpdateProjectAttributes() { - ::grpc::Service::MarkMethodGeneric(39); + ::grpc::Service::MarkMethodGeneric(40); } ~WithGenericMethod_UpdateProjectAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -5131,7 +5230,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_GetProjectAttributes() { - ::grpc::Service::MarkMethodGeneric(40); + ::grpc::Service::MarkMethodGeneric(41); } ~WithGenericMethod_GetProjectAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -5148,7 +5247,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_DeleteProjectAttributes() { - ::grpc::Service::MarkMethodGeneric(41); + ::grpc::Service::MarkMethodGeneric(42); } ~WithGenericMethod_DeleteProjectAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -5165,7 +5264,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: 
WithGenericMethod_UpdateWorkflowAttributes() { - ::grpc::Service::MarkMethodGeneric(42); + ::grpc::Service::MarkMethodGeneric(43); } ~WithGenericMethod_UpdateWorkflowAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -5182,7 +5281,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_GetWorkflowAttributes() { - ::grpc::Service::MarkMethodGeneric(43); + ::grpc::Service::MarkMethodGeneric(44); } ~WithGenericMethod_GetWorkflowAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -5199,7 +5298,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_DeleteWorkflowAttributes() { - ::grpc::Service::MarkMethodGeneric(44); + ::grpc::Service::MarkMethodGeneric(45); } ~WithGenericMethod_DeleteWorkflowAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -5216,7 +5315,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_ListMatchableAttributes() { - ::grpc::Service::MarkMethodGeneric(45); + ::grpc::Service::MarkMethodGeneric(46); } ~WithGenericMethod_ListMatchableAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -5233,7 +5332,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_ListNamedEntities() { - ::grpc::Service::MarkMethodGeneric(46); + ::grpc::Service::MarkMethodGeneric(47); } ~WithGenericMethod_ListNamedEntities() override { BaseClassMustBeDerivedFromService(this); @@ -5250,7 +5349,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_GetNamedEntity() { - ::grpc::Service::MarkMethodGeneric(47); + ::grpc::Service::MarkMethodGeneric(48); } ~WithGenericMethod_GetNamedEntity() override { BaseClassMustBeDerivedFromService(this); @@ -5267,7 +5366,7 @@ class AdminService 
final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_UpdateNamedEntity() { - ::grpc::Service::MarkMethodGeneric(48); + ::grpc::Service::MarkMethodGeneric(49); } ~WithGenericMethod_UpdateNamedEntity() override { BaseClassMustBeDerivedFromService(this); @@ -5284,7 +5383,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_GetVersion() { - ::grpc::Service::MarkMethodGeneric(49); + ::grpc::Service::MarkMethodGeneric(50); } ~WithGenericMethod_GetVersion() override { BaseClassMustBeDerivedFromService(this); @@ -5301,7 +5400,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_GetDescriptionEntity() { - ::grpc::Service::MarkMethodGeneric(50); + ::grpc::Service::MarkMethodGeneric(51); } ~WithGenericMethod_GetDescriptionEntity() override { BaseClassMustBeDerivedFromService(this); @@ -5318,7 +5417,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_ListDescriptionEntities() { - ::grpc::Service::MarkMethodGeneric(51); + ::grpc::Service::MarkMethodGeneric(52); } ~WithGenericMethod_ListDescriptionEntities() override { BaseClassMustBeDerivedFromService(this); @@ -5335,7 +5434,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithGenericMethod_GetExecutionMetrics() { - ::grpc::Service::MarkMethodGeneric(52); + ::grpc::Service::MarkMethodGeneric(53); } ~WithGenericMethod_GetExecutionMetrics() override { BaseClassMustBeDerivedFromService(this); @@ -5827,12 +5926,32 @@ class AdminService final { } }; template + class WithRawMethod_GetDynamicNodeWorkflow : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithRawMethod_GetDynamicNodeWorkflow() { + ::grpc::Service::MarkMethodRaw(24); + } + 
~WithRawMethod_GetDynamicNodeWorkflow() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetDynamicNodeWorkflow(::grpc::ServerContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + void RequestGetDynamicNodeWorkflow(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { + ::grpc::Service::RequestAsyncUnary(24, context, request, response, new_call_cq, notification_cq, tag); + } + }; + template class WithRawMethod_ListNodeExecutions : public BaseClass { private: void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_ListNodeExecutions() { - ::grpc::Service::MarkMethodRaw(24); + ::grpc::Service::MarkMethodRaw(25); } ~WithRawMethod_ListNodeExecutions() override { BaseClassMustBeDerivedFromService(this); @@ -5843,7 +5962,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListNodeExecutions(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(24, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(25, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -5852,7 +5971,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_ListNodeExecutionsForTask() { - ::grpc::Service::MarkMethodRaw(25); + 
::grpc::Service::MarkMethodRaw(26); } ~WithRawMethod_ListNodeExecutionsForTask() override { BaseClassMustBeDerivedFromService(this); @@ -5863,7 +5982,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListNodeExecutionsForTask(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(25, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(26, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -5872,7 +5991,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_GetNodeExecutionData() { - ::grpc::Service::MarkMethodRaw(26); + ::grpc::Service::MarkMethodRaw(27); } ~WithRawMethod_GetNodeExecutionData() override { BaseClassMustBeDerivedFromService(this); @@ -5883,7 +6002,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetNodeExecutionData(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(26, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(27, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -5892,7 +6011,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_RegisterProject() { - ::grpc::Service::MarkMethodRaw(27); + ::grpc::Service::MarkMethodRaw(28); } ~WithRawMethod_RegisterProject() override { BaseClassMustBeDerivedFromService(this); @@ -5903,7 +6022,7 
@@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestRegisterProject(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(27, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(28, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -5912,7 +6031,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_UpdateProject() { - ::grpc::Service::MarkMethodRaw(28); + ::grpc::Service::MarkMethodRaw(29); } ~WithRawMethod_UpdateProject() override { BaseClassMustBeDerivedFromService(this); @@ -5923,7 +6042,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestUpdateProject(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(28, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(29, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -5932,7 +6051,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_ListProjects() { - ::grpc::Service::MarkMethodRaw(29); + ::grpc::Service::MarkMethodRaw(30); } ~WithRawMethod_ListProjects() override { BaseClassMustBeDerivedFromService(this); @@ -5943,7 +6062,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListProjects(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, 
::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(29, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(30, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -5952,7 +6071,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_CreateWorkflowEvent() { - ::grpc::Service::MarkMethodRaw(30); + ::grpc::Service::MarkMethodRaw(31); } ~WithRawMethod_CreateWorkflowEvent() override { BaseClassMustBeDerivedFromService(this); @@ -5963,7 +6082,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestCreateWorkflowEvent(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(30, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(31, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -5972,7 +6091,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_CreateNodeEvent() { - ::grpc::Service::MarkMethodRaw(31); + ::grpc::Service::MarkMethodRaw(32); } ~WithRawMethod_CreateNodeEvent() override { BaseClassMustBeDerivedFromService(this); @@ -5983,7 +6102,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestCreateNodeEvent(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { 
- ::grpc::Service::RequestAsyncUnary(31, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(32, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -5992,7 +6111,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_CreateTaskEvent() { - ::grpc::Service::MarkMethodRaw(32); + ::grpc::Service::MarkMethodRaw(33); } ~WithRawMethod_CreateTaskEvent() override { BaseClassMustBeDerivedFromService(this); @@ -6003,7 +6122,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestCreateTaskEvent(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(32, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(33, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6012,7 +6131,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_GetTaskExecution() { - ::grpc::Service::MarkMethodRaw(33); + ::grpc::Service::MarkMethodRaw(34); } ~WithRawMethod_GetTaskExecution() override { BaseClassMustBeDerivedFromService(this); @@ -6023,7 +6142,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetTaskExecution(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(33, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(34, context, request, response, 
new_call_cq, notification_cq, tag); } }; template @@ -6032,7 +6151,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_ListTaskExecutions() { - ::grpc::Service::MarkMethodRaw(34); + ::grpc::Service::MarkMethodRaw(35); } ~WithRawMethod_ListTaskExecutions() override { BaseClassMustBeDerivedFromService(this); @@ -6043,7 +6162,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListTaskExecutions(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(34, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(35, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6052,7 +6171,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_GetTaskExecutionData() { - ::grpc::Service::MarkMethodRaw(35); + ::grpc::Service::MarkMethodRaw(36); } ~WithRawMethod_GetTaskExecutionData() override { BaseClassMustBeDerivedFromService(this); @@ -6063,7 +6182,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetTaskExecutionData(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(35, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(36, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6072,7 +6191,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service 
*service) {} public: WithRawMethod_UpdateProjectDomainAttributes() { - ::grpc::Service::MarkMethodRaw(36); + ::grpc::Service::MarkMethodRaw(37); } ~WithRawMethod_UpdateProjectDomainAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -6083,7 +6202,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestUpdateProjectDomainAttributes(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(36, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(37, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6092,7 +6211,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_GetProjectDomainAttributes() { - ::grpc::Service::MarkMethodRaw(37); + ::grpc::Service::MarkMethodRaw(38); } ~WithRawMethod_GetProjectDomainAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -6103,7 +6222,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetProjectDomainAttributes(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(37, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(38, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6112,7 +6231,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_DeleteProjectDomainAttributes() { - 
::grpc::Service::MarkMethodRaw(38); + ::grpc::Service::MarkMethodRaw(39); } ~WithRawMethod_DeleteProjectDomainAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -6123,7 +6242,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestDeleteProjectDomainAttributes(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(38, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(39, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6132,7 +6251,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_UpdateProjectAttributes() { - ::grpc::Service::MarkMethodRaw(39); + ::grpc::Service::MarkMethodRaw(40); } ~WithRawMethod_UpdateProjectAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -6143,7 +6262,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestUpdateProjectAttributes(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(39, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(40, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6152,7 +6271,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_GetProjectAttributes() { - ::grpc::Service::MarkMethodRaw(40); + ::grpc::Service::MarkMethodRaw(41); } ~WithRawMethod_GetProjectAttributes() 
override { BaseClassMustBeDerivedFromService(this); @@ -6163,7 +6282,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetProjectAttributes(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(40, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(41, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6172,7 +6291,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_DeleteProjectAttributes() { - ::grpc::Service::MarkMethodRaw(41); + ::grpc::Service::MarkMethodRaw(42); } ~WithRawMethod_DeleteProjectAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -6183,7 +6302,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestDeleteProjectAttributes(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(41, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(42, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6192,7 +6311,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_UpdateWorkflowAttributes() { - ::grpc::Service::MarkMethodRaw(42); + ::grpc::Service::MarkMethodRaw(43); } ~WithRawMethod_UpdateWorkflowAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -6203,7 +6322,7 @@ class AdminService final { return 
::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestUpdateWorkflowAttributes(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(42, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(43, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6212,7 +6331,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_GetWorkflowAttributes() { - ::grpc::Service::MarkMethodRaw(43); + ::grpc::Service::MarkMethodRaw(44); } ~WithRawMethod_GetWorkflowAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -6223,7 +6342,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetWorkflowAttributes(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(43, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(44, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6232,7 +6351,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_DeleteWorkflowAttributes() { - ::grpc::Service::MarkMethodRaw(44); + ::grpc::Service::MarkMethodRaw(45); } ~WithRawMethod_DeleteWorkflowAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -6243,7 +6362,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestDeleteWorkflowAttributes(::grpc::ServerContext* context, 
::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(44, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(45, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6252,7 +6371,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_ListMatchableAttributes() { - ::grpc::Service::MarkMethodRaw(45); + ::grpc::Service::MarkMethodRaw(46); } ~WithRawMethod_ListMatchableAttributes() override { BaseClassMustBeDerivedFromService(this); @@ -6263,7 +6382,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListMatchableAttributes(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(45, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(46, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6272,7 +6391,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_ListNamedEntities() { - ::grpc::Service::MarkMethodRaw(46); + ::grpc::Service::MarkMethodRaw(47); } ~WithRawMethod_ListNamedEntities() override { BaseClassMustBeDerivedFromService(this); @@ -6283,7 +6402,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListNamedEntities(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, 
::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(46, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(47, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6292,7 +6411,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_GetNamedEntity() { - ::grpc::Service::MarkMethodRaw(47); + ::grpc::Service::MarkMethodRaw(48); } ~WithRawMethod_GetNamedEntity() override { BaseClassMustBeDerivedFromService(this); @@ -6303,7 +6422,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetNamedEntity(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(47, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(48, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6312,7 +6431,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_UpdateNamedEntity() { - ::grpc::Service::MarkMethodRaw(48); + ::grpc::Service::MarkMethodRaw(49); } ~WithRawMethod_UpdateNamedEntity() override { BaseClassMustBeDerivedFromService(this); @@ -6323,7 +6442,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestUpdateNamedEntity(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(48, context, request, response, new_call_cq, notification_cq, tag); + 
::grpc::Service::RequestAsyncUnary(49, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6332,7 +6451,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_GetVersion() { - ::grpc::Service::MarkMethodRaw(49); + ::grpc::Service::MarkMethodRaw(50); } ~WithRawMethod_GetVersion() override { BaseClassMustBeDerivedFromService(this); @@ -6343,7 +6462,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetVersion(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(49, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(50, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6352,7 +6471,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_GetDescriptionEntity() { - ::grpc::Service::MarkMethodRaw(50); + ::grpc::Service::MarkMethodRaw(51); } ~WithRawMethod_GetDescriptionEntity() override { BaseClassMustBeDerivedFromService(this); @@ -6363,7 +6482,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetDescriptionEntity(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(50, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(51, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6372,7 +6491,7 @@ class AdminService final { void 
BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_ListDescriptionEntities() { - ::grpc::Service::MarkMethodRaw(51); + ::grpc::Service::MarkMethodRaw(52); } ~WithRawMethod_ListDescriptionEntities() override { BaseClassMustBeDerivedFromService(this); @@ -6383,7 +6502,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestListDescriptionEntities(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(51, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(52, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -6392,7 +6511,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithRawMethod_GetExecutionMetrics() { - ::grpc::Service::MarkMethodRaw(52); + ::grpc::Service::MarkMethodRaw(53); } ~WithRawMethod_GetExecutionMetrics() override { BaseClassMustBeDerivedFromService(this); @@ -6403,7 +6522,7 @@ class AdminService final { return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } void RequestGetExecutionMetrics(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { - ::grpc::Service::RequestAsyncUnary(52, context, request, response, new_call_cq, notification_cq, tag); + ::grpc::Service::RequestAsyncUnary(53, context, request, response, new_call_cq, notification_cq, tag); } }; template @@ -7007,12 +7126,37 @@ class AdminService final { virtual void GetNodeExecution(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, 
::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } }; template + class ExperimentalWithRawCallbackMethod_GetDynamicNodeWorkflow : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + ExperimentalWithRawCallbackMethod_GetDynamicNodeWorkflow() { + ::grpc::Service::experimental().MarkMethodRawCallback(24, + new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( + [this](::grpc::ServerContext* context, + const ::grpc::ByteBuffer* request, + ::grpc::ByteBuffer* response, + ::grpc::experimental::ServerCallbackRpcController* controller) { + this->GetDynamicNodeWorkflow(context, request, response, controller); + })); + } + ~ExperimentalWithRawCallbackMethod_GetDynamicNodeWorkflow() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetDynamicNodeWorkflow(::grpc::ServerContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + virtual void GetDynamicNodeWorkflow(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } + }; + template class ExperimentalWithRawCallbackMethod_ListNodeExecutions : public BaseClass { private: void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_ListNodeExecutions() { - ::grpc::Service::experimental().MarkMethodRawCallback(24, + ::grpc::Service::experimental().MarkMethodRawCallback(25, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* 
context, const ::grpc::ByteBuffer* request, @@ -7037,7 +7181,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_ListNodeExecutionsForTask() { - ::grpc::Service::experimental().MarkMethodRawCallback(25, + ::grpc::Service::experimental().MarkMethodRawCallback(26, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7062,7 +7206,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_GetNodeExecutionData() { - ::grpc::Service::experimental().MarkMethodRawCallback(26, + ::grpc::Service::experimental().MarkMethodRawCallback(27, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7087,7 +7231,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_RegisterProject() { - ::grpc::Service::experimental().MarkMethodRawCallback(27, + ::grpc::Service::experimental().MarkMethodRawCallback(28, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7112,7 +7256,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_UpdateProject() { - ::grpc::Service::experimental().MarkMethodRawCallback(28, + ::grpc::Service::experimental().MarkMethodRawCallback(29, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7137,7 +7281,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} 
public: ExperimentalWithRawCallbackMethod_ListProjects() { - ::grpc::Service::experimental().MarkMethodRawCallback(29, + ::grpc::Service::experimental().MarkMethodRawCallback(30, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7162,7 +7306,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_CreateWorkflowEvent() { - ::grpc::Service::experimental().MarkMethodRawCallback(30, + ::grpc::Service::experimental().MarkMethodRawCallback(31, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7187,7 +7331,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_CreateNodeEvent() { - ::grpc::Service::experimental().MarkMethodRawCallback(31, + ::grpc::Service::experimental().MarkMethodRawCallback(32, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7212,7 +7356,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_CreateTaskEvent() { - ::grpc::Service::experimental().MarkMethodRawCallback(32, + ::grpc::Service::experimental().MarkMethodRawCallback(33, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7237,7 +7381,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_GetTaskExecution() { - ::grpc::Service::experimental().MarkMethodRawCallback(33, + 
::grpc::Service::experimental().MarkMethodRawCallback(34, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7262,7 +7406,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_ListTaskExecutions() { - ::grpc::Service::experimental().MarkMethodRawCallback(34, + ::grpc::Service::experimental().MarkMethodRawCallback(35, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7287,7 +7431,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_GetTaskExecutionData() { - ::grpc::Service::experimental().MarkMethodRawCallback(35, + ::grpc::Service::experimental().MarkMethodRawCallback(36, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7312,7 +7456,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_UpdateProjectDomainAttributes() { - ::grpc::Service::experimental().MarkMethodRawCallback(36, + ::grpc::Service::experimental().MarkMethodRawCallback(37, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7337,7 +7481,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_GetProjectDomainAttributes() { - ::grpc::Service::experimental().MarkMethodRawCallback(37, + ::grpc::Service::experimental().MarkMethodRawCallback(38, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( 
[this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7362,7 +7506,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_DeleteProjectDomainAttributes() { - ::grpc::Service::experimental().MarkMethodRawCallback(38, + ::grpc::Service::experimental().MarkMethodRawCallback(39, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7387,7 +7531,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_UpdateProjectAttributes() { - ::grpc::Service::experimental().MarkMethodRawCallback(39, + ::grpc::Service::experimental().MarkMethodRawCallback(40, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7412,7 +7556,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_GetProjectAttributes() { - ::grpc::Service::experimental().MarkMethodRawCallback(40, + ::grpc::Service::experimental().MarkMethodRawCallback(41, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7437,7 +7581,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_DeleteProjectAttributes() { - ::grpc::Service::experimental().MarkMethodRawCallback(41, + ::grpc::Service::experimental().MarkMethodRawCallback(42, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7462,7 +7606,7 @@ class AdminService final { void 
BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_UpdateWorkflowAttributes() { - ::grpc::Service::experimental().MarkMethodRawCallback(42, + ::grpc::Service::experimental().MarkMethodRawCallback(43, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7487,7 +7631,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_GetWorkflowAttributes() { - ::grpc::Service::experimental().MarkMethodRawCallback(43, + ::grpc::Service::experimental().MarkMethodRawCallback(44, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7512,7 +7656,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_DeleteWorkflowAttributes() { - ::grpc::Service::experimental().MarkMethodRawCallback(44, + ::grpc::Service::experimental().MarkMethodRawCallback(45, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7537,7 +7681,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_ListMatchableAttributes() { - ::grpc::Service::experimental().MarkMethodRawCallback(45, + ::grpc::Service::experimental().MarkMethodRawCallback(46, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7562,7 +7706,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_ListNamedEntities() { - 
::grpc::Service::experimental().MarkMethodRawCallback(46, + ::grpc::Service::experimental().MarkMethodRawCallback(47, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7587,7 +7731,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_GetNamedEntity() { - ::grpc::Service::experimental().MarkMethodRawCallback(47, + ::grpc::Service::experimental().MarkMethodRawCallback(48, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7612,7 +7756,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_UpdateNamedEntity() { - ::grpc::Service::experimental().MarkMethodRawCallback(48, + ::grpc::Service::experimental().MarkMethodRawCallback(49, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7637,7 +7781,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_GetVersion() { - ::grpc::Service::experimental().MarkMethodRawCallback(49, + ::grpc::Service::experimental().MarkMethodRawCallback(50, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7662,7 +7806,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_GetDescriptionEntity() { - ::grpc::Service::experimental().MarkMethodRawCallback(50, + ::grpc::Service::experimental().MarkMethodRawCallback(51, new ::grpc::internal::CallbackUnaryHandler< 
::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7687,7 +7831,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_ListDescriptionEntities() { - ::grpc::Service::experimental().MarkMethodRawCallback(51, + ::grpc::Service::experimental().MarkMethodRawCallback(52, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -7712,7 +7856,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: ExperimentalWithRawCallbackMethod_GetExecutionMetrics() { - ::grpc::Service::experimental().MarkMethodRawCallback(52, + ::grpc::Service::experimental().MarkMethodRawCallback(53, new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( [this](::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, @@ -8212,12 +8356,32 @@ class AdminService final { virtual ::grpc::Status StreamedGetNodeExecution(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::admin::NodeExecutionGetRequest,::flyteidl::admin::NodeExecution>* server_unary_streamer) = 0; }; template + class WithStreamedUnaryMethod_GetDynamicNodeWorkflow : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithStreamedUnaryMethod_GetDynamicNodeWorkflow() { + ::grpc::Service::MarkMethodStreamed(24, + new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::GetDynamicNodeWorkflowRequest, ::flyteidl::admin::DynamicNodeWorkflowResponse>(std::bind(&WithStreamedUnaryMethod_GetDynamicNodeWorkflow::StreamedGetDynamicNodeWorkflow, this, std::placeholders::_1, std::placeholders::_2))); + } + ~WithStreamedUnaryMethod_GetDynamicNodeWorkflow() override { + BaseClassMustBeDerivedFromService(this); + } + // disable 
regular version of this method + ::grpc::Status GetDynamicNodeWorkflow(::grpc::ServerContext* context, const ::flyteidl::admin::GetDynamicNodeWorkflowRequest* request, ::flyteidl::admin::DynamicNodeWorkflowResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + // replace default version of method with streamed unary + virtual ::grpc::Status StreamedGetDynamicNodeWorkflow(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::admin::GetDynamicNodeWorkflowRequest,::flyteidl::admin::DynamicNodeWorkflowResponse>* server_unary_streamer) = 0; + }; + template class WithStreamedUnaryMethod_ListNodeExecutions : public BaseClass { private: void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_ListNodeExecutions() { - ::grpc::Service::MarkMethodStreamed(24, + ::grpc::Service::MarkMethodStreamed(25, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::NodeExecutionListRequest, ::flyteidl::admin::NodeExecutionList>(std::bind(&WithStreamedUnaryMethod_ListNodeExecutions::StreamedListNodeExecutions, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_ListNodeExecutions() override { @@ -8237,7 +8401,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_ListNodeExecutionsForTask() { - ::grpc::Service::MarkMethodStreamed(25, + ::grpc::Service::MarkMethodStreamed(26, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::NodeExecutionForTaskListRequest, ::flyteidl::admin::NodeExecutionList>(std::bind(&WithStreamedUnaryMethod_ListNodeExecutionsForTask::StreamedListNodeExecutionsForTask, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_ListNodeExecutionsForTask() override { @@ -8257,7 +8421,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: 
WithStreamedUnaryMethod_GetNodeExecutionData() { - ::grpc::Service::MarkMethodStreamed(26, + ::grpc::Service::MarkMethodStreamed(27, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::NodeExecutionGetDataRequest, ::flyteidl::admin::NodeExecutionGetDataResponse>(std::bind(&WithStreamedUnaryMethod_GetNodeExecutionData::StreamedGetNodeExecutionData, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_GetNodeExecutionData() override { @@ -8277,7 +8441,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_RegisterProject() { - ::grpc::Service::MarkMethodStreamed(27, + ::grpc::Service::MarkMethodStreamed(28, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::ProjectRegisterRequest, ::flyteidl::admin::ProjectRegisterResponse>(std::bind(&WithStreamedUnaryMethod_RegisterProject::StreamedRegisterProject, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_RegisterProject() override { @@ -8297,7 +8461,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_UpdateProject() { - ::grpc::Service::MarkMethodStreamed(28, + ::grpc::Service::MarkMethodStreamed(29, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::Project, ::flyteidl::admin::ProjectUpdateResponse>(std::bind(&WithStreamedUnaryMethod_UpdateProject::StreamedUpdateProject, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_UpdateProject() override { @@ -8317,7 +8481,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_ListProjects() { - ::grpc::Service::MarkMethodStreamed(29, + ::grpc::Service::MarkMethodStreamed(30, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::ProjectListRequest, 
::flyteidl::admin::Projects>(std::bind(&WithStreamedUnaryMethod_ListProjects::StreamedListProjects, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_ListProjects() override { @@ -8337,7 +8501,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_CreateWorkflowEvent() { - ::grpc::Service::MarkMethodStreamed(30, + ::grpc::Service::MarkMethodStreamed(31, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::WorkflowExecutionEventRequest, ::flyteidl::admin::WorkflowExecutionEventResponse>(std::bind(&WithStreamedUnaryMethod_CreateWorkflowEvent::StreamedCreateWorkflowEvent, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_CreateWorkflowEvent() override { @@ -8357,7 +8521,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_CreateNodeEvent() { - ::grpc::Service::MarkMethodStreamed(31, + ::grpc::Service::MarkMethodStreamed(32, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::NodeExecutionEventRequest, ::flyteidl::admin::NodeExecutionEventResponse>(std::bind(&WithStreamedUnaryMethod_CreateNodeEvent::StreamedCreateNodeEvent, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_CreateNodeEvent() override { @@ -8377,7 +8541,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_CreateTaskEvent() { - ::grpc::Service::MarkMethodStreamed(32, + ::grpc::Service::MarkMethodStreamed(33, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::TaskExecutionEventRequest, ::flyteidl::admin::TaskExecutionEventResponse>(std::bind(&WithStreamedUnaryMethod_CreateTaskEvent::StreamedCreateTaskEvent, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_CreateTaskEvent() override { @@ -8397,7 +8561,7 @@ class 
AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_GetTaskExecution() { - ::grpc::Service::MarkMethodStreamed(33, + ::grpc::Service::MarkMethodStreamed(34, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::TaskExecutionGetRequest, ::flyteidl::admin::TaskExecution>(std::bind(&WithStreamedUnaryMethod_GetTaskExecution::StreamedGetTaskExecution, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_GetTaskExecution() override { @@ -8417,7 +8581,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_ListTaskExecutions() { - ::grpc::Service::MarkMethodStreamed(34, + ::grpc::Service::MarkMethodStreamed(35, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::TaskExecutionListRequest, ::flyteidl::admin::TaskExecutionList>(std::bind(&WithStreamedUnaryMethod_ListTaskExecutions::StreamedListTaskExecutions, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_ListTaskExecutions() override { @@ -8437,7 +8601,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_GetTaskExecutionData() { - ::grpc::Service::MarkMethodStreamed(35, + ::grpc::Service::MarkMethodStreamed(36, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::TaskExecutionGetDataRequest, ::flyteidl::admin::TaskExecutionGetDataResponse>(std::bind(&WithStreamedUnaryMethod_GetTaskExecutionData::StreamedGetTaskExecutionData, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_GetTaskExecutionData() override { @@ -8457,7 +8621,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_UpdateProjectDomainAttributes() { - ::grpc::Service::MarkMethodStreamed(36, + ::grpc::Service::MarkMethodStreamed(37, new 
::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::ProjectDomainAttributesUpdateRequest, ::flyteidl::admin::ProjectDomainAttributesUpdateResponse>(std::bind(&WithStreamedUnaryMethod_UpdateProjectDomainAttributes::StreamedUpdateProjectDomainAttributes, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_UpdateProjectDomainAttributes() override { @@ -8477,7 +8641,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_GetProjectDomainAttributes() { - ::grpc::Service::MarkMethodStreamed(37, + ::grpc::Service::MarkMethodStreamed(38, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::ProjectDomainAttributesGetRequest, ::flyteidl::admin::ProjectDomainAttributesGetResponse>(std::bind(&WithStreamedUnaryMethod_GetProjectDomainAttributes::StreamedGetProjectDomainAttributes, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_GetProjectDomainAttributes() override { @@ -8497,7 +8661,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_DeleteProjectDomainAttributes() { - ::grpc::Service::MarkMethodStreamed(38, + ::grpc::Service::MarkMethodStreamed(39, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::ProjectDomainAttributesDeleteRequest, ::flyteidl::admin::ProjectDomainAttributesDeleteResponse>(std::bind(&WithStreamedUnaryMethod_DeleteProjectDomainAttributes::StreamedDeleteProjectDomainAttributes, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_DeleteProjectDomainAttributes() override { @@ -8517,7 +8681,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_UpdateProjectAttributes() { - ::grpc::Service::MarkMethodStreamed(39, + ::grpc::Service::MarkMethodStreamed(40, new ::grpc::internal::StreamedUnaryHandler< 
::flyteidl::admin::ProjectAttributesUpdateRequest, ::flyteidl::admin::ProjectAttributesUpdateResponse>(std::bind(&WithStreamedUnaryMethod_UpdateProjectAttributes::StreamedUpdateProjectAttributes, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_UpdateProjectAttributes() override { @@ -8537,7 +8701,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_GetProjectAttributes() { - ::grpc::Service::MarkMethodStreamed(40, + ::grpc::Service::MarkMethodStreamed(41, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::ProjectAttributesGetRequest, ::flyteidl::admin::ProjectAttributesGetResponse>(std::bind(&WithStreamedUnaryMethod_GetProjectAttributes::StreamedGetProjectAttributes, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_GetProjectAttributes() override { @@ -8557,7 +8721,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_DeleteProjectAttributes() { - ::grpc::Service::MarkMethodStreamed(41, + ::grpc::Service::MarkMethodStreamed(42, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::ProjectAttributesDeleteRequest, ::flyteidl::admin::ProjectAttributesDeleteResponse>(std::bind(&WithStreamedUnaryMethod_DeleteProjectAttributes::StreamedDeleteProjectAttributes, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_DeleteProjectAttributes() override { @@ -8577,7 +8741,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_UpdateWorkflowAttributes() { - ::grpc::Service::MarkMethodStreamed(42, + ::grpc::Service::MarkMethodStreamed(43, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::WorkflowAttributesUpdateRequest, 
::flyteidl::admin::WorkflowAttributesUpdateResponse>(std::bind(&WithStreamedUnaryMethod_UpdateWorkflowAttributes::StreamedUpdateWorkflowAttributes, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_UpdateWorkflowAttributes() override { @@ -8597,7 +8761,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_GetWorkflowAttributes() { - ::grpc::Service::MarkMethodStreamed(43, + ::grpc::Service::MarkMethodStreamed(44, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::WorkflowAttributesGetRequest, ::flyteidl::admin::WorkflowAttributesGetResponse>(std::bind(&WithStreamedUnaryMethod_GetWorkflowAttributes::StreamedGetWorkflowAttributes, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_GetWorkflowAttributes() override { @@ -8617,7 +8781,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_DeleteWorkflowAttributes() { - ::grpc::Service::MarkMethodStreamed(44, + ::grpc::Service::MarkMethodStreamed(45, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::WorkflowAttributesDeleteRequest, ::flyteidl::admin::WorkflowAttributesDeleteResponse>(std::bind(&WithStreamedUnaryMethod_DeleteWorkflowAttributes::StreamedDeleteWorkflowAttributes, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_DeleteWorkflowAttributes() override { @@ -8637,7 +8801,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_ListMatchableAttributes() { - ::grpc::Service::MarkMethodStreamed(45, + ::grpc::Service::MarkMethodStreamed(46, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::ListMatchableAttributesRequest, ::flyteidl::admin::ListMatchableAttributesResponse>(std::bind(&WithStreamedUnaryMethod_ListMatchableAttributes::StreamedListMatchableAttributes, 
this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_ListMatchableAttributes() override { @@ -8657,7 +8821,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_ListNamedEntities() { - ::grpc::Service::MarkMethodStreamed(46, + ::grpc::Service::MarkMethodStreamed(47, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::NamedEntityListRequest, ::flyteidl::admin::NamedEntityList>(std::bind(&WithStreamedUnaryMethod_ListNamedEntities::StreamedListNamedEntities, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_ListNamedEntities() override { @@ -8677,7 +8841,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_GetNamedEntity() { - ::grpc::Service::MarkMethodStreamed(47, + ::grpc::Service::MarkMethodStreamed(48, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::NamedEntityGetRequest, ::flyteidl::admin::NamedEntity>(std::bind(&WithStreamedUnaryMethod_GetNamedEntity::StreamedGetNamedEntity, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_GetNamedEntity() override { @@ -8697,7 +8861,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_UpdateNamedEntity() { - ::grpc::Service::MarkMethodStreamed(48, + ::grpc::Service::MarkMethodStreamed(49, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::NamedEntityUpdateRequest, ::flyteidl::admin::NamedEntityUpdateResponse>(std::bind(&WithStreamedUnaryMethod_UpdateNamedEntity::StreamedUpdateNamedEntity, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_UpdateNamedEntity() override { @@ -8717,7 +8881,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_GetVersion() { - 
::grpc::Service::MarkMethodStreamed(49, + ::grpc::Service::MarkMethodStreamed(50, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::GetVersionRequest, ::flyteidl::admin::GetVersionResponse>(std::bind(&WithStreamedUnaryMethod_GetVersion::StreamedGetVersion, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_GetVersion() override { @@ -8737,7 +8901,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_GetDescriptionEntity() { - ::grpc::Service::MarkMethodStreamed(50, + ::grpc::Service::MarkMethodStreamed(51, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::ObjectGetRequest, ::flyteidl::admin::DescriptionEntity>(std::bind(&WithStreamedUnaryMethod_GetDescriptionEntity::StreamedGetDescriptionEntity, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_GetDescriptionEntity() override { @@ -8757,7 +8921,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_ListDescriptionEntities() { - ::grpc::Service::MarkMethodStreamed(51, + ::grpc::Service::MarkMethodStreamed(52, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::DescriptionEntityListRequest, ::flyteidl::admin::DescriptionEntityList>(std::bind(&WithStreamedUnaryMethod_ListDescriptionEntities::StreamedListDescriptionEntities, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_ListDescriptionEntities() override { @@ -8777,7 +8941,7 @@ class AdminService final { void BaseClassMustBeDerivedFromService(const Service *service) {} public: WithStreamedUnaryMethod_GetExecutionMetrics() { - ::grpc::Service::MarkMethodStreamed(52, + ::grpc::Service::MarkMethodStreamed(53, new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::WorkflowExecutionGetMetricsRequest, 
::flyteidl::admin::WorkflowExecutionGetMetricsResponse>(std::bind(&WithStreamedUnaryMethod_GetExecutionMetrics::StreamedGetExecutionMetrics, this, std::placeholders::_1, std::placeholders::_2))); } ~WithStreamedUnaryMethod_GetExecutionMetrics() override { @@ -8791,9 +8955,9 @@ class AdminService final { // replace default version of method with streamed unary virtual ::grpc::Status StreamedGetExecutionMetrics(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::admin::WorkflowExecutionGetMetricsRequest,::flyteidl::admin::WorkflowExecutionGetMetricsResponse>* server_unary_streamer) = 0; }; - typedef WithStreamedUnaryMethod_CreateTask > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > StreamedUnaryService; + typedef WithStreamedUnaryMethod_CreateTask > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > StreamedUnaryService; typedef Service SplitStreamedService; - typedef WithStreamedUnaryMethod_CreateTask > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > StreamedService; + typedef WithStreamedUnaryMethod_CreateTask > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > > StreamedService; }; } // namespace service diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/admin.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/service/admin.pb.cc index 991c8bb312..cb35496b69 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/admin.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/service/admin.pb.cc @@ -52,263 +52,402 @@ const char descriptor_table_protodef_flyteidl_2fservice_2fadmin_2eproto[] = "admin/task_execution.proto\032\034flyteidl/adm" "in/version.proto\032\033flyteidl/admin/common." "proto\032\'flyteidl/admin/description_entity" - ".proto2\204N\n\014AdminService\022m\n\nCreateTask\022!." 
- "flyteidl.admin.TaskCreateRequest\032\".flyte" - "idl.admin.TaskCreateResponse\"\030\202\323\344\223\002\022\"\r/a" - "pi/v1/tasks:\001*\022\210\001\n\007GetTask\022 .flyteidl.ad" - "min.ObjectGetRequest\032\024.flyteidl.admin.Ta" - "sk\"E\202\323\344\223\002\?\022=/api/v1/tasks/{id.project}/{" - "id.domain}/{id.name}/{id.version}\022\227\001\n\013Li" - "stTaskIds\0220.flyteidl.admin.NamedEntityId" - "entifierListRequest\032).flyteidl.admin.Nam" - "edEntityIdentifierList\"+\202\323\344\223\002%\022#/api/v1/" - "task_ids/{project}/{domain}\022\256\001\n\tListTask" + ".proto2\270y\n\014AdminService\022\216\001\n\nCreateTask\022!" + ".flyteidl.admin.TaskCreateRequest\032\".flyt" + "eidl.admin.TaskCreateResponse\"9\202\323\344\223\0023\"\r/" + "api/v1/tasks:\001*Z\037\"\032/api/v1/tasks/org/{id" + ".org}:\001*\022\330\001\n\007GetTask\022 .flyteidl.admin.Ob" + "jectGetRequest\032\024.flyteidl.admin.Task\"\224\001\202" + "\323\344\223\002\215\001\022=/api/v1/tasks/{id.project}/{id.d" + "omain}/{id.name}/{id.version}ZL\022J/api/v1" + "/tasks/org/{id.org}/{id.project}/{id.dom" + "ain}/{id.name}/{id.version}\022\305\001\n\013ListTask" + "Ids\0220.flyteidl.admin.NamedEntityIdentifi" + "erListRequest\032).flyteidl.admin.NamedEnti" + "tyIdentifierList\"Y\202\323\344\223\002S\022#/api/v1/task_i" + "ds/{project}/{domain}Z,\022*/api/v1/tasks/o" + "rg/{org}/{project}/{domain}\022\250\002\n\tListTask" "s\022#.flyteidl.admin.ResourceListRequest\032\030" - ".flyteidl.admin.TaskList\"b\202\323\344\223\002\\\0220/api/v" - "1/tasks/{id.project}/{id.domain}/{id.nam" - "e}Z(\022&/api/v1/tasks/{id.project}/{id.dom" - "ain}\022}\n\016CreateWorkflow\022%.flyteidl.admin." 
- "WorkflowCreateRequest\032&.flyteidl.admin.W" - "orkflowCreateResponse\"\034\202\323\344\223\002\026\"\021/api/v1/w" - "orkflows:\001*\022\224\001\n\013GetWorkflow\022 .flyteidl.a" + ".flyteidl.admin.TaskList\"\333\001\202\323\344\223\002\324\001\0220/api" + "/v1/tasks/{id.project}/{id.domain}/{id.n" + "ame}Z\?\022=/api/v1/tasks/org/{id.org}/{id.p" + "roject}/{id.domain}/{id.name}Z(\022&/api/v1" + "/tasks/{id.project}/{id.domain}Z5\0223/api/" + "v1/tasks/org/{id.org}/{id.project}/{id.d" + "omain}\022\242\001\n\016CreateWorkflow\022%.flyteidl.adm" + "in.WorkflowCreateRequest\032&.flyteidl.admi" + "n.WorkflowCreateResponse\"A\202\323\344\223\002;\"\021/api/v" + "1/workflows:\001*Z#\"\036/api/v1/workflows/org/" + "{id.org}:\001*\022\350\001\n\013GetWorkflow\022 .flyteidl.a" "dmin.ObjectGetRequest\032\030.flyteidl.admin.W" - "orkflow\"I\202\323\344\223\002C\022A/api/v1/workflows/{id.p" - "roject}/{id.domain}/{id.name}/{id.versio" - "n}\022\237\001\n\017ListWorkflowIds\0220.flyteidl.admin." - "NamedEntityIdentifierListRequest\032).flyte" - "idl.admin.NamedEntityIdentifierList\"/\202\323\344" - "\223\002)\022\'/api/v1/workflow_ids/{project}/{dom" - "ain}\022\276\001\n\rListWorkflows\022#.flyteidl.admin." - "ResourceListRequest\032\034.flyteidl.admin.Wor" - "kflowList\"j\202\323\344\223\002d\0224/api/v1/workflows/{id" - ".project}/{id.domain}/{id.name}Z,\022*/api/" - "v1/workflows/{id.project}/{id.domain}\022\206\001" - "\n\020CreateLaunchPlan\022\'.flyteidl.admin.Laun" - "chPlanCreateRequest\032(.flyteidl.admin.Lau" - "nchPlanCreateResponse\"\037\202\323\344\223\002\031\"\024/api/v1/l" - "aunch_plans:\001*\022\233\001\n\rGetLaunchPlan\022 .flyte" - "idl.admin.ObjectGetRequest\032\032.flyteidl.ad" - "min.LaunchPlan\"L\202\323\344\223\002F\022D/api/v1/launch_p" - "lans/{id.project}/{id.domain}/{id.name}/" - "{id.version}\022\242\001\n\023GetActiveLaunchPlan\022\'.f" - "lyteidl.admin.ActiveLaunchPlanRequest\032\032." 
- "flyteidl.admin.LaunchPlan\"F\202\323\344\223\002@\022>/api/" - "v1/active_launch_plans/{id.project}/{id." - "domain}/{id.name}\022\234\001\n\025ListActiveLaunchPl" - "ans\022+.flyteidl.admin.ActiveLaunchPlanLis" - "tRequest\032\036.flyteidl.admin.LaunchPlanList" - "\"6\202\323\344\223\0020\022./api/v1/active_launch_plans/{p" - "roject}/{domain}\022\244\001\n\021ListLaunchPlanIds\0220" - ".flyteidl.admin.NamedEntityIdentifierLis" - "tRequest\032).flyteidl.admin.NamedEntityIde" - "ntifierList\"2\202\323\344\223\002,\022*/api/v1/launch_plan" - "_ids/{project}/{domain}\022\310\001\n\017ListLaunchPl" - "ans\022#.flyteidl.admin.ResourceListRequest" - "\032\036.flyteidl.admin.LaunchPlanList\"p\202\323\344\223\002j" - "\0227/api/v1/launch_plans/{id.project}/{id." - "domain}/{id.name}Z/\022-/api/v1/launch_plan" - "s/{id.project}/{id.domain}\022\266\001\n\020UpdateLau" - "nchPlan\022\'.flyteidl.admin.LaunchPlanUpdat" - "eRequest\032(.flyteidl.admin.LaunchPlanUpda" - "teResponse\"O\202\323\344\223\002I\032D/api/v1/launch_plans" - "/{id.project}/{id.domain}/{id.name}/{id." 
- "version}:\001*\022\201\001\n\017CreateExecution\022&.flytei" - "dl.admin.ExecutionCreateRequest\032\'.flytei" - "dl.admin.ExecutionCreateResponse\"\035\202\323\344\223\002\027" - "\"\022/api/v1/executions:\001*\022\216\001\n\021RelaunchExec" - "ution\022(.flyteidl.admin.ExecutionRelaunch" - "Request\032\'.flyteidl.admin.ExecutionCreate" - "Response\"&\202\323\344\223\002 \"\033/api/v1/executions/rel" - "aunch:\001*\022\213\001\n\020RecoverExecution\022\'.flyteidl" - ".admin.ExecutionRecoverRequest\032\'.flyteid" - "l.admin.ExecutionCreateResponse\"%\202\323\344\223\002\037\"" - "\032/api/v1/executions/recover:\001*\022\225\001\n\014GetEx" - "ecution\022+.flyteidl.admin.WorkflowExecuti" - "onGetRequest\032\031.flyteidl.admin.Execution\"" - "=\202\323\344\223\0027\0225/api/v1/executions/{id.project}" - "/{id.domain}/{id.name}\022\244\001\n\017UpdateExecuti" - "on\022&.flyteidl.admin.ExecutionUpdateReque" - "st\032\'.flyteidl.admin.ExecutionUpdateRespo" - "nse\"@\202\323\344\223\002:\0325/api/v1/executions/{id.proj" - "ect}/{id.domain}/{id.name}:\001*\022\271\001\n\020GetExe" - "cutionData\022/.flyteidl.admin.WorkflowExec" - "utionGetDataRequest\0320.flyteidl.admin.Wor" - "kflowExecutionGetDataResponse\"B\202\323\344\223\002<\022:/" - "api/v1/data/executions/{id.project}/{id." 
- "domain}/{id.name}\022\211\001\n\016ListExecutions\022#.f" - "lyteidl.admin.ResourceListRequest\032\035.flyt" - "eidl.admin.ExecutionList\"3\202\323\344\223\002-\022+/api/v" - "1/executions/{id.project}/{id.domain}\022\255\001" - "\n\022TerminateExecution\022).flyteidl.admin.Ex" - "ecutionTerminateRequest\032*.flyteidl.admin" - ".ExecutionTerminateResponse\"@\202\323\344\223\002:*5/ap" - "i/v1/executions/{id.project}/{id.domain}" - "/{id.name}:\001*\022\322\001\n\020GetNodeExecution\022\'.fly" - "teidl.admin.NodeExecutionGetRequest\032\035.fl" - "yteidl.admin.NodeExecution\"v\202\323\344\223\002p\022n/api" - "/v1/node_executions/{id.execution_id.pro" - "ject}/{id.execution_id.domain}/{id.execu" - "tion_id.name}/{id.node_id}\022\336\001\n\022ListNodeE" - "xecutions\022(.flyteidl.admin.NodeExecution" - "ListRequest\032!.flyteidl.admin.NodeExecuti" - "onList\"{\202\323\344\223\002u\022s/api/v1/node_executions/" - "{workflow_execution_id.project}/{workflo" - "w_execution_id.domain}/{workflow_executi" - "on_id.name}\022\245\004\n\031ListNodeExecutionsForTas" - "k\022/.flyteidl.admin.NodeExecutionForTaskL" - "istRequest\032!.flyteidl.admin.NodeExecutio" - "nList\"\263\003\202\323\344\223\002\254\003\022\251\003/api/v1/children/task_" - "executions/{task_execution_id.node_execu" - "tion_id.execution_id.project}/{task_exec" - "ution_id.node_execution_id.execution_id." - "domain}/{task_execution_id.node_executio" - "n_id.execution_id.name}/{task_execution_" - "id.node_execution_id.node_id}/{task_exec" - "ution_id.task_id.project}/{task_executio" - "n_id.task_id.domain}/{task_execution_id." - "task_id.name}/{task_execution_id.task_id" - ".version}/{task_execution_id.retry_attem" - "pt}\022\356\001\n\024GetNodeExecutionData\022+.flyteidl." 
- "admin.NodeExecutionGetDataRequest\032,.flyt" - "eidl.admin.NodeExecutionGetDataResponse\"" - "{\202\323\344\223\002u\022s/api/v1/data/node_executions/{i" - "d.execution_id.project}/{id.execution_id" - ".domain}/{id.execution_id.name}/{id.node" - "_id}\022\177\n\017RegisterProject\022&.flyteidl.admin" - ".ProjectRegisterRequest\032\'.flyteidl.admin" - ".ProjectRegisterResponse\"\033\202\323\344\223\002\025\"\020/api/v" - "1/projects:\001*\022q\n\rUpdateProject\022\027.flyteid" - "l.admin.Project\032%.flyteidl.admin.Project" - "UpdateResponse\" \202\323\344\223\002\032\032\025/api/v1/projects" - "/{id}:\001*\022f\n\014ListProjects\022\".flyteidl.admi" - "n.ProjectListRequest\032\030.flyteidl.admin.Pr" - "ojects\"\030\202\323\344\223\002\022\022\020/api/v1/projects\022\231\001\n\023Cre" - "ateWorkflowEvent\022-.flyteidl.admin.Workfl" - "owExecutionEventRequest\032..flyteidl.admin" - ".WorkflowExecutionEventResponse\"#\202\323\344\223\002\035\"" - "\030/api/v1/events/workflows:\001*\022\211\001\n\017CreateN" - "odeEvent\022).flyteidl.admin.NodeExecutionE" - "ventRequest\032*.flyteidl.admin.NodeExecuti" - "onEventResponse\"\037\202\323\344\223\002\031\"\024/api/v1/events/" - "nodes:\001*\022\211\001\n\017CreateTaskEvent\022).flyteidl." - "admin.TaskExecutionEventRequest\032*.flytei" - "dl.admin.TaskExecutionEventResponse\"\037\202\323\344" - "\223\002\031\"\024/api/v1/events/tasks:\001*\022\200\003\n\020GetTask" - "Execution\022\'.flyteidl.admin.TaskExecution" - "GetRequest\032\035.flyteidl.admin.TaskExecutio" - "n\"\243\002\202\323\344\223\002\234\002\022\231\002/api/v1/task_executions/{i" - "d.node_execution_id.execution_id.project" - "}/{id.node_execution_id.execution_id.dom" - "ain}/{id.node_execution_id.execution_id." 
- "name}/{id.node_execution_id.node_id}/{id" - ".task_id.project}/{id.task_id.domain}/{i" - "d.task_id.name}/{id.task_id.version}/{id" - ".retry_attempt}\022\230\002\n\022ListTaskExecutions\022(" - ".flyteidl.admin.TaskExecutionListRequest" - "\032!.flyteidl.admin.TaskExecutionList\"\264\001\202\323" - "\344\223\002\255\001\022\252\001/api/v1/task_executions/{node_ex" - "ecution_id.execution_id.project}/{node_e" - "xecution_id.execution_id.domain}/{node_e" - "xecution_id.execution_id.name}/{node_exe" - "cution_id.node_id}\022\234\003\n\024GetTaskExecutionD" - "ata\022+.flyteidl.admin.TaskExecutionGetDat" - "aRequest\032,.flyteidl.admin.TaskExecutionG" - "etDataResponse\"\250\002\202\323\344\223\002\241\002\022\236\002/api/v1/data/" - "task_executions/{id.node_execution_id.ex" - "ecution_id.project}/{id.node_execution_i" - "d.execution_id.domain}/{id.node_executio" - "n_id.execution_id.name}/{id.node_executi" - "on_id.node_id}/{id.task_id.project}/{id." - "task_id.domain}/{id.task_id.name}/{id.ta" - "sk_id.version}/{id.retry_attempt}\022\343\001\n\035Up" - "dateProjectDomainAttributes\0224.flyteidl.a" - "dmin.ProjectDomainAttributesUpdateReques" - "t\0325.flyteidl.admin.ProjectDomainAttribut" - "esUpdateResponse\"U\202\323\344\223\002O\032J/api/v1/projec" - "t_domain_attributes/{attributes.project}" - "/{attributes.domain}:\001*\022\301\001\n\032GetProjectDo" - "mainAttributes\0221.flyteidl.admin.ProjectD" - "omainAttributesGetRequest\0322.flyteidl.adm" - "in.ProjectDomainAttributesGetResponse\"<\202" - "\323\344\223\0026\0224/api/v1/project_domain_attributes" - "/{project}/{domain}\022\315\001\n\035DeleteProjectDom" - "ainAttributes\0224.flyteidl.admin.ProjectDo" - "mainAttributesDeleteRequest\0325.flyteidl.a" - "dmin.ProjectDomainAttributesDeleteRespon" - "se\"\?\202\323\344\223\0029*4/api/v1/project_domain_attri" - "butes/{project}/{domain}:\001*\022\266\001\n\027UpdatePr" - "ojectAttributes\022..flyteidl.admin.Project" - 
"AttributesUpdateRequest\032/.flyteidl.admin" - ".ProjectAttributesUpdateResponse\":\202\323\344\223\0024" - "\032//api/v1/project_attributes/{attributes" - ".project}:\001*\022\237\001\n\024GetProjectAttributes\022+." - "flyteidl.admin.ProjectAttributesGetReque" - "st\032,.flyteidl.admin.ProjectAttributesGet" - "Response\",\202\323\344\223\002&\022$/api/v1/project_attrib" - "utes/{project}\022\253\001\n\027DeleteProjectAttribut" - "es\022..flyteidl.admin.ProjectAttributesDel" - "eteRequest\032/.flyteidl.admin.ProjectAttri" - "butesDeleteResponse\"/\202\323\344\223\002)*$/api/v1/pro" - "ject_attributes/{project}:\001*\022\344\001\n\030UpdateW" - "orkflowAttributes\022/.flyteidl.admin.Workf" - "lowAttributesUpdateRequest\0320.flyteidl.ad" - "min.WorkflowAttributesUpdateResponse\"e\202\323" - "\344\223\002_\032Z/api/v1/workflow_attributes/{attri" - "butes.project}/{attributes.domain}/{attr" - "ibutes.workflow}:\001*\022\267\001\n\025GetWorkflowAttri" - "butes\022,.flyteidl.admin.WorkflowAttribute" - "sGetRequest\032-.flyteidl.admin.WorkflowAtt" - "ributesGetResponse\"A\202\323\344\223\002;\0229/api/v1/work" - "flow_attributes/{project}/{domain}/{work" - "flow}\022\303\001\n\030DeleteWorkflowAttributes\022/.fly" - "teidl.admin.WorkflowAttributesDeleteRequ" - "est\0320.flyteidl.admin.WorkflowAttributesD" - "eleteResponse\"D\202\323\344\223\002>*9/api/v1/workflow_" - "attributes/{project}/{domain}/{workflow}" - ":\001*\022\240\001\n\027ListMatchableAttributes\022..flytei" - "dl.admin.ListMatchableAttributesRequest\032" - "/.flyteidl.admin.ListMatchableAttributes" - "Response\"$\202\323\344\223\002\036\022\034/api/v1/matchable_attr" - "ibutes\022\237\001\n\021ListNamedEntities\022&.flyteidl." 
+ "orkflow\"\234\001\202\323\344\223\002\225\001\022A/api/v1/workflows/{id" + ".project}/{id.domain}/{id.name}/{id.vers" + "ion}ZP\022N/api/v1/workflows/org/{id.org}/{" + "id.project}/{id.domain}/{id.name}/{id.ve" + "rsion}\022\321\001\n\017ListWorkflowIds\0220.flyteidl.ad" + "min.NamedEntityIdentifierListRequest\032).f" + "lyteidl.admin.NamedEntityIdentifierList\"" + "a\202\323\344\223\002[\022\'/api/v1/workflow_ids/{project}/" + "{domain}Z0\022./api/v1/workflows/org/{org}/" + "{project}/{domain}\022\300\002\n\rListWorkflows\022#.f" + "lyteidl.admin.ResourceListRequest\032\034.flyt" + "eidl.admin.WorkflowList\"\353\001\202\323\344\223\002\344\001\0224/api/" + "v1/workflows/{id.project}/{id.domain}/{i" + "d.name}ZC\022A/api/v1/workflows/org/{id.org" + "}/{id.project}/{id.domain}/{id.name}Z,\022*" + "/api/v1/workflows/{id.project}/{id.domai" + "n}Z9\0227/api/v1/workflows/org/{id.org}/{id" + ".project}/{id.domain}\022\256\001\n\020CreateLaunchPl" + "an\022\'.flyteidl.admin.LaunchPlanCreateRequ" + "est\032(.flyteidl.admin.LaunchPlanCreateRes" + "ponse\"G\202\323\344\223\002A\"\024/api/v1/launch_plans:\001*Z&" + "\"!/api/v1/launch_plans/org/{id.org}:\001*\022\362" + "\001\n\rGetLaunchPlan\022 .flyteidl.admin.Object" + "GetRequest\032\032.flyteidl.admin.LaunchPlan\"\242" + "\001\202\323\344\223\002\233\001\022D/api/v1/launch_plans/{id.proje" + "ct}/{id.domain}/{id.name}/{id.version}ZS" + "\022Q/api/v1/launch_plans/org/{id.org}/{id." + "project}/{id.domain}/{id.name}/{id.versi" + "on}\022\363\001\n\023GetActiveLaunchPlan\022\'.flyteidl.a" + "dmin.ActiveLaunchPlanRequest\032\032.flyteidl." 
+ "admin.LaunchPlan\"\226\001\202\323\344\223\002\217\001\022>/api/v1/acti" + "ve_launch_plans/{id.project}/{id.domain}" + "/{id.name}ZM\022K/api/v1/active_launch_plan" + "s/org/{id.org}/{id.project}/{id.domain}/" + "{id.name}\022\330\001\n\025ListActiveLaunchPlans\022+.fl" + "yteidl.admin.ActiveLaunchPlanListRequest" + "\032\036.flyteidl.admin.LaunchPlanList\"r\202\323\344\223\002l" + "\022./api/v1/active_launch_plans/{project}/" + "{domain}Z:\0228/api/v1/active_launch_plans/" + "org/{org}/{project}/{domain}\022\334\001\n\021ListLau" + "nchPlanIds\0220.flyteidl.admin.NamedEntityI" + "dentifierListRequest\032).flyteidl.admin.Na" + "medEntityIdentifierList\"j\202\323\344\223\002d\022*/api/v1" + "/launch_plan_ids/{project}/{domain}Z6\0224/" + "api/v1/launch_plan_ids/org/{org}/{projec" + "t}/{domain}\022\320\002\n\017ListLaunchPlans\022#.flytei" + "dl.admin.ResourceListRequest\032\036.flyteidl." + "admin.LaunchPlanList\"\367\001\202\323\344\223\002\360\001\0227/api/v1/" + "launch_plans/{id.project}/{id.domain}/{i" + "d.name}ZF\022D/api/v1/launch_plans/org/{id." 
+ "org}/{id.project}/{id.domain}/{id.name}Z" + "/\022-/api/v1/launch_plans/{id.project}/{id" + ".domain}Z<\022:/api/v1/launch_plans/org/{id" + ".org}/{id.project}/{id.domain}\022\215\002\n\020Updat" + "eLaunchPlan\022\'.flyteidl.admin.LaunchPlanU" + "pdateRequest\032(.flyteidl.admin.LaunchPlan" + "UpdateResponse\"\245\001\202\323\344\223\002\236\001\032D/api/v1/launch" + "_plans/{id.project}/{id.domain}/{id.name" + "}/{id.version}:\001*ZS\032Q/api/v1/launch_plan" + "s/org/{id.org}/{id.project}/{id.domain}/" + "{id.name}/{id.version}\022\244\001\n\017CreateExecuti" + "on\022&.flyteidl.admin.ExecutionCreateReque" + "st\032\'.flyteidl.admin.ExecutionCreateRespo" + "nse\"@\202\323\344\223\002:\"\022/api/v1/executions:\001*Z!\032\034/a" + "pi/v1/executions/org/{org}:\001*\022\275\001\n\021Relaun" + "chExecution\022(.flyteidl.admin.ExecutionRe" + "launchRequest\032\'.flyteidl.admin.Execution" + "CreateResponse\"U\202\323\344\223\002O\"\033/api/v1/executio" + "ns/relaunch:\001*Z-\"(/api/v1/executions/org" + "/{id.org}/relaunch:\001*\022\271\001\n\020RecoverExecuti" + "on\022\'.flyteidl.admin.ExecutionRecoverRequ" + "est\032\'.flyteidl.admin.ExecutionCreateResp" + "onse\"S\202\323\344\223\002M\"\032/api/v1/executions/recover" + ":\001*Z,\"\'/api/v1/executions/org/{id.org}/r" + "ecover:\001*\022\334\001\n\014GetExecution\022+.flyteidl.ad" + "min.WorkflowExecutionGetRequest\032\031.flytei" + "dl.admin.Execution\"\203\001\202\323\344\223\002}\0225/api/v1/exe" + "cutions/{id.project}/{id.domain}/{id.nam" + "e}ZD\022B/api/v1/executions/org/{id.org}/{i" + "d.project}/{id.domain}/{id.name}\022\357\001\n\017Upd" + "ateExecution\022&.flyteidl.admin.ExecutionU" + "pdateRequest\032\'.flyteidl.admin.ExecutionU" + "pdateResponse\"\212\001\202\323\344\223\002\203\001\0325/api/v1/executi" + "ons/{id.project}/{id.domain}/{id.name}:\001" + "*ZG\032B/api/v1/executions/org/{id.org}/{id" + ".project}/{id.domain}/{id.name}:\001*\022\206\002\n\020G" + 
"etExecutionData\022/.flyteidl.admin.Workflo" + "wExecutionGetDataRequest\0320.flyteidl.admi" + "n.WorkflowExecutionGetDataResponse\"\216\001\202\323\344" + "\223\002\207\001\022:/api/v1/data/executions/{id.projec" + "t}/{id.domain}/{id.name}ZI\022G/api/v1/data" + "/executions/org/{id.org}/{id.project}/{i" + "d.domain}/{id.name}\022\305\001\n\016ListExecutions\022#" + ".flyteidl.admin.ResourceListRequest\032\035.fl" + "yteidl.admin.ExecutionList\"o\202\323\344\223\002i\022+/api" + "/v1/executions/{id.project}/{id.domain}Z" + ":\0228/api/v1/executions/org/{id.org}/{id.p" + "roject}/{id.domain}\022\370\001\n\022TerminateExecuti" + "on\022).flyteidl.admin.ExecutionTerminateRe" + "quest\032*.flyteidl.admin.ExecutionTerminat" + "eResponse\"\212\001\202\323\344\223\002\203\001*5/api/v1/executions/" + "{id.project}/{id.domain}/{id.name}:\001*ZG*" + "B/api/v1/executions/org/{id.org}/{id.pro" + "ject}/{id.domain}/{id.name}:\001*\022\342\002\n\020GetNo" + "deExecution\022\'.flyteidl.admin.NodeExecuti" + "onGetRequest\032\035.flyteidl.admin.NodeExecut" + "ion\"\205\002\202\323\344\223\002\376\001\022n/api/v1/node_executions/{" + "id.execution_id.project}/{id.execution_i" + "d.domain}/{id.execution_id.name}/{id.nod" + "e_id}Z\213\001\022\210\001/api/v1/node_executions/org/{" + "id.execution_id.org}/{id.execution_id.pr" + "oject}/{id.execution_id.domain}/{id.exec" + "ution_id.name}/{id.node_id}\022\236\003\n\026GetDynam" + "icNodeWorkflow\022-.flyteidl.admin.GetDynam" + "icNodeWorkflowRequest\032+.flyteidl.admin.D" + "ynamicNodeWorkflowResponse\"\247\002\202\323\344\223\002\240\002\022\177/a" + "pi/v1/node_executions/{id.execution_id.p" + "roject}/{id.execution_id.domain}/{id.exe" + "cution_id.name}/{id.node_id}/dynamic_wor" + "kflowZ\234\001\022\231\001/api/v1/node_executions/org/{" + "id.execution_id.org}/{id.execution_id.pr" + "oject}/{id.execution_id.domain}/{id.exec" + "ution_id.name}/{id.node_id}/dynamic_work" + "flow\022\371\002\n\022ListNodeExecutions\022(.flyteidl.a" + 
"dmin.NodeExecutionListRequest\032!.flyteidl" + ".admin.NodeExecutionList\"\225\002\202\323\344\223\002\216\002\022s/api" + "/v1/node_executions/{workflow_execution_" + "id.project}/{workflow_execution_id.domai" + "n}/{workflow_execution_id.name}Z\226\001\022\223\001/ap" + "i/v1/node_executions/org/{workflow_execu" + "tion_id.org}/{workflow_execution_id.proj" + "ect}/{workflow_execution_id.domain}/{wor" + "kflow_execution_id.name}\022\217\010\n\031ListNodeExe" + "cutionsForTask\022/.flyteidl.admin.NodeExec" + "utionForTaskListRequest\032!.flyteidl.admin" + ".NodeExecutionList\"\235\007\202\323\344\223\002\226\007\022\251\003/api/v1/c" + "hildren/task_executions/{task_execution_" + "id.node_execution_id.execution_id.projec" + "t}/{task_execution_id.node_execution_id." + "execution_id.domain}/{task_execution_id." + "node_execution_id.execution_id.name}/{ta" + "sk_execution_id.node_execution_id.node_i" + "d}/{task_execution_id.task_id.project}/{" + "task_execution_id.task_id.domain}/{task_" + "execution_id.task_id.name}/{task_executi" + "on_id.task_id.version}/{task_execution_i" + "d.retry_attempt}Z\347\003\022\344\003/api/v1/children/o" + "rg/{task_execution_id.node_execution_id." + "execution_id.org}/task_executions/{task_" + "execution_id.node_execution_id.execution" + "_id.project}/{task_execution_id.node_exe" + "cution_id.execution_id.domain}/{task_exe" + "cution_id.node_execution_id.execution_id" + ".name}/{task_execution_id.node_execution" + "_id.node_id}/{task_execution_id.task_id." + "project}/{task_execution_id.task_id.doma" + "in}/{task_execution_id.task_id.name}/{ta" + "sk_execution_id.task_id.version}/{task_e" + "xecution_id.retry_attempt}\022\203\003\n\024GetNodeEx" + "ecutionData\022+.flyteidl.admin.NodeExecuti" + "onGetDataRequest\032,.flyteidl.admin.NodeEx" + "ecutionGetDataResponse\"\217\002\202\323\344\223\002\210\002\022s/api/v" + "1/data/node_executions/{id.execution_id." 
+ "project}/{id.execution_id.domain}/{id.ex" + "ecution_id.name}/{id.node_id}Z\220\001\022\215\001/api/" + "v1/data/org/{id.execution_id.org}/node_e" + "xecutions/{id.execution_id.project}/{id." + "execution_id.domain}/{id.execution_id.na" + "me}/{id.node_id}\022\250\001\n\017RegisterProject\022&.f" + "lyteidl.admin.ProjectRegisterRequest\032\'.f" + "lyteidl.admin.ProjectRegisterResponse\"D\202" + "\323\344\223\002>\"\020/api/v1/projects:\001*Z\'\"\"/api/v1/pr" + "ojects/org/{project.org}:\001*\022\227\001\n\rUpdatePr" + "oject\022\027.flyteidl.admin.Project\032%.flyteid" + "l.admin.ProjectUpdateResponse\"F\202\323\344\223\002@\032\025/" + "api/v1/projects/{id}:\001*Z$\032\037/api/v1/proje" + "cts/org/{org}/{id}:\001*\022\204\001\n\014ListProjects\022\"" + ".flyteidl.admin.ProjectListRequest\032\030.fly" + "teidl.admin.Projects\"6\202\323\344\223\0020\022\020/api/v1/pr" + "ojectsZ\034\022\032/api/v1/projects/org/{org}\022\325\001\n" + "\023CreateWorkflowEvent\022-.flyteidl.admin.Wo" + "rkflowExecutionEventRequest\032..flyteidl.a" + "dmin.WorkflowExecutionEventResponse\"_\202\323\344" + "\223\002Y\"\030/api/v1/events/workflows:\001*Z:\"5/api" + "/v1/events/org/{event.execution_id.org}/" + "workflows:\001*\022\304\001\n\017CreateNodeEvent\022).flyte" + "idl.admin.NodeExecutionEventRequest\032*.fl" + "yteidl.admin.NodeExecutionEventResponse\"" + "Z\202\323\344\223\002T\"\024/api/v1/events/nodes:\001*Z9\"4/api" + "/v1/events/org/{event.id.execution_id.or" + "g}/nodes:\001*\022\332\001\n\017CreateTaskEvent\022).flytei" + "dl.admin.TaskExecutionEventRequest\032*.fly" + "teidl.admin.TaskExecutionEventResponse\"p" + "\202\323\344\223\002j\"\024/api/v1/events/tasks:\001*ZO\"J/api/" + "v1/events/org/{event.parent_node_executi" + "on_id.execution_id.org}/tasks:\001*\022\313\005\n\020Get" + "TaskExecution\022\'.flyteidl.admin.TaskExecu" + "tionGetRequest\032\035.flyteidl.admin.TaskExec" + "ution\"\356\004\202\323\344\223\002\347\004\022\231\002/api/v1/task_execution" + 
"s/{id.node_execution_id.execution_id.pro" + "ject}/{id.node_execution_id.execution_id" + ".domain}/{id.node_execution_id.execution" + "_id.name}/{id.node_execution_id.node_id}" + "/{id.task_id.project}/{id.task_id.domain" + "}/{id.task_id.name}/{id.task_id.version}" + "/{id.retry_attempt}Z\310\002\022\305\002/api/v1/task_ex" + "ecutions/org/{id.node_execution_id.execu" + "tion_id.org}/{id.node_execution_id.execu" + "tion_id.project}/{id.node_execution_id.e" + "xecution_id.domain}/{id.node_execution_i" + "d.execution_id.name}/{id.node_execution_" + "id.node_id}/{id.task_id.project}/{id.tas" + "k_id.domain}/{id.task_id.name}/{id.task_" + "id.version}/{id.retry_attempt}\022\361\003\n\022ListT" + "askExecutions\022(.flyteidl.admin.TaskExecu" + "tionListRequest\032!.flyteidl.admin.TaskExe" + "cutionList\"\215\003\202\323\344\223\002\206\003\022\252\001/api/v1/task_exec" + "utions/{node_execution_id.execution_id.p" + "roject}/{node_execution_id.execution_id." + "domain}/{node_execution_id.execution_id." + "name}/{node_execution_id.node_id}Z\326\001\022\323\001/" + "api/v1/task_executions/org/{node_executi" + "on_id.execution_id.org}/{node_execution_" + "id.execution_id.project}/{node_execution" + "_id.execution_id.domain}/{node_execution" + "_id.execution_id.name}/{node_execution_i" + "d.node_id}\022\354\005\n\024GetTaskExecutionData\022+.fl" + "yteidl.admin.TaskExecutionGetDataRequest" + "\032,.flyteidl.admin.TaskExecutionGetDataRe" + "sponse\"\370\004\202\323\344\223\002\361\004\022\236\002/api/v1/data/task_exe" + "cutions/{id.node_execution_id.execution_" + "id.project}/{id.node_execution_id.execut" + "ion_id.domain}/{id.node_execution_id.exe" + "cution_id.name}/{id.node_execution_id.no" + "de_id}/{id.task_id.project}/{id.task_id." 
+ "domain}/{id.task_id.name}/{id.task_id.ve" + "rsion}/{id.retry_attempt}Z\315\002\022\312\002/api/v1/d" + "ata/org/{id.node_execution_id.execution_" + "id.org}/task_executions/{id.node_executi" + "on_id.execution_id.project}/{id.node_exe" + "cution_id.execution_id.domain}/{id.node_" + "execution_id.execution_id.name}/{id.node" + "_execution_id.node_id}/{id.task_id.proje" + "ct}/{id.task_id.domain}/{id.task_id.name" + "}/{id.task_id.version}/{id.retry_attempt" + "}\022\313\002\n\035UpdateProjectDomainAttributes\0224.fl" + "yteidl.admin.ProjectDomainAttributesUpda" + "teRequest\0325.flyteidl.admin.ProjectDomain" + "AttributesUpdateResponse\"\274\001\202\323\344\223\002\265\001\032J/api" + "/v1/project_domain_attributes/{attribute" + "s.project}/{attributes.domain}:\001*Zd\032_/ap" + "i/v1/project_domain_attributes/org/{attr" + "ibutes.org}/{attributes.project}/{attrib" + "utes.domain}:\001*\022\203\002\n\032GetProjectDomainAttr" + "ibutes\0221.flyteidl.admin.ProjectDomainAtt" + "ributesGetRequest\0322.flyteidl.admin.Proje" + "ctDomainAttributesGetResponse\"~\202\323\344\223\002x\0224/" + "api/v1/project_domain_attributes/{projec" + "t}/{domain}Z@\022>/api/v1/project_domain_at" + "tributes/org/{org}/{project}/{domain}\022\223\002" + "\n\035DeleteProjectDomainAttributes\0224.flytei" + "dl.admin.ProjectDomainAttributesDeleteRe" + "quest\0325.flyteidl.admin.ProjectDomainAttr" + "ibutesDeleteResponse\"\204\001\202\323\344\223\002~*4/api/v1/p" + "roject_domain_attributes/{project}/{doma" + "in}:\001*ZC*>/api/v1/project_domain_attribu" + "tes/org/{org}/{project}/{domain}:\001*\022\212\002\n\027" + "UpdateProjectAttributes\022..flyteidl.admin" + ".ProjectAttributesUpdateRequest\032/.flytei" + "dl.admin.ProjectAttributesUpdateResponse" + "\"\215\001\202\323\344\223\002\206\001\032//api/v1/project_attributes/{" + "attributes.project}:\001*ZP\032K/api/v1/projec" + "t_domain_attributes/org/{attributes.org}" + "/{attributes.project}:\001*\022\330\001\n\024GetProjectA" + 
"ttributes\022+.flyteidl.admin.ProjectAttrib" + "utesGetRequest\032,.flyteidl.admin.ProjectA" + "ttributesGetResponse\"e\202\323\344\223\002_\022$/api/v1/pr" + "oject_attributes/{project}Z7\0225/api/v1/pr" + "oject_domain_attributes/org/{org}/{proje" + "ct}\022\347\001\n\027DeleteProjectAttributes\022..flytei" + "dl.admin.ProjectAttributesDeleteRequest\032" + "/.flyteidl.admin.ProjectAttributesDelete" + "Response\"k\202\323\344\223\002e*$/api/v1/project_attrib" + "utes/{project}:\001*Z:*5/api/v1/project_dom" + "ain_attributes/org/{org}/{project}:\001*\022\334\002" + "\n\030UpdateWorkflowAttributes\022/.flyteidl.ad" + "min.WorkflowAttributesUpdateRequest\0320.fl" + "yteidl.admin.WorkflowAttributesUpdateRes" + "ponse\"\334\001\202\323\344\223\002\325\001\032Z/api/v1/workflow_attrib" + "utes/{attributes.project}/{attributes.do" + "main}/{attributes.workflow}:\001*Zt\032o/api/v" + "1/workflow_attributes/org/{attributes.or" + "g}/{attributes.project}/{attributes.doma" + "in}/{attributes.workflow}:\001*\022\200\002\n\025GetWork" + "flowAttributes\022,.flyteidl.admin.Workflow" + "AttributesGetRequest\032-.flyteidl.admin.Wo" + "rkflowAttributesGetResponse\"\211\001\202\323\344\223\002\202\001\0229/" + "api/v1/workflow_attributes/{project}/{do" + "main}/{workflow}ZE\022C/api/v1/workflow_att" + "ributes/org/{org}/{project}/{domain}/{wo" + "rkflow}\022\217\002\n\030DeleteWorkflowAttributes\022/.f" + "lyteidl.admin.WorkflowAttributesDeleteRe" + "quest\0320.flyteidl.admin.WorkflowAttribute" + "sDeleteResponse\"\217\001\202\323\344\223\002\210\001*9/api/v1/workf" + "low_attributes/{project}/{domain}/{workf" + "low}:\001*ZH*C/api/v1/workflow_attributes/o" + "rg/{org}/{project}/{domain}/{workflow}:\001" + "*\022\312\001\n\027ListMatchableAttributes\022..flyteidl" + ".admin.ListMatchableAttributesRequest\032/." 
+ "flyteidl.admin.ListMatchableAttributesRe" + "sponse\"N\202\323\344\223\002H\022\034/api/v1/matchable_attrib" + "utesZ(\022&/api/v1/matchable_attributes/org" + "/{org}\022\350\001\n\021ListNamedEntities\022&.flyteidl." "admin.NamedEntityListRequest\032\037.flyteidl." - "admin.NamedEntityList\"A\202\323\344\223\002;\0229/api/v1/n" - "amed_entities/{resource_type}/{project}/" - "{domain}\022\247\001\n\016GetNamedEntity\022%.flyteidl.a" - "dmin.NamedEntityGetRequest\032\033.flyteidl.ad" - "min.NamedEntity\"Q\202\323\344\223\002K\022I/api/v1/named_e" + "admin.NamedEntityList\"\211\001\202\323\344\223\002\202\001\0229/api/v1" + "/named_entities/{resource_type}/{project" + "}/{domain}ZE\022C/api/v1/named_entities/org" + "/{org}/{resource_type}/{project}/{domain" + "}\022\203\002\n\016GetNamedEntity\022%.flyteidl.admin.Na" + "medEntityGetRequest\032\033.flyteidl.admin.Nam" + "edEntity\"\254\001\202\323\344\223\002\245\001\022I/api/v1/named_entiti" + "es/{resource_type}/{id.project}/{id.doma" + "in}/{id.name}ZX\022V/api/v1/named_entities/" + "org/{id.org}/{resource_type}/{id.project" + "}/{id.domain}/{id.name}\022\235\002\n\021UpdateNamedE" + "ntity\022(.flyteidl.admin.NamedEntityUpdate" + "Request\032).flyteidl.admin.NamedEntityUpda" + "teResponse\"\262\001\202\323\344\223\002\253\001\032I/api/v1/named_enti" + "ties/{resource_type}/{id.project}/{id.do" + "main}/{id.name}:\001*Z[\032V/api/v1/named_enti" + "ties/org/{id.org}/{resource_type}/{id.pr" + "oject}/{id.domain}/{id.name}:\001*\022l\n\nGetVe" + "rsion\022!.flyteidl.admin.GetVersionRequest" + "\032\".flyteidl.admin.GetVersionResponse\"\027\202\323" + "\344\223\002\021\022\017/api/v1/version\022\266\002\n\024GetDescription" + "Entity\022 .flyteidl.admin.ObjectGetRequest" + "\032!.flyteidl.admin.DescriptionEntity\"\330\001\202\323" + "\344\223\002\321\001\022_/api/v1/description_entities/{id." 
+ "resource_type}/{id.project}/{id.domain}/" + "{id.name}/{id.version}Zn\022l/api/v1/descri" + "ption_entities/org/{id.org}/{id.resource" + "_type}/{id.project}/{id.domain}/{id.name" + "}/{id.version}\022\310\003\n\027ListDescriptionEntiti" + "es\022,.flyteidl.admin.DescriptionEntityLis" + "tRequest\032%.flyteidl.admin.DescriptionEnt" + "ityList\"\327\002\202\323\344\223\002\320\002\022O/api/v1/description_e" "ntities/{resource_type}/{id.project}/{id" - ".domain}/{id.name}\022\276\001\n\021UpdateNamedEntity" - "\022(.flyteidl.admin.NamedEntityUpdateReque" - "st\032).flyteidl.admin.NamedEntityUpdateRes" - "ponse\"T\202\323\344\223\002N\032I/api/v1/named_entities/{r" - "esource_type}/{id.project}/{id.domain}/{" - "id.name}:\001*\022l\n\nGetVersion\022!.flyteidl.adm" - "in.GetVersionRequest\032\".flyteidl.admin.Ge" - "tVersionResponse\"\027\202\323\344\223\002\021\022\017/api/v1/versio" - "n\022\304\001\n\024GetDescriptionEntity\022 .flyteidl.ad" - "min.ObjectGetRequest\032!.flyteidl.admin.De" - "scriptionEntity\"g\202\323\344\223\002a\022_/api/v1/descrip" - "tion_entities/{id.resource_type}/{id.pro" - "ject}/{id.domain}/{id.name}/{id.version}" - "\022\222\002\n\027ListDescriptionEntities\022,.flyteidl." 
- "admin.DescriptionEntityListRequest\032%.fly" - "teidl.admin.DescriptionEntityList\"\241\001\202\323\344\223" - "\002\232\001\022O/api/v1/description_entities/{resou" - "rce_type}/{id.project}/{id.domain}/{id.n" - "ame}ZG\022E/api/v1/description_entities/{re" - "source_type}/{id.project}/{id.domain}\022\305\001" - "\n\023GetExecutionMetrics\0222.flyteidl.admin.W" - "orkflowExecutionGetMetricsRequest\0323.flyt" - "eidl.admin.WorkflowExecutionGetMetricsRe" - "sponse\"E\202\323\344\223\002\?\022=/api/v1/metrics/executio" - "ns/{id.project}/{id.domain}/{id.name}B\?Z" - "=github.com/flyteorg/flyte/flyteidl/gen/" - "pb-go/flyteidl/serviceb\006proto3" + ".domain}/{id.name}Z^\022\\/api/v1/descriptio" + "n_entities/org/{id.org}/{resource_type}/" + "{id.project}/{id.domain}/{id.name}ZG\022E/a" + "pi/v1/description_entities/{resource_typ" + "e}/{id.project}/{id.domain}ZT\022R/api/v1/d" + "escription_entities/org/{id.org}/{resour" + "ce_type}/{id.project}/{id.domain}\022\225\002\n\023Ge" + "tExecutionMetrics\0222.flyteidl.admin.Workf" + "lowExecutionGetMetricsRequest\0323.flyteidl" + ".admin.WorkflowExecutionGetMetricsRespon" + "se\"\224\001\202\323\344\223\002\215\001\022=/api/v1/metrics/executions" + "/{id.project}/{id.domain}/{id.name}ZL\022J/" + "api/v1/metrics/executions/org/{id.org}/{" + "id.project}/{id.domain}/{id.name}B\?Z=git" + "hub.com/flyteorg/flyte/flyteidl/gen/pb-g" + "o/flyteidl/serviceb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fservice_2fadmin_2eproto = { false, InitDefaults_flyteidl_2fservice_2fadmin_2eproto, descriptor_table_protodef_flyteidl_2fservice_2fadmin_2eproto, - "flyteidl/service/admin.proto", &assign_descriptors_table_flyteidl_2fservice_2fadmin_2eproto, 10670, + "flyteidl/service/admin.proto", &assign_descriptors_table_flyteidl_2fservice_2fadmin_2eproto, 16226, }; void AddDescriptors_flyteidl_2fservice_2fadmin_2eproto() { diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/agent.grpc.pb.cc 
b/flyteidl/gen/pb-cpp/flyteidl/service/agent.grpc.pb.cc index 0dd9e3d2d1..a167711437 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/agent.grpc.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/service/agent.grpc.pb.cc @@ -23,6 +23,8 @@ static const char* AsyncAgentService_method_names[] = { "/flyteidl.service.AsyncAgentService/CreateTask", "/flyteidl.service.AsyncAgentService/GetTask", "/flyteidl.service.AsyncAgentService/DeleteTask", + "/flyteidl.service.AsyncAgentService/GetTaskMetrics", + "/flyteidl.service.AsyncAgentService/GetTaskLogs", }; std::unique_ptr< AsyncAgentService::Stub> AsyncAgentService::NewStub(const std::shared_ptr< ::grpc::ChannelInterface>& channel, const ::grpc::StubOptions& options) { @@ -35,6 +37,8 @@ AsyncAgentService::Stub::Stub(const std::shared_ptr< ::grpc::ChannelInterface>& : channel_(channel), rpcmethod_CreateTask_(AsyncAgentService_method_names[0], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) , rpcmethod_GetTask_(AsyncAgentService_method_names[1], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) , rpcmethod_DeleteTask_(AsyncAgentService_method_names[2], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetTaskMetrics_(AsyncAgentService_method_names[3], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) + , rpcmethod_GetTaskLogs_(AsyncAgentService_method_names[4], ::grpc::internal::RpcMethod::NORMAL_RPC, channel) {} ::grpc::Status AsyncAgentService::Stub::CreateTask(::grpc::ClientContext* context, const ::flyteidl::admin::CreateTaskRequest& request, ::flyteidl::admin::CreateTaskResponse* response) { @@ -121,6 +125,62 @@ ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DeleteTaskResponse>* Async return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::admin::DeleteTaskResponse>::Create(channel_.get(), cq, rpcmethod_DeleteTask_, context, request, false); } +::grpc::Status AsyncAgentService::Stub::GetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, 
::flyteidl::admin::GetTaskMetricsResponse* response) { + return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_GetTaskMetrics_, context, request, response); +} + +void AsyncAgentService::Stub::experimental_async::GetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response, std::function f) { + ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_GetTaskMetrics_, context, request, response, std::move(f)); +} + +void AsyncAgentService::Stub::experimental_async::GetTaskMetrics(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskMetricsResponse* response, std::function f) { + ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_GetTaskMetrics_, context, request, response, std::move(f)); +} + +void AsyncAgentService::Stub::experimental_async::GetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { + ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_GetTaskMetrics_, context, request, response, reactor); +} + +void AsyncAgentService::Stub::experimental_async::GetTaskMetrics(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskMetricsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { + ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_GetTaskMetrics_, context, request, response, reactor); +} + +::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskMetricsResponse>* AsyncAgentService::Stub::AsyncGetTaskMetricsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::grpc::CompletionQueue* cq) { + return 
::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::admin::GetTaskMetricsResponse>::Create(channel_.get(), cq, rpcmethod_GetTaskMetrics_, context, request, true); +} + +::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskMetricsResponse>* AsyncAgentService::Stub::PrepareAsyncGetTaskMetricsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::grpc::CompletionQueue* cq) { + return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::admin::GetTaskMetricsResponse>::Create(channel_.get(), cq, rpcmethod_GetTaskMetrics_, context, request, false); +} + +::grpc::Status AsyncAgentService::Stub::GetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::flyteidl::admin::GetTaskLogsResponse* response) { + return ::grpc::internal::BlockingUnaryCall(channel_.get(), rpcmethod_GetTaskLogs_, context, request, response); +} + +void AsyncAgentService::Stub::experimental_async::GetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response, std::function f) { + ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_GetTaskLogs_, context, request, response, std::move(f)); +} + +void AsyncAgentService::Stub::experimental_async::GetTaskLogs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskLogsResponse* response, std::function f) { + ::grpc::internal::CallbackUnaryCall(stub_->channel_.get(), stub_->rpcmethod_GetTaskLogs_, context, request, response, std::move(f)); +} + +void AsyncAgentService::Stub::experimental_async::GetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { + ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_GetTaskLogs_, 
context, request, response, reactor); +} + +void AsyncAgentService::Stub::experimental_async::GetTaskLogs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskLogsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) { + ::grpc::internal::ClientCallbackUnaryFactory::Create(stub_->channel_.get(), stub_->rpcmethod_GetTaskLogs_, context, request, response, reactor); +} + +::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskLogsResponse>* AsyncAgentService::Stub::AsyncGetTaskLogsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::grpc::CompletionQueue* cq) { + return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::admin::GetTaskLogsResponse>::Create(channel_.get(), cq, rpcmethod_GetTaskLogs_, context, request, true); +} + +::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskLogsResponse>* AsyncAgentService::Stub::PrepareAsyncGetTaskLogsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::grpc::CompletionQueue* cq) { + return ::grpc::internal::ClientAsyncResponseReaderFactory< ::flyteidl::admin::GetTaskLogsResponse>::Create(channel_.get(), cq, rpcmethod_GetTaskLogs_, context, request, false); +} + AsyncAgentService::Service::Service() { AddMethod(new ::grpc::internal::RpcServiceMethod( AsyncAgentService_method_names[0], @@ -137,6 +197,16 @@ AsyncAgentService::Service::Service() { ::grpc::internal::RpcMethod::NORMAL_RPC, new ::grpc::internal::RpcMethodHandler< AsyncAgentService::Service, ::flyteidl::admin::DeleteTaskRequest, ::flyteidl::admin::DeleteTaskResponse>( std::mem_fn(&AsyncAgentService::Service::DeleteTask), this))); + AddMethod(new ::grpc::internal::RpcServiceMethod( + AsyncAgentService_method_names[3], + ::grpc::internal::RpcMethod::NORMAL_RPC, + new ::grpc::internal::RpcMethodHandler< AsyncAgentService::Service, ::flyteidl::admin::GetTaskMetricsRequest, 
::flyteidl::admin::GetTaskMetricsResponse>( + std::mem_fn(&AsyncAgentService::Service::GetTaskMetrics), this))); + AddMethod(new ::grpc::internal::RpcServiceMethod( + AsyncAgentService_method_names[4], + ::grpc::internal::RpcMethod::NORMAL_RPC, + new ::grpc::internal::RpcMethodHandler< AsyncAgentService::Service, ::flyteidl::admin::GetTaskLogsRequest, ::flyteidl::admin::GetTaskLogsResponse>( + std::mem_fn(&AsyncAgentService::Service::GetTaskLogs), this))); } AsyncAgentService::Service::~Service() { @@ -163,6 +233,20 @@ ::grpc::Status AsyncAgentService::Service::DeleteTask(::grpc::ServerContext* con return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); } +::grpc::Status AsyncAgentService::Service::GetTaskMetrics(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response) { + (void) context; + (void) request; + (void) response; + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); +} + +::grpc::Status AsyncAgentService::Service::GetTaskLogs(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response) { + (void) context; + (void) request; + (void) response; + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); +} + static const char* AgentMetadataService_method_names[] = { "/flyteidl.service.AgentMetadataService/GetAgent", diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/agent.grpc.pb.h b/flyteidl/gen/pb-cpp/flyteidl/service/agent.grpc.pb.h index e1cb5da223..e1ab9cd6e5 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/agent.grpc.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/service/agent.grpc.pb.h @@ -73,6 +73,26 @@ class AsyncAgentService final { std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::DeleteTaskResponse>> PrepareAsyncDeleteTask(::grpc::ClientContext* context, const ::flyteidl::admin::DeleteTaskRequest& request, ::grpc::CompletionQueue* cq) { return 
std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::DeleteTaskResponse>>(PrepareAsyncDeleteTaskRaw(context, request, cq)); } + // GetTaskMetrics returns one or more task execution metrics, if available. + // + // Errors include + // * OutOfRange if metrics are not available for the specified task time range + // * various other errors + virtual ::grpc::Status GetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::flyteidl::admin::GetTaskMetricsResponse* response) = 0; + std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskMetricsResponse>> AsyncGetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskMetricsResponse>>(AsyncGetTaskMetricsRaw(context, request, cq)); + } + std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskMetricsResponse>> PrepareAsyncGetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskMetricsResponse>>(PrepareAsyncGetTaskMetricsRaw(context, request, cq)); + } + // GetTaskLogs returns task execution logs, if available. 
+ virtual ::grpc::Status GetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::flyteidl::admin::GetTaskLogsResponse* response) = 0; + std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskLogsResponse>> AsyncGetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskLogsResponse>>(AsyncGetTaskLogsRaw(context, request, cq)); + } + std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskLogsResponse>> PrepareAsyncGetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskLogsResponse>>(PrepareAsyncGetTaskLogsRaw(context, request, cq)); + } class experimental_async_interface { public: virtual ~experimental_async_interface() {} @@ -91,6 +111,20 @@ class AsyncAgentService final { virtual void DeleteTask(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::DeleteTaskResponse* response, std::function) = 0; virtual void DeleteTask(::grpc::ClientContext* context, const ::flyteidl::admin::DeleteTaskRequest* request, ::flyteidl::admin::DeleteTaskResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; virtual void DeleteTask(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::DeleteTaskResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; + // GetTaskMetrics returns one or more task execution metrics, if available. 
+ // + // Errors include + // * OutOfRange if metrics are not available for the specified task time range + // * various other errors + virtual void GetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response, std::function) = 0; + virtual void GetTaskMetrics(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskMetricsResponse* response, std::function) = 0; + virtual void GetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; + virtual void GetTaskMetrics(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskMetricsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; + // GetTaskLogs returns task execution logs, if available. + virtual void GetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response, std::function) = 0; + virtual void GetTaskLogs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskLogsResponse* response, std::function) = 0; + virtual void GetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; + virtual void GetTaskLogs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskLogsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) = 0; }; virtual class experimental_async_interface* experimental_async() { return nullptr; } private: @@ -100,6 +134,10 @@ class AsyncAgentService final { virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskResponse>* 
PrepareAsyncGetTaskRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskRequest& request, ::grpc::CompletionQueue* cq) = 0; virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::DeleteTaskResponse>* AsyncDeleteTaskRaw(::grpc::ClientContext* context, const ::flyteidl::admin::DeleteTaskRequest& request, ::grpc::CompletionQueue* cq) = 0; virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::DeleteTaskResponse>* PrepareAsyncDeleteTaskRaw(::grpc::ClientContext* context, const ::flyteidl::admin::DeleteTaskRequest& request, ::grpc::CompletionQueue* cq) = 0; + virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskMetricsResponse>* AsyncGetTaskMetricsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::grpc::CompletionQueue* cq) = 0; + virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskMetricsResponse>* PrepareAsyncGetTaskMetricsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::grpc::CompletionQueue* cq) = 0; + virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskLogsResponse>* AsyncGetTaskLogsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::grpc::CompletionQueue* cq) = 0; + virtual ::grpc::ClientAsyncResponseReaderInterface< ::flyteidl::admin::GetTaskLogsResponse>* PrepareAsyncGetTaskLogsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::grpc::CompletionQueue* cq) = 0; }; class Stub final : public StubInterface { public: @@ -125,6 +163,20 @@ class AsyncAgentService final { std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DeleteTaskResponse>> PrepareAsyncDeleteTask(::grpc::ClientContext* context, const ::flyteidl::admin::DeleteTaskRequest& request, ::grpc::CompletionQueue* cq) { return std::unique_ptr< ::grpc::ClientAsyncResponseReader< 
::flyteidl::admin::DeleteTaskResponse>>(PrepareAsyncDeleteTaskRaw(context, request, cq)); } + ::grpc::Status GetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::flyteidl::admin::GetTaskMetricsResponse* response) override; + std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskMetricsResponse>> AsyncGetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskMetricsResponse>>(AsyncGetTaskMetricsRaw(context, request, cq)); + } + std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskMetricsResponse>> PrepareAsyncGetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskMetricsResponse>>(PrepareAsyncGetTaskMetricsRaw(context, request, cq)); + } + ::grpc::Status GetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::flyteidl::admin::GetTaskLogsResponse* response) override; + std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskLogsResponse>> AsyncGetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskLogsResponse>>(AsyncGetTaskLogsRaw(context, request, cq)); + } + std::unique_ptr< ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskLogsResponse>> PrepareAsyncGetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::grpc::CompletionQueue* cq) { + return std::unique_ptr< ::grpc::ClientAsyncResponseReader< 
::flyteidl::admin::GetTaskLogsResponse>>(PrepareAsyncGetTaskLogsRaw(context, request, cq)); + } class experimental_async final : public StubInterface::experimental_async_interface { public: @@ -140,6 +192,14 @@ class AsyncAgentService final { void DeleteTask(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::DeleteTaskResponse* response, std::function) override; void DeleteTask(::grpc::ClientContext* context, const ::flyteidl::admin::DeleteTaskRequest* request, ::flyteidl::admin::DeleteTaskResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; void DeleteTask(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::DeleteTaskResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; + void GetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response, std::function) override; + void GetTaskMetrics(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskMetricsResponse* response, std::function) override; + void GetTaskMetrics(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; + void GetTaskMetrics(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskMetricsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; + void GetTaskLogs(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response, std::function) override; + void GetTaskLogs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskLogsResponse* response, std::function) override; + void GetTaskLogs(::grpc::ClientContext* context, const 
::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; + void GetTaskLogs(::grpc::ClientContext* context, const ::grpc::ByteBuffer* request, ::flyteidl::admin::GetTaskLogsResponse* response, ::grpc::experimental::ClientUnaryReactor* reactor) override; private: friend class Stub; explicit experimental_async(Stub* stub): stub_(stub) { } @@ -157,9 +217,15 @@ class AsyncAgentService final { ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskResponse>* PrepareAsyncGetTaskRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskRequest& request, ::grpc::CompletionQueue* cq) override; ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DeleteTaskResponse>* AsyncDeleteTaskRaw(::grpc::ClientContext* context, const ::flyteidl::admin::DeleteTaskRequest& request, ::grpc::CompletionQueue* cq) override; ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::DeleteTaskResponse>* PrepareAsyncDeleteTaskRaw(::grpc::ClientContext* context, const ::flyteidl::admin::DeleteTaskRequest& request, ::grpc::CompletionQueue* cq) override; + ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskMetricsResponse>* AsyncGetTaskMetricsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::grpc::CompletionQueue* cq) override; + ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskMetricsResponse>* PrepareAsyncGetTaskMetricsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskMetricsRequest& request, ::grpc::CompletionQueue* cq) override; + ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskLogsResponse>* AsyncGetTaskLogsRaw(::grpc::ClientContext* context, const ::flyteidl::admin::GetTaskLogsRequest& request, ::grpc::CompletionQueue* cq) override; + ::grpc::ClientAsyncResponseReader< ::flyteidl::admin::GetTaskLogsResponse>* PrepareAsyncGetTaskLogsRaw(::grpc::ClientContext* context, const 
::flyteidl::admin::GetTaskLogsRequest& request, ::grpc::CompletionQueue* cq) override; const ::grpc::internal::RpcMethod rpcmethod_CreateTask_; const ::grpc::internal::RpcMethod rpcmethod_GetTask_; const ::grpc::internal::RpcMethod rpcmethod_DeleteTask_; + const ::grpc::internal::RpcMethod rpcmethod_GetTaskMetrics_; + const ::grpc::internal::RpcMethod rpcmethod_GetTaskLogs_; }; static std::unique_ptr NewStub(const std::shared_ptr< ::grpc::ChannelInterface>& channel, const ::grpc::StubOptions& options = ::grpc::StubOptions()); @@ -173,6 +239,14 @@ class AsyncAgentService final { virtual ::grpc::Status GetTask(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskRequest* request, ::flyteidl::admin::GetTaskResponse* response); // Delete the task resource. virtual ::grpc::Status DeleteTask(::grpc::ServerContext* context, const ::flyteidl::admin::DeleteTaskRequest* request, ::flyteidl::admin::DeleteTaskResponse* response); + // GetTaskMetrics returns one or more task execution metrics, if available. + // + // Errors include + // * OutOfRange if metrics are not available for the specified task time range + // * various other errors + virtual ::grpc::Status GetTaskMetrics(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response); + // GetTaskLogs returns task execution logs, if available. 
+ virtual ::grpc::Status GetTaskLogs(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response); }; template class WithAsyncMethod_CreateTask : public BaseClass { @@ -234,7 +308,47 @@ class AsyncAgentService final { ::grpc::Service::RequestAsyncUnary(2, context, request, response, new_call_cq, notification_cq, tag); } }; - typedef WithAsyncMethod_CreateTask > > AsyncService; + template + class WithAsyncMethod_GetTaskMetrics : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithAsyncMethod_GetTaskMetrics() { + ::grpc::Service::MarkMethodAsync(3); + } + ~WithAsyncMethod_GetTaskMetrics() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetTaskMetrics(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + void RequestGetTaskMetrics(::grpc::ServerContext* context, ::flyteidl::admin::GetTaskMetricsRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::GetTaskMetricsResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { + ::grpc::Service::RequestAsyncUnary(3, context, request, response, new_call_cq, notification_cq, tag); + } + }; + template + class WithAsyncMethod_GetTaskLogs : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithAsyncMethod_GetTaskLogs() { + ::grpc::Service::MarkMethodAsync(4); + } + ~WithAsyncMethod_GetTaskLogs() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetTaskLogs(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskLogsRequest* 
request, ::flyteidl::admin::GetTaskLogsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + void RequestGetTaskLogs(::grpc::ServerContext* context, ::flyteidl::admin::GetTaskLogsRequest* request, ::grpc::ServerAsyncResponseWriter< ::flyteidl::admin::GetTaskLogsResponse>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { + ::grpc::Service::RequestAsyncUnary(4, context, request, response, new_call_cq, notification_cq, tag); + } + }; + typedef WithAsyncMethod_CreateTask > > > > AsyncService; template class ExperimentalWithCallbackMethod_CreateTask : public BaseClass { private: @@ -328,7 +442,69 @@ class AsyncAgentService final { } virtual void DeleteTask(::grpc::ServerContext* context, const ::flyteidl::admin::DeleteTaskRequest* request, ::flyteidl::admin::DeleteTaskResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } }; - typedef ExperimentalWithCallbackMethod_CreateTask > > ExperimentalCallbackService; + template + class ExperimentalWithCallbackMethod_GetTaskMetrics : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + ExperimentalWithCallbackMethod_GetTaskMetrics() { + ::grpc::Service::experimental().MarkMethodCallback(3, + new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::GetTaskMetricsRequest, ::flyteidl::admin::GetTaskMetricsResponse>( + [this](::grpc::ServerContext* context, + const ::flyteidl::admin::GetTaskMetricsRequest* request, + ::flyteidl::admin::GetTaskMetricsResponse* response, + ::grpc::experimental::ServerCallbackRpcController* controller) { + return this->GetTaskMetrics(context, request, response, controller); + })); + } + void SetMessageAllocatorFor_GetTaskMetrics( + ::grpc::experimental::MessageAllocator< ::flyteidl::admin::GetTaskMetricsRequest, 
::flyteidl::admin::GetTaskMetricsResponse>* allocator) { + static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::GetTaskMetricsRequest, ::flyteidl::admin::GetTaskMetricsResponse>*>( + ::grpc::Service::experimental().GetHandler(3)) + ->SetMessageAllocator(allocator); + } + ~ExperimentalWithCallbackMethod_GetTaskMetrics() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetTaskMetrics(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + virtual void GetTaskMetrics(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } + }; + template + class ExperimentalWithCallbackMethod_GetTaskLogs : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + ExperimentalWithCallbackMethod_GetTaskLogs() { + ::grpc::Service::experimental().MarkMethodCallback(4, + new ::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::GetTaskLogsRequest, ::flyteidl::admin::GetTaskLogsResponse>( + [this](::grpc::ServerContext* context, + const ::flyteidl::admin::GetTaskLogsRequest* request, + ::flyteidl::admin::GetTaskLogsResponse* response, + ::grpc::experimental::ServerCallbackRpcController* controller) { + return this->GetTaskLogs(context, request, response, controller); + })); + } + void SetMessageAllocatorFor_GetTaskLogs( + ::grpc::experimental::MessageAllocator< ::flyteidl::admin::GetTaskLogsRequest, ::flyteidl::admin::GetTaskLogsResponse>* allocator) { + static_cast<::grpc::internal::CallbackUnaryHandler< ::flyteidl::admin::GetTaskLogsRequest, 
::flyteidl::admin::GetTaskLogsResponse>*>( + ::grpc::Service::experimental().GetHandler(4)) + ->SetMessageAllocator(allocator); + } + ~ExperimentalWithCallbackMethod_GetTaskLogs() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetTaskLogs(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + virtual void GetTaskLogs(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } + }; + typedef ExperimentalWithCallbackMethod_CreateTask > > > > ExperimentalCallbackService; template class WithGenericMethod_CreateTask : public BaseClass { private: @@ -381,6 +557,40 @@ class AsyncAgentService final { } }; template + class WithGenericMethod_GetTaskMetrics : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithGenericMethod_GetTaskMetrics() { + ::grpc::Service::MarkMethodGeneric(3); + } + ~WithGenericMethod_GetTaskMetrics() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetTaskMetrics(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + }; + template + class WithGenericMethod_GetTaskLogs : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithGenericMethod_GetTaskLogs() { + ::grpc::Service::MarkMethodGeneric(4); + } + 
~WithGenericMethod_GetTaskLogs() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetTaskLogs(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + }; + template class WithRawMethod_CreateTask : public BaseClass { private: void BaseClassMustBeDerivedFromService(const Service *service) {} @@ -441,6 +651,46 @@ class AsyncAgentService final { } }; template + class WithRawMethod_GetTaskMetrics : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithRawMethod_GetTaskMetrics() { + ::grpc::Service::MarkMethodRaw(3); + } + ~WithRawMethod_GetTaskMetrics() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetTaskMetrics(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + void RequestGetTaskMetrics(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { + ::grpc::Service::RequestAsyncUnary(3, context, request, response, new_call_cq, notification_cq, tag); + } + }; + template + class WithRawMethod_GetTaskLogs : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithRawMethod_GetTaskLogs() { + ::grpc::Service::MarkMethodRaw(4); + } + ~WithRawMethod_GetTaskLogs() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status 
GetTaskLogs(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + void RequestGetTaskLogs(::grpc::ServerContext* context, ::grpc::ByteBuffer* request, ::grpc::ServerAsyncResponseWriter< ::grpc::ByteBuffer>* response, ::grpc::CompletionQueue* new_call_cq, ::grpc::ServerCompletionQueue* notification_cq, void *tag) { + ::grpc::Service::RequestAsyncUnary(4, context, request, response, new_call_cq, notification_cq, tag); + } + }; + template class ExperimentalWithRawCallbackMethod_CreateTask : public BaseClass { private: void BaseClassMustBeDerivedFromService(const Service *service) {} @@ -516,6 +766,56 @@ class AsyncAgentService final { virtual void DeleteTask(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } }; template + class ExperimentalWithRawCallbackMethod_GetTaskMetrics : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + ExperimentalWithRawCallbackMethod_GetTaskMetrics() { + ::grpc::Service::experimental().MarkMethodRawCallback(3, + new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( + [this](::grpc::ServerContext* context, + const ::grpc::ByteBuffer* request, + ::grpc::ByteBuffer* response, + ::grpc::experimental::ServerCallbackRpcController* controller) { + this->GetTaskMetrics(context, request, response, controller); + })); + } + ~ExperimentalWithRawCallbackMethod_GetTaskMetrics() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetTaskMetrics(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, 
::flyteidl::admin::GetTaskMetricsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + virtual void GetTaskMetrics(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } + }; + template + class ExperimentalWithRawCallbackMethod_GetTaskLogs : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + ExperimentalWithRawCallbackMethod_GetTaskLogs() { + ::grpc::Service::experimental().MarkMethodRawCallback(4, + new ::grpc::internal::CallbackUnaryHandler< ::grpc::ByteBuffer, ::grpc::ByteBuffer>( + [this](::grpc::ServerContext* context, + const ::grpc::ByteBuffer* request, + ::grpc::ByteBuffer* response, + ::grpc::experimental::ServerCallbackRpcController* controller) { + this->GetTaskLogs(context, request, response, controller); + })); + } + ~ExperimentalWithRawCallbackMethod_GetTaskLogs() override { + BaseClassMustBeDerivedFromService(this); + } + // disable synchronous version of this method + ::grpc::Status GetTaskLogs(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + virtual void GetTaskLogs(::grpc::ServerContext* context, const ::grpc::ByteBuffer* request, ::grpc::ByteBuffer* response, ::grpc::experimental::ServerCallbackRpcController* controller) { controller->Finish(::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, "")); } + }; + template class WithStreamedUnaryMethod_CreateTask : public BaseClass { private: void BaseClassMustBeDerivedFromService(const Service *service) {} @@ -575,9 +875,49 @@ class AsyncAgentService final { // replace default version of method with streamed unary virtual 
::grpc::Status StreamedDeleteTask(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::admin::DeleteTaskRequest,::flyteidl::admin::DeleteTaskResponse>* server_unary_streamer) = 0; }; - typedef WithStreamedUnaryMethod_CreateTask > > StreamedUnaryService; + template + class WithStreamedUnaryMethod_GetTaskMetrics : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithStreamedUnaryMethod_GetTaskMetrics() { + ::grpc::Service::MarkMethodStreamed(3, + new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::GetTaskMetricsRequest, ::flyteidl::admin::GetTaskMetricsResponse>(std::bind(&WithStreamedUnaryMethod_GetTaskMetrics::StreamedGetTaskMetrics, this, std::placeholders::_1, std::placeholders::_2))); + } + ~WithStreamedUnaryMethod_GetTaskMetrics() override { + BaseClassMustBeDerivedFromService(this); + } + // disable regular version of this method + ::grpc::Status GetTaskMetrics(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskMetricsRequest* request, ::flyteidl::admin::GetTaskMetricsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + // replace default version of method with streamed unary + virtual ::grpc::Status StreamedGetTaskMetrics(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::admin::GetTaskMetricsRequest,::flyteidl::admin::GetTaskMetricsResponse>* server_unary_streamer) = 0; + }; + template + class WithStreamedUnaryMethod_GetTaskLogs : public BaseClass { + private: + void BaseClassMustBeDerivedFromService(const Service *service) {} + public: + WithStreamedUnaryMethod_GetTaskLogs() { + ::grpc::Service::MarkMethodStreamed(4, + new ::grpc::internal::StreamedUnaryHandler< ::flyteidl::admin::GetTaskLogsRequest, ::flyteidl::admin::GetTaskLogsResponse>(std::bind(&WithStreamedUnaryMethod_GetTaskLogs::StreamedGetTaskLogs, this, std::placeholders::_1, std::placeholders::_2))); + } + 
~WithStreamedUnaryMethod_GetTaskLogs() override { + BaseClassMustBeDerivedFromService(this); + } + // disable regular version of this method + ::grpc::Status GetTaskLogs(::grpc::ServerContext* context, const ::flyteidl::admin::GetTaskLogsRequest* request, ::flyteidl::admin::GetTaskLogsResponse* response) override { + abort(); + return ::grpc::Status(::grpc::StatusCode::UNIMPLEMENTED, ""); + } + // replace default version of method with streamed unary + virtual ::grpc::Status StreamedGetTaskLogs(::grpc::ServerContext* context, ::grpc::ServerUnaryStreamer< ::flyteidl::admin::GetTaskLogsRequest,::flyteidl::admin::GetTaskLogsResponse>* server_unary_streamer) = 0; + }; + typedef WithStreamedUnaryMethod_CreateTask > > > > StreamedUnaryService; typedef Service SplitStreamedService; - typedef WithStreamedUnaryMethod_CreateTask > > StreamedService; + typedef WithStreamedUnaryMethod_CreateTask > > > > StreamedService; }; // AgentMetadataService defines an RPC service that is also served over HTTP via grpc-gateway. diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/agent.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/service/agent.pb.cc index 4b7c5d0557..3d6379889b 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/agent.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/service/agent.pb.cc @@ -39,27 +39,32 @@ ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fservice_2fagent_2eproto[] = "\n\034flyteidl/service/agent.proto\022\020flyteidl" ".service\032\034google/api/annotations.proto\032\032" - "flyteidl/admin/agent.proto2\217\002\n\021AsyncAgen" + "flyteidl/admin/agent.proto2\314\003\n\021AsyncAgen" "tService\022U\n\nCreateTask\022!.flyteidl.admin." "CreateTaskRequest\032\".flyteidl.admin.Creat" "eTaskResponse\"\000\022L\n\007GetTask\022\036.flyteidl.ad" "min.GetTaskRequest\032\037.flyteidl.admin.GetT" "askResponse\"\000\022U\n\nDeleteTask\022!.flyteidl.a" "dmin.DeleteTaskRequest\032\".flyteidl.admin." 
- "DeleteTaskResponse\"\0002\360\001\n\024AgentMetadataSe" - "rvice\022k\n\010GetAgent\022\037.flyteidl.admin.GetAg" - "entRequest\032 .flyteidl.admin.GetAgentResp" - "onse\"\034\202\323\344\223\002\026\022\024/api/v1/agent/{name}\022k\n\nLi" - "stAgents\022!.flyteidl.admin.ListAgentsRequ" - "est\032\".flyteidl.admin.ListAgentsResponse\"" - "\026\202\323\344\223\002\020\022\016/api/v1/agentsB\?Z=github.com/fl" - "yteorg/flyte/flyteidl/gen/pb-go/flyteidl" - "/serviceb\006proto3" + "DeleteTaskResponse\"\000\022a\n\016GetTaskMetrics\022%" + ".flyteidl.admin.GetTaskMetricsRequest\032&." + "flyteidl.admin.GetTaskMetricsResponse\"\000\022" + "X\n\013GetTaskLogs\022\".flyteidl.admin.GetTaskL" + "ogsRequest\032#.flyteidl.admin.GetTaskLogsR" + "esponse\"\0002\360\001\n\024AgentMetadataService\022k\n\010Ge" + "tAgent\022\037.flyteidl.admin.GetAgentRequest\032" + " .flyteidl.admin.GetAgentResponse\"\034\202\323\344\223\002" + "\026\022\024/api/v1/agent/{name}\022k\n\nListAgents\022!." + "flyteidl.admin.ListAgentsRequest\032\".flyte" + "idl.admin.ListAgentsResponse\"\026\202\323\344\223\002\020\022\016/a" + "pi/v1/agentsB\?Z=github.com/flyteorg/flyt" + "e/flyteidl/gen/pb-go/flyteidl/serviceb\006p" + "roto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fservice_2fagent_2eproto = { false, InitDefaults_flyteidl_2fservice_2fagent_2eproto, descriptor_table_protodef_flyteidl_2fservice_2fagent_2eproto, - "flyteidl/service/agent.proto", &assign_descriptors_table_flyteidl_2fservice_2fagent_2eproto, 696, + "flyteidl/service/agent.proto", &assign_descriptors_table_flyteidl_2fservice_2fagent_2eproto, 885, }; void AddDescriptors_flyteidl_2fservice_2fagent_2eproto() { diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/external_plugin_service.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/service/external_plugin_service.pb.cc index 481be2619e..68b912f826 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/external_plugin_service.pb.cc +++ 
b/flyteidl/gen/pb-cpp/flyteidl/service/external_plugin_service.pb.cc @@ -216,45 +216,43 @@ const char descriptor_table_protodef_flyteidl_2fservice_2fexternal_5fplugin_5fse "\n.flyteidl/service/external_plugin_servi" "ce.proto\022\020flyteidl.service\032\034flyteidl/cor" "e/literals.proto\032\031flyteidl/core/tasks.pr" - "oto\032\035flyteidl/core/interface.proto\"\210\001\n\021T" - "askCreateRequest\022)\n\006inputs\030\001 \001(\0132\031.flyte" - "idl.core.LiteralMap\022-\n\010template\030\002 \001(\0132\033." - "flyteidl.core.TaskTemplate\022\025\n\routput_pre" - "fix\030\003 \001(\t:\002\030\001\"(\n\022TaskCreateResponse\022\016\n\006j" - "ob_id\030\001 \001(\t:\002\030\001\"7\n\016TaskGetRequest\022\021\n\ttas" - "k_type\030\001 \001(\t\022\016\n\006job_id\030\002 \001(\t:\002\030\001\"i\n\017Task" - "GetResponse\022&\n\005state\030\001 \001(\0162\027.flyteidl.se" - "rvice.State\022*\n\007outputs\030\002 \001(\0132\031.flyteidl." - "core.LiteralMap:\002\030\001\":\n\021TaskDeleteRequest" - "\022\021\n\ttask_type\030\001 \001(\t\022\016\n\006job_id\030\002 \001(\t:\002\030\001\"" - "\030\n\022TaskDeleteResponse:\002\030\001*b\n\005State\022\025\n\021RE" - "TRYABLE_FAILURE\020\000\022\025\n\021PERMANENT_FAILURE\020\001" - "\022\013\n\007PENDING\020\002\022\013\n\007RUNNING\020\003\022\r\n\tSUCCEEDED\020" - "\004\032\002\030\0012\250\002\n\025ExternalPluginService\022\\\n\nCreat" - "eTask\022#.flyteidl.service.TaskCreateReque" - "st\032$.flyteidl.service.TaskCreateResponse" - "\"\003\210\002\001\022S\n\007GetTask\022 .flyteidl.service.Task" - "GetRequest\032!.flyteidl.service.TaskGetRes" - "ponse\"\003\210\002\001\022\\\n\nDeleteTask\022#.flyteidl.serv" - "ice.TaskDeleteRequest\032$.flyteidl.service" - ".TaskDeleteResponse\"\003\210\002\001B\?Z=github.com/f" - "lyteorg/flyte/flyteidl/gen/pb-go/flyteid" - "l/serviceb\006proto3" + "oto\"\210\001\n\021TaskCreateRequest\022)\n\006inputs\030\001 \001(" + "\0132\031.flyteidl.core.LiteralMap\022-\n\010template" + 
"\030\002 \001(\0132\033.flyteidl.core.TaskTemplate\022\025\n\ro" + "utput_prefix\030\003 \001(\t:\002\030\001\"(\n\022TaskCreateResp" + "onse\022\016\n\006job_id\030\001 \001(\t:\002\030\001\"7\n\016TaskGetReque" + "st\022\021\n\ttask_type\030\001 \001(\t\022\016\n\006job_id\030\002 \001(\t:\002\030" + "\001\"i\n\017TaskGetResponse\022&\n\005state\030\001 \001(\0162\027.fl" + "yteidl.service.State\022*\n\007outputs\030\002 \001(\0132\031." + "flyteidl.core.LiteralMap:\002\030\001\":\n\021TaskDele" + "teRequest\022\021\n\ttask_type\030\001 \001(\t\022\016\n\006job_id\030\002" + " \001(\t:\002\030\001\"\030\n\022TaskDeleteResponse:\002\030\001*b\n\005St" + "ate\022\025\n\021RETRYABLE_FAILURE\020\000\022\025\n\021PERMANENT_" + "FAILURE\020\001\022\013\n\007PENDING\020\002\022\013\n\007RUNNING\020\003\022\r\n\tS" + "UCCEEDED\020\004\032\002\030\0012\250\002\n\025ExternalPluginService" + "\022\\\n\nCreateTask\022#.flyteidl.service.TaskCr" + "eateRequest\032$.flyteidl.service.TaskCreat" + "eResponse\"\003\210\002\001\022S\n\007GetTask\022 .flyteidl.ser" + "vice.TaskGetRequest\032!.flyteidl.service.T" + "askGetResponse\"\003\210\002\001\022\\\n\nDeleteTask\022#.flyt" + "eidl.service.TaskDeleteRequest\032$.flyteid" + "l.service.TaskDeleteResponse\"\003\210\002\001B\?Z=git" + "hub.com/flyteorg/flyte/flyteidl/gen/pb-g" + "o/flyteidl/serviceb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fservice_2fexternal_5fplugin_5fservice_2eproto = { false, InitDefaults_flyteidl_2fservice_2fexternal_5fplugin_5fservice_2eproto, descriptor_table_protodef_flyteidl_2fservice_2fexternal_5fplugin_5fservice_2eproto, - "flyteidl/service/external_plugin_service.proto", &assign_descriptors_table_flyteidl_2fservice_2fexternal_5fplugin_5fservice_2eproto, 1057, + "flyteidl/service/external_plugin_service.proto", &assign_descriptors_table_flyteidl_2fservice_2fexternal_5fplugin_5fservice_2eproto, 1026, }; void 
AddDescriptors_flyteidl_2fservice_2fexternal_5fplugin_5fservice_2eproto() { - static constexpr ::google::protobuf::internal::InitFunc deps[3] = + static constexpr ::google::protobuf::internal::InitFunc deps[2] = { ::AddDescriptors_flyteidl_2fcore_2fliterals_2eproto, ::AddDescriptors_flyteidl_2fcore_2ftasks_2eproto, - ::AddDescriptors_flyteidl_2fcore_2finterface_2eproto, }; - ::google::protobuf::internal::AddDescriptors(&descriptor_table_flyteidl_2fservice_2fexternal_5fplugin_5fservice_2eproto, deps, 3); + ::google::protobuf::internal::AddDescriptors(&descriptor_table_flyteidl_2fservice_2fexternal_5fplugin_5fservice_2eproto, deps, 2); } // Force running AddDescriptors() at dynamic initialization time. diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/external_plugin_service.pb.h b/flyteidl/gen/pb-cpp/flyteidl/service/external_plugin_service.pb.h index 82d5616c5c..bd326d980e 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/external_plugin_service.pb.h +++ b/flyteidl/gen/pb-cpp/flyteidl/service/external_plugin_service.pb.h @@ -34,7 +34,6 @@ #include #include "flyteidl/core/literals.pb.h" #include "flyteidl/core/tasks.pb.h" -#include "flyteidl/core/interface.pb.h" // @@protoc_insertion_point(includes) #include #define PROTOBUF_INTERNAL_EXPORT_flyteidl_2fservice_2fexternal_5fplugin_5fservice_2eproto diff --git a/flyteidl/gen/pb-cpp/flyteidl/service/signal.pb.cc b/flyteidl/gen/pb-cpp/flyteidl/service/signal.pb.cc index a1f4aa5982..5a6d8e19a8 100644 --- a/flyteidl/gen/pb-cpp/flyteidl/service/signal.pb.cc +++ b/flyteidl/gen/pb-cpp/flyteidl/service/signal.pb.cc @@ -39,24 +39,29 @@ ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_fl const char descriptor_table_protodef_flyteidl_2fservice_2fsignal_2eproto[] = "\n\035flyteidl/service/signal.proto\022\020flyteid" "l.service\032\034google/api/annotations.proto\032" - "\033flyteidl/admin/signal.proto2\232\003\n\rSignalS" + "\033flyteidl/admin/signal.proto2\336\004\n\rSignalS" 
"ervice\022W\n\021GetOrCreateSignal\022(.flyteidl.a" "dmin.SignalGetOrCreateRequest\032\026.flyteidl" - ".admin.Signal\"\000\022\301\001\n\013ListSignals\022!.flytei" + ".admin.Signal\"\000\022\324\002\n\013ListSignals\022!.flytei" "dl.admin.SignalListRequest\032\032.flyteidl.ad" - "min.SignalList\"s\202\323\344\223\002m\022k/api/v1/signals/" - "{workflow_execution_id.project}/{workflo" - "w_execution_id.domain}/{workflow_executi" - "on_id.name}\022l\n\tSetSignal\022 .flyteidl.admi" - "n.SignalSetRequest\032!.flyteidl.admin.Sign" - "alSetResponse\"\032\202\323\344\223\002\024\"\017/api/v1/signals:\001" - "*B\?Z=github.com/flyteorg/flyte/flyteidl/" - "gen/pb-go/flyteidl/serviceb\006proto3" + "min.SignalList\"\205\002\202\323\344\223\002\376\001\022k/api/v1/signal" + "s/{workflow_execution_id.project}/{workf" + "low_execution_id.domain}/{workflow_execu" + "tion_id.name}Z\216\001\022\213\001/api/v1/signals/org/{" + "workflow_execution_id.org}/{workflow_exe" + "cution_id.project}/{workflow_execution_i" + "d.domain}/{workflow_execution_id.name}\022\234" + "\001\n\tSetSignal\022 .flyteidl.admin.SignalSetR" + "equest\032!.flyteidl.admin.SignalSetRespons" + "e\"J\202\323\344\223\002D\"\017/api/v1/signals:\001*Z.\")/api/v1" + "/signals/org/{id.execution_id.org}:\001*B\?Z" + "=github.com/flyteorg/flyte/flyteidl/gen/" + "pb-go/flyteidl/serviceb\006proto3" ; ::google::protobuf::internal::DescriptorTable descriptor_table_flyteidl_2fservice_2fsignal_2eproto = { false, InitDefaults_flyteidl_2fservice_2fsignal_2eproto, descriptor_table_protodef_flyteidl_2fservice_2fsignal_2eproto, - "flyteidl/service/signal.proto", &assign_descriptors_table_flyteidl_2fservice_2fsignal_2eproto, 594, + "flyteidl/service/signal.proto", &assign_descriptors_table_flyteidl_2fservice_2fsignal_2eproto, 790, }; void AddDescriptors_flyteidl_2fservice_2fsignal_2eproto() { diff --git a/flyteidl/gen/pb-go/flyteidl/admin/agent.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/agent.pb.go index 2e255f5c88..a3f2e327d9 
100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/agent.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/agent.pb.go @@ -7,6 +7,8 @@ import ( fmt "fmt" core "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" proto "github.com/golang/protobuf/proto" + duration "github.com/golang/protobuf/ptypes/duration" + timestamp "github.com/golang/protobuf/ptypes/timestamp" math "math" ) @@ -396,8 +398,8 @@ func (m *GetTaskResponse) GetLogLinks() []*core.TaskLog { } type Resource struct { - // The state of the execution is used to control its visibility in the UI/CLI. - State State `protobuf:"varint,1,opt,name=state,proto3,enum=flyteidl.admin.State" json:"state,omitempty"` + // DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI. + State State `protobuf:"varint,1,opt,name=state,proto3,enum=flyteidl.admin.State" json:"state,omitempty"` // Deprecated: Do not use. // The outputs of the execution. It's typically used by sql task. Agent service will create a // Structured dataset pointing to the query result table. // +optional @@ -405,10 +407,12 @@ type Resource struct { // A descriptive message for the current state. e.g. waiting for cluster. Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` // log information for the task execution. - LogLinks []*core.TaskLog `protobuf:"bytes,4,rep,name=log_links,json=logLinks,proto3" json:"log_links,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + LogLinks []*core.TaskLog `protobuf:"bytes,4,rep,name=log_links,json=logLinks,proto3" json:"log_links,omitempty"` + // The phase of the execution is used to determine the phase of the plugin's execution. 
+ Phase core.TaskExecution_Phase `protobuf:"varint,5,opt,name=phase,proto3,enum=flyteidl.core.TaskExecution_Phase" json:"phase,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Resource) Reset() { *m = Resource{} } @@ -436,6 +440,7 @@ func (m *Resource) XXX_DiscardUnknown() { var xxx_messageInfo_Resource proto.InternalMessageInfo +// Deprecated: Do not use. func (m *Resource) GetState() State { if m != nil { return m.State @@ -464,6 +469,13 @@ func (m *Resource) GetLogLinks() []*core.TaskLog { return nil } +func (m *Resource) GetPhase() core.TaskExecution_Phase { + if m != nil { + return m.Phase + } + return core.TaskExecution_UNDEFINED +} + // A message used to delete a task. type DeleteTaskRequest struct { // A predefined yet extensible Task type identifier. @@ -749,6 +761,254 @@ func (m *ListAgentsResponse) GetAgents() []*Agent { return nil } +// A request to get the metrics from a task execution. +type GetTaskMetricsRequest struct { + // A predefined yet extensible Task type identifier. + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` + // Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). + ResourceMeta []byte `protobuf:"bytes,2,opt,name=resource_meta,json=resourceMeta,proto3" json:"resource_meta,omitempty"` + // The metrics to query. If empty, will return a default set of metrics. + // e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG + Queries []string `protobuf:"bytes,3,rep,name=queries,proto3" json:"queries,omitempty"` + // Start timestamp, inclusive. + StartTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // End timestamp, inclusive.. 
+ EndTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Query resolution step width in duration format or float number of seconds. + Step *duration.Duration `protobuf:"bytes,6,opt,name=step,proto3" json:"step,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskMetricsRequest) Reset() { *m = GetTaskMetricsRequest{} } +func (m *GetTaskMetricsRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskMetricsRequest) ProtoMessage() {} +func (*GetTaskMetricsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c434e52bb0028071, []int{13} +} + +func (m *GetTaskMetricsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskMetricsRequest.Unmarshal(m, b) +} +func (m *GetTaskMetricsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskMetricsRequest.Marshal(b, m, deterministic) +} +func (m *GetTaskMetricsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskMetricsRequest.Merge(m, src) +} +func (m *GetTaskMetricsRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskMetricsRequest.Size(m) +} +func (m *GetTaskMetricsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskMetricsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskMetricsRequest proto.InternalMessageInfo + +func (m *GetTaskMetricsRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *GetTaskMetricsRequest) GetResourceMeta() []byte { + if m != nil { + return m.ResourceMeta + } + return nil +} + +func (m *GetTaskMetricsRequest) GetQueries() []string { + if m != nil { + return m.Queries + } + return nil +} + +func (m *GetTaskMetricsRequest) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *GetTaskMetricsRequest) GetEndTime() 
*timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *GetTaskMetricsRequest) GetStep() *duration.Duration { + if m != nil { + return m.Step + } + return nil +} + +// A response containing a list of metrics for a task execution. +type GetTaskMetricsResponse struct { + // The execution metric results. + Results []*core.ExecutionMetricResult `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskMetricsResponse) Reset() { *m = GetTaskMetricsResponse{} } +func (m *GetTaskMetricsResponse) String() string { return proto.CompactTextString(m) } +func (*GetTaskMetricsResponse) ProtoMessage() {} +func (*GetTaskMetricsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c434e52bb0028071, []int{14} +} + +func (m *GetTaskMetricsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskMetricsResponse.Unmarshal(m, b) +} +func (m *GetTaskMetricsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskMetricsResponse.Marshal(b, m, deterministic) +} +func (m *GetTaskMetricsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskMetricsResponse.Merge(m, src) +} +func (m *GetTaskMetricsResponse) XXX_Size() int { + return xxx_messageInfo_GetTaskMetricsResponse.Size(m) +} +func (m *GetTaskMetricsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskMetricsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskMetricsResponse proto.InternalMessageInfo + +func (m *GetTaskMetricsResponse) GetResults() []*core.ExecutionMetricResult { + if m != nil { + return m.Results + } + return nil +} + +// A request to get the log from a task execution. +type GetTaskLogsRequest struct { + // A predefined yet extensible Task type identifier. 
+ TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` + // Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). + ResourceMeta []byte `protobuf:"bytes,2,opt,name=resource_meta,json=resourceMeta,proto3" json:"resource_meta,omitempty"` + // Number of lines to return. + Lines uint64 `protobuf:"varint,3,opt,name=lines,proto3" json:"lines,omitempty"` + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + Token string `protobuf:"bytes,4,opt,name=token,proto3" json:"token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskLogsRequest) Reset() { *m = GetTaskLogsRequest{} } +func (m *GetTaskLogsRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskLogsRequest) ProtoMessage() {} +func (*GetTaskLogsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c434e52bb0028071, []int{15} +} + +func (m *GetTaskLogsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskLogsRequest.Unmarshal(m, b) +} +func (m *GetTaskLogsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskLogsRequest.Marshal(b, m, deterministic) +} +func (m *GetTaskLogsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskLogsRequest.Merge(m, src) +} +func (m *GetTaskLogsRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskLogsRequest.Size(m) +} +func (m *GetTaskLogsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskLogsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskLogsRequest proto.InternalMessageInfo + +func (m *GetTaskLogsRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *GetTaskLogsRequest) GetResourceMeta() []byte { + if 
m != nil { + return m.ResourceMeta + } + return nil +} + +func (m *GetTaskLogsRequest) GetLines() uint64 { + if m != nil { + return m.Lines + } + return 0 +} + +func (m *GetTaskLogsRequest) GetToken() string { + if m != nil { + return m.Token + } + return "" +} + +// A response containing the logs for a task execution. +type GetTaskLogsResponse struct { + // The execution log results. + Results []string `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + Token string `protobuf:"bytes,2,opt,name=token,proto3" json:"token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskLogsResponse) Reset() { *m = GetTaskLogsResponse{} } +func (m *GetTaskLogsResponse) String() string { return proto.CompactTextString(m) } +func (*GetTaskLogsResponse) ProtoMessage() {} +func (*GetTaskLogsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c434e52bb0028071, []int{16} +} + +func (m *GetTaskLogsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskLogsResponse.Unmarshal(m, b) +} +func (m *GetTaskLogsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskLogsResponse.Marshal(b, m, deterministic) +} +func (m *GetTaskLogsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskLogsResponse.Merge(m, src) +} +func (m *GetTaskLogsResponse) XXX_Size() int { + return xxx_messageInfo_GetTaskLogsResponse.Size(m) +} +func (m *GetTaskLogsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskLogsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskLogsResponse proto.InternalMessageInfo + +func (m *GetTaskLogsResponse) GetResults() []string { + if m != nil { + return m.Results + } + return nil +} + +func (m 
*GetTaskLogsResponse) GetToken() string { + if m != nil { + return m.Token + } + return "" +} + func init() { proto.RegisterEnum("flyteidl.admin.State", State_name, State_value) proto.RegisterType((*TaskExecutionMetadata)(nil), "flyteidl.admin.TaskExecutionMetadata") @@ -767,67 +1027,85 @@ func init() { proto.RegisterType((*GetAgentResponse)(nil), "flyteidl.admin.GetAgentResponse") proto.RegisterType((*ListAgentsRequest)(nil), "flyteidl.admin.ListAgentsRequest") proto.RegisterType((*ListAgentsResponse)(nil), "flyteidl.admin.ListAgentsResponse") + proto.RegisterType((*GetTaskMetricsRequest)(nil), "flyteidl.admin.GetTaskMetricsRequest") + proto.RegisterType((*GetTaskMetricsResponse)(nil), "flyteidl.admin.GetTaskMetricsResponse") + proto.RegisterType((*GetTaskLogsRequest)(nil), "flyteidl.admin.GetTaskLogsRequest") + proto.RegisterType((*GetTaskLogsResponse)(nil), "flyteidl.admin.GetTaskLogsResponse") } func init() { proto.RegisterFile("flyteidl/admin/agent.proto", fileDescriptor_c434e52bb0028071) } var fileDescriptor_c434e52bb0028071 = []byte{ - // 901 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0x7f, 0x6f, 0xe3, 0x44, - 0x10, 0x6d, 0x7e, 0x36, 0x99, 0xb4, 0xbd, 0x64, 0xaf, 0x01, 0x37, 0x77, 0xa0, 0xca, 0xa8, 0xa8, - 0xe2, 0x74, 0x09, 0xd7, 0xa2, 0xa3, 0x80, 0xe0, 0x94, 0xb6, 0xa6, 0x57, 0x29, 0x8d, 0xaa, 0x6d, - 0x8a, 0x00, 0x09, 0xa2, 0x4d, 0x32, 0x35, 0x56, 0x1c, 0xdb, 0x78, 0xd7, 0xd5, 0x05, 0xf1, 0x1f, - 0x5f, 0x82, 0xef, 0xc2, 0x97, 0x43, 0xde, 0x5d, 0x3b, 0x71, 0xc8, 0x9d, 0x7a, 0xe2, 0x3f, 0xef, - 0xbc, 0xb7, 0x6f, 0xde, 0xce, 0xcc, 0x66, 0x03, 0xad, 0x3b, 0x77, 0x2e, 0xd0, 0x99, 0xb8, 0x1d, - 0x36, 0x99, 0x39, 0x5e, 0x87, 0xd9, 0xe8, 0x89, 0x76, 0x10, 0xfa, 0xc2, 0x27, 0x3b, 0x09, 0xd6, - 0x96, 0x58, 0xeb, 0x69, 0xca, 0x1d, 0xfb, 0x21, 0x76, 0x5c, 0x47, 0x60, 0xc8, 0x5c, 0xae, 0xd8, - 0xad, 0xbd, 0x2c, 0x2a, 0x18, 0x9f, 0x26, 0xd0, 0x47, 0x59, 0xc8, 0xf1, 0x04, 0x86, 0x77, 0x6c, - 0x8c, 
0x1a, 0xfe, 0x78, 0x05, 0x9e, 0xa0, 0x27, 0x9c, 0x3b, 0x07, 0xc3, 0xf5, 0xdb, 0xf1, 0x0d, - 0x8e, 0x23, 0xe1, 0xf8, 0x9e, 0x82, 0xcd, 0xbf, 0x4b, 0xd0, 0x1c, 0x30, 0x3e, 0xb5, 0x92, 0xf8, - 0x15, 0x0a, 0x36, 0x61, 0x82, 0x11, 0x0a, 0x8d, 0xd8, 0xc6, 0x30, 0xdd, 0x31, 0x74, 0x26, 0x46, - 0x6e, 0x3f, 0x77, 0x58, 0x3b, 0xfa, 0xb4, 0x9d, 0x1e, 0x2e, 0x16, 0x6d, 0x67, 0x04, 0x2e, 0x53, - 0x07, 0xf4, 0x91, 0xc8, 0x02, 0xe4, 0x29, 0x54, 0x3d, 0x36, 0x43, 0x1e, 0xb0, 0x31, 0x1a, 0xf9, - 0xfd, 0xdc, 0x61, 0x95, 0x2e, 0x02, 0xe4, 0x12, 0xca, 0x2e, 0x1b, 0xa1, 0xcb, 0x8d, 0xc2, 0x7e, - 0xe1, 0xb0, 0x76, 0xf4, 0xa2, 0x9d, 0xad, 0x61, 0x7b, 0xad, 0xd1, 0x76, 0x4f, 0xee, 0xb1, 0x3c, - 0x11, 0xce, 0xa9, 0x16, 0x20, 0x3f, 0x42, 0x8d, 0x79, 0x9e, 0x2f, 0x58, 0xcc, 0xe4, 0x46, 0x51, - 0xea, 0xbd, 0x7c, 0x98, 0x5e, 0x77, 0xb1, 0x51, 0x89, 0x2e, 0x4b, 0x91, 0x36, 0x3c, 0x9e, 0x9e, - 0xf0, 0x21, 0xc7, 0xf0, 0xde, 0x19, 0xe3, 0x90, 0x8d, 0xc7, 0x7e, 0xe4, 0x09, 0xa3, 0x24, 0x0f, - 0xd3, 0x98, 0x9e, 0xf0, 0x1b, 0x85, 0x74, 0x15, 0x40, 0x04, 0x34, 0xd1, 0xbb, 0x77, 0x42, 0xdf, - 0x9b, 0xa1, 0x27, 0x86, 0xf7, 0x2c, 0x74, 0xd8, 0xc8, 0x45, 0x6e, 0x94, 0xa5, 0xa7, 0x57, 0x0f, - 0xf3, 0x64, 0x2d, 0x24, 0x7e, 0x48, 0x14, 0x94, 0xb9, 0x5d, 0x5c, 0x03, 0xb5, 0xbe, 0x82, 0xda, - 0x52, 0x59, 0x48, 0x1d, 0x0a, 0x53, 0x9c, 0xcb, 0xee, 0x55, 0x69, 0xfc, 0x49, 0x76, 0xa1, 0x74, - 0xcf, 0xdc, 0x28, 0xe9, 0x82, 0x5a, 0x7c, 0x9d, 0x3f, 0xc9, 0xb5, 0xbe, 0x83, 0xfa, 0x6a, 0x05, - 0xde, 0x6b, 0xff, 0x05, 0xec, 0xbd, 0xd5, 0xed, 0xfb, 0x08, 0x99, 0x7f, 0xe5, 0xa1, 0x71, 0x16, - 0x22, 0x13, 0x18, 0xd7, 0x84, 0xe2, 0xef, 0x11, 0x72, 0x41, 0x5e, 0x40, 0xd9, 0xf1, 0x82, 0x48, - 0x70, 0x3d, 0x8b, 0x7b, 0x2b, 0xb3, 0xd8, 0x53, 0x17, 0xeb, 0x8a, 0x05, 0x54, 0x13, 0xc9, 0x97, - 0x50, 0x11, 0x38, 0x0b, 0x5c, 0x26, 0x54, 0x96, 0xda, 0xd1, 0x93, 0x35, 0x03, 0x3c, 0xd0, 0x14, - 0x9a, 0x92, 0xc9, 0x27, 0xb0, 0xed, 0x47, 0x22, 0x88, 0xc4, 0x30, 0x08, 0xf1, 0xce, 0x79, 0x63, - 0x14, 0xa4, 0xc7, 0x2d, 0x15, 0xbc, 0x96, 
0x31, 0xf2, 0x0b, 0x7c, 0xb8, 0x72, 0x4f, 0x66, 0xba, - 0x6b, 0x46, 0x51, 0x26, 0x3b, 0x78, 0x50, 0x8b, 0x69, 0x53, 0xac, 0x0b, 0x9b, 0x7f, 0x00, 0x59, - 0x2e, 0x02, 0x0f, 0x7c, 0x8f, 0x23, 0x39, 0x80, 0xed, 0x10, 0xb9, 0x1f, 0x85, 0x63, 0x94, 0xe9, - 0x64, 0x31, 0xb6, 0x5e, 0x6f, 0xd0, 0xad, 0x24, 0x1c, 0x0b, 0x90, 0x97, 0x50, 0x49, 0xd6, 0xfa, - 0xe4, 0xc6, 0xaa, 0x19, 0xaa, 0xf1, 0xd7, 0x1b, 0x34, 0xe5, 0x9e, 0x96, 0xa0, 0x10, 0x22, 0x37, - 0x29, 0xec, 0x5c, 0xa0, 0x58, 0xae, 0xfe, 0x13, 0xa8, 0xca, 0xc3, 0x8a, 0x79, 0x80, 0xba, 0x8b, - 0x95, 0x38, 0x30, 0x98, 0x07, 0xb2, 0x5c, 0x59, 0x53, 0x71, 0xca, 0xad, 0xac, 0x25, 0xf3, 0x4f, - 0x78, 0x94, 0x6a, 0xea, 0xc3, 0x7c, 0xb1, 0xe4, 0x32, 0xf7, 0x6e, 0x97, 0x0b, 0x8f, 0xe4, 0x18, - 0xaa, 0xae, 0x6f, 0x0f, 0x5d, 0xc7, 0x9b, 0x72, 0x23, 0x2f, 0x2f, 0xd3, 0x07, 0x6b, 0xda, 0xda, - 0xf3, 0x6d, 0x5a, 0x71, 0x7d, 0xbb, 0x17, 0xf3, 0xcc, 0x7f, 0x72, 0x50, 0x49, 0xb4, 0xc8, 0x33, - 0x28, 0x71, 0x11, 0x0f, 0x45, 0x9c, 0x74, 0xe7, 0xa8, 0xb9, 0x9a, 0xf4, 0x26, 0x06, 0xa9, 0xe2, - 0x90, 0x63, 0xd8, 0x54, 0x6d, 0xe7, 0xba, 0x92, 0xef, 0x18, 0xbc, 0x84, 0x49, 0x0c, 0xd8, 0x9c, - 0x21, 0xe7, 0xcc, 0x46, 0x3d, 0x3a, 0xc9, 0x32, 0xeb, 0xbe, 0xf8, 0x40, 0xf7, 0xb7, 0xd0, 0x38, - 0x47, 0x17, 0xb3, 0x17, 0xe2, 0xff, 0xb7, 0x64, 0x17, 0xc8, 0xb2, 0xac, 0xea, 0x8a, 0x79, 0x05, - 0xa5, 0x6e, 0xfc, 0x9e, 0x11, 0x02, 0xc5, 0xf8, 0x37, 0x5a, 0x6b, 0xcb, 0x6f, 0xf2, 0x39, 0xec, - 0xf2, 0x28, 0x08, 0xfc, 0x50, 0xe0, 0x64, 0x98, 0xa6, 0x57, 0x7d, 0xa8, 0x52, 0x92, 0x62, 0x03, - 0x6d, 0x84, 0x9b, 0x07, 0xb2, 0xef, 0x52, 0x31, 0x71, 0xbe, 0x46, 0xd8, 0x7c, 0x05, 0xf5, 0x05, - 0x4d, 0xcf, 0xc7, 0x33, 0x28, 0xc9, 0x97, 0x55, 0x0f, 0xc7, 0x7f, 0xfa, 0xa4, 0xd8, 0x8a, 0x63, - 0x3e, 0x86, 0x46, 0xcf, 0xe1, 0x4a, 0x81, 0xeb, 0x4c, 0xe6, 0x19, 0x90, 0xe5, 0xa0, 0xd6, 0x7d, - 0x0e, 0x65, 0xb9, 0x27, 0xfe, 0x29, 0x29, 0xbc, 0x5d, 0x58, 0x93, 0x3e, 0xfb, 0x15, 0x4a, 0x72, - 0x22, 0x48, 0x13, 0x1a, 0xd4, 0x1a, 0xd0, 0x9f, 0xba, 0xa7, 0x3d, 0x6b, 0xf8, 
0x7d, 0xf7, 0xb2, - 0x77, 0x4b, 0xad, 0xfa, 0x46, 0x1c, 0xbe, 0xb6, 0xe8, 0x55, 0xb7, 0x6f, 0xf5, 0x07, 0x69, 0x38, - 0x47, 0x6a, 0xb0, 0x79, 0x6d, 0xf5, 0xcf, 0x2f, 0xfb, 0x17, 0xf5, 0x7c, 0xbc, 0xa0, 0xb7, 0xfd, - 0x7e, 0xbc, 0x28, 0x90, 0x6d, 0xa8, 0xde, 0xdc, 0x9e, 0x9d, 0x59, 0xd6, 0xb9, 0x75, 0x5e, 0x2f, - 0x9e, 0x7e, 0xfb, 0xf3, 0x37, 0xb6, 0x23, 0x7e, 0x8b, 0x46, 0xed, 0xb1, 0x3f, 0xeb, 0x48, 0x2b, - 0x7e, 0x68, 0xab, 0x8f, 0x4e, 0xfa, 0x8a, 0xdb, 0xe8, 0x75, 0x82, 0xd1, 0x73, 0xdb, 0xef, 0x64, - 0xff, 0x7c, 0x8c, 0xca, 0xf2, 0x41, 0x3f, 0xfe, 0x37, 0x00, 0x00, 0xff, 0xff, 0x3c, 0x11, 0x18, - 0x7d, 0x95, 0x08, 0x00, 0x00, + // 1123 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xef, 0x6f, 0xdb, 0xc4, + 0x1b, 0x5f, 0x7e, 0x36, 0x79, 0xd2, 0x75, 0xc9, 0xad, 0xd9, 0xd7, 0xcb, 0xf6, 0x85, 0xc9, 0x30, + 0x54, 0x81, 0xe6, 0xb0, 0x0e, 0x46, 0x07, 0x62, 0x53, 0xda, 0x9a, 0xad, 0x52, 0x1a, 0x55, 0xb7, + 0x14, 0x0d, 0x24, 0x88, 0x2e, 0xc9, 0x53, 0xcf, 0x8a, 0x63, 0x7b, 0xbe, 0x73, 0xb5, 0x22, 0x5e, + 0xc1, 0xbf, 0xc0, 0x0b, 0xfe, 0x55, 0xde, 0x21, 0xdf, 0x9d, 0x9d, 0x38, 0x64, 0xa3, 0x13, 0x7b, + 0xe7, 0xe7, 0xd7, 0xe7, 0xf9, 0x3c, 0x3f, 0x7c, 0x77, 0xd0, 0x39, 0xf3, 0x2e, 0x04, 0xba, 0x53, + 0xaf, 0xcb, 0xa6, 0x73, 0xd7, 0xef, 0x32, 0x07, 0x7d, 0x61, 0x85, 0x51, 0x20, 0x02, 0xb2, 0x95, + 0xda, 0x2c, 0x69, 0xeb, 0xdc, 0xce, 0x7c, 0x27, 0x41, 0x84, 0x5d, 0xcf, 0x15, 0x18, 0x31, 0x8f, + 0x2b, 0xef, 0xce, 0xcd, 0xbc, 0x55, 0x30, 0x3e, 0x4b, 0x4d, 0x1f, 0xe4, 0x4d, 0xee, 0x14, 0x7d, + 0xe1, 0x9e, 0xb9, 0x18, 0x69, 0xfb, 0xff, 0xf3, 0x76, 0x7c, 0x8d, 0x93, 0x58, 0xb8, 0x81, 0xaf, + 0xcd, 0xb7, 0xf2, 0xe6, 0x39, 0x8a, 0xc8, 0x9d, 0x64, 0xd8, 0x4e, 0x10, 0x38, 0x1e, 0x76, 0xa5, + 0x34, 0x8e, 0xcf, 0xba, 0xd3, 0x38, 0x62, 0x4b, 0xc1, 0x1f, 0xae, 0xda, 0x85, 0x3b, 0x47, 0x2e, + 0xd8, 0x3c, 0x54, 0x0e, 0xe6, 0x9f, 0x15, 0x68, 0x0f, 0x19, 0x9f, 0xd9, 0x69, 0xd6, 0x63, 0x14, + 0x6c, 0xca, 0x04, 0x23, 0x14, 0x5a, 
0x49, 0x15, 0xa3, 0x8c, 0xcf, 0xc8, 0x9d, 0x1a, 0x85, 0x3b, + 0x85, 0x9d, 0xc6, 0xee, 0x27, 0x56, 0xd6, 0x9b, 0x84, 0x93, 0x95, 0x03, 0x38, 0xca, 0xea, 0xa3, + 0xd7, 0x44, 0xde, 0x40, 0x6e, 0x43, 0xdd, 0x67, 0x73, 0xe4, 0x21, 0x9b, 0xa0, 0x51, 0xbc, 0x53, + 0xd8, 0xa9, 0xd3, 0x85, 0x82, 0x1c, 0x41, 0xd5, 0x63, 0x63, 0xf4, 0xb8, 0x51, 0xba, 0x53, 0xda, + 0x69, 0xec, 0xde, 0xb7, 0xf2, 0x23, 0xb0, 0xd6, 0x12, 0xb5, 0xfa, 0x32, 0xc6, 0xf6, 0x45, 0x74, + 0x41, 0x35, 0x00, 0x79, 0x01, 0x0d, 0xe6, 0xfb, 0x81, 0x90, 0xbd, 0xe0, 0x46, 0x59, 0xe2, 0x3d, + 0xbc, 0x1c, 0x5e, 0x6f, 0x11, 0xa8, 0x40, 0x97, 0xa1, 0x88, 0x05, 0xd7, 0x67, 0x7b, 0x7c, 0xc4, + 0x31, 0x3a, 0x77, 0x27, 0x38, 0x62, 0x93, 0x49, 0x10, 0xfb, 0xc2, 0xa8, 0xc8, 0x62, 0x5a, 0xb3, + 0x3d, 0xfe, 0x5c, 0x59, 0x7a, 0xca, 0x40, 0x04, 0xb4, 0xd1, 0x3f, 0x77, 0xa3, 0xc0, 0x9f, 0xa3, + 0x2f, 0x46, 0xe7, 0x2c, 0x72, 0xd9, 0xd8, 0x43, 0x6e, 0x54, 0x25, 0xa7, 0x27, 0x97, 0xe3, 0x64, + 0x2f, 0x20, 0xbe, 0x4f, 0x11, 0x14, 0xb9, 0x6d, 0x5c, 0x63, 0xea, 0x3c, 0x82, 0xc6, 0x52, 0x5b, + 0x48, 0x13, 0x4a, 0x33, 0xbc, 0x90, 0xd3, 0xab, 0xd3, 0xe4, 0x93, 0x6c, 0x43, 0xe5, 0x9c, 0x79, + 0x71, 0x3a, 0x05, 0x25, 0x7c, 0x5d, 0xdc, 0x2b, 0x74, 0x1e, 0x43, 0x73, 0xb5, 0x03, 0xef, 0x14, + 0xff, 0x14, 0x6e, 0xbe, 0x91, 0xed, 0xbb, 0x00, 0x99, 0xbf, 0x17, 0xa1, 0x75, 0x10, 0x21, 0x13, + 0x98, 0xf4, 0x84, 0xe2, 0xab, 0x18, 0xb9, 0x20, 0xf7, 0xa1, 0xea, 0xfa, 0x61, 0x2c, 0xb8, 0xde, + 0xc5, 0x9b, 0x2b, 0xbb, 0xd8, 0x57, 0xff, 0xe5, 0x31, 0x0b, 0xa9, 0x76, 0x24, 0x5f, 0x41, 0x4d, + 0xe0, 0x3c, 0xf4, 0x98, 0x50, 0x59, 0x1a, 0xbb, 0xb7, 0xd6, 0x2c, 0xf0, 0x50, 0xbb, 0xd0, 0xcc, + 0x99, 0x7c, 0x04, 0x57, 0x83, 0x58, 0x84, 0xb1, 0x18, 0x85, 0x11, 0x9e, 0xb9, 0xaf, 0x8d, 0x92, + 0xe4, 0xb8, 0xa9, 0x94, 0x27, 0x52, 0x47, 0x7e, 0x82, 0xff, 0xad, 0xfc, 0x27, 0x73, 0x3d, 0x35, + 0xa3, 0x2c, 0x93, 0xdd, 0xbd, 0xd4, 0x88, 0x69, 0x5b, 0xac, 0x53, 0x9b, 0xbf, 0x00, 0x59, 0x6e, + 0x02, 0x0f, 0x03, 0x9f, 0x23, 0xb9, 0x0b, 0x57, 0x23, 0xe4, 0x41, 0x1c, 
0x4d, 0x50, 0xa6, 0x93, + 0xcd, 0xd8, 0x7c, 0x76, 0x85, 0x6e, 0xa6, 0xea, 0x04, 0x80, 0x3c, 0x84, 0x5a, 0x2a, 0xeb, 0xca, + 0x8d, 0x55, 0x32, 0x54, 0xdb, 0x9f, 0x5d, 0xa1, 0x99, 0xef, 0x7e, 0x05, 0x4a, 0x11, 0x72, 0x93, + 0xc2, 0xd6, 0x53, 0x14, 0xcb, 0xdd, 0xbf, 0x05, 0x75, 0x59, 0xac, 0xb8, 0x08, 0x51, 0x4f, 0xb1, + 0x96, 0x28, 0x86, 0x17, 0xa1, 0x6c, 0x57, 0x9e, 0x54, 0x92, 0x72, 0x33, 0x4f, 0xc9, 0xfc, 0x15, + 0xae, 0x65, 0x98, 0xba, 0x98, 0x2f, 0x96, 0x58, 0x16, 0xde, 0xce, 0x72, 0xc1, 0x91, 0x3c, 0x80, + 0xba, 0x17, 0x38, 0x23, 0xcf, 0xf5, 0x67, 0xdc, 0x28, 0xca, 0x9f, 0xe9, 0xc6, 0x9a, 0xb1, 0xf6, + 0x03, 0x87, 0xd6, 0xbc, 0xc0, 0xe9, 0x27, 0x7e, 0xe6, 0x5f, 0x05, 0xa8, 0xa5, 0x58, 0xa4, 0x0b, + 0x15, 0x2e, 0x92, 0xa5, 0x48, 0x92, 0x6e, 0xed, 0xb6, 0x57, 0x93, 0x3e, 0x4f, 0x8c, 0xfb, 0x45, + 0xa3, 0x40, 0x95, 0x1f, 0x79, 0x00, 0x1b, 0x6a, 0xf4, 0x5c, 0x77, 0xf3, 0x2d, 0xcb, 0x97, 0x7a, + 0x12, 0x03, 0x36, 0xe6, 0xc8, 0x39, 0x73, 0x50, 0xaf, 0x4f, 0x2a, 0xe6, 0x2b, 0x28, 0x5f, 0xae, + 0x02, 0xb2, 0x07, 0x95, 0xf0, 0x25, 0xe3, 0x28, 0x4f, 0x9c, 0xad, 0x5d, 0xf3, 0x6d, 0x47, 0xb1, + 0x75, 0x92, 0x78, 0x52, 0x15, 0x60, 0x9e, 0x42, 0xeb, 0x10, 0x3d, 0xcc, 0xff, 0x4e, 0xff, 0x7d, + 0xa0, 0xdb, 0x40, 0x96, 0x61, 0xd5, 0x4c, 0xcd, 0x63, 0xa8, 0xf4, 0x92, 0xcb, 0x94, 0x10, 0x28, + 0x27, 0x27, 0xbc, 0xc6, 0x96, 0xdf, 0xe4, 0x73, 0xd8, 0xe6, 0x71, 0x18, 0x06, 0x91, 0xc0, 0xe9, + 0x28, 0x4b, 0xaf, 0xa6, 0x58, 0xa7, 0x24, 0xb3, 0x0d, 0x35, 0x11, 0x6e, 0xde, 0x95, 0x5b, 0x23, + 0x11, 0x53, 0xe6, 0x6b, 0x80, 0xcd, 0x27, 0xd0, 0x5c, 0xb8, 0xe9, 0xed, 0xfa, 0x0c, 0x2a, 0xf2, + 0x5a, 0xd7, 0xab, 0xf5, 0x8f, 0x29, 0x2b, 0x6f, 0xe5, 0x63, 0x5e, 0x87, 0x56, 0xdf, 0xe5, 0x0a, + 0x81, 0xeb, 0x4c, 0xe6, 0x01, 0x90, 0x65, 0xa5, 0xc6, 0xbd, 0x07, 0x55, 0x19, 0x93, 0x1c, 0x44, + 0xa5, 0x37, 0x03, 0x6b, 0x27, 0xf3, 0x8f, 0x22, 0xb4, 0xf5, 0xe2, 0x1f, 0xab, 0x2b, 0xfc, 0xbd, + 0x8d, 0x20, 0x59, 0xb1, 0x57, 0x31, 0x46, 0x2e, 0xaa, 0x9b, 0xb3, 0x4e, 0x53, 0x91, 0x3c, 0x02, + 0xe0, 0x82, 
0x45, 0x62, 0x94, 0xdc, 0xfb, 0xfa, 0x3c, 0xea, 0x58, 0xea, 0x51, 0x60, 0xa5, 0x8f, + 0x02, 0x6b, 0x98, 0x3e, 0x0a, 0x68, 0x5d, 0x7a, 0x27, 0x32, 0xf9, 0x12, 0x6a, 0xe8, 0x4f, 0x55, + 0x60, 0xe5, 0x5f, 0x03, 0x37, 0xd0, 0x9f, 0xca, 0xb0, 0x7b, 0x50, 0xe6, 0x02, 0x43, 0xa3, 0xaa, + 0x7f, 0x90, 0xd5, 0x90, 0x43, 0xfd, 0x40, 0xa1, 0xd2, 0xcd, 0x7c, 0x01, 0x37, 0x56, 0xbb, 0xa2, + 0xfb, 0xfb, 0x18, 0x36, 0x22, 0xe4, 0xb1, 0x97, 0x35, 0xf8, 0xe3, 0x95, 0x55, 0x5f, 0x3e, 0x2b, + 0x23, 0x77, 0x42, 0xa5, 0x33, 0x4d, 0x83, 0xcc, 0xdf, 0x0a, 0x40, 0x34, 0x74, 0x3f, 0x70, 0xde, + 0x63, 0xb7, 0xb7, 0xa1, 0xe2, 0xb9, 0xbe, 0xec, 0x75, 0x61, 0xa7, 0x4c, 0x95, 0x90, 0x68, 0x45, + 0x30, 0x43, 0x5f, 0x36, 0xb9, 0x4e, 0x95, 0x60, 0xda, 0x70, 0x3d, 0xc7, 0x41, 0xd7, 0x66, 0xe4, + 0x6b, 0xab, 0x67, 0xac, 0x17, 0x30, 0xc5, 0x25, 0x98, 0x4f, 0x7f, 0x86, 0x8a, 0x3c, 0x8c, 0x48, + 0x1b, 0x5a, 0xd4, 0x1e, 0xd2, 0x1f, 0x7a, 0xfb, 0x7d, 0x7b, 0xf4, 0x5d, 0xef, 0xa8, 0x7f, 0x4a, + 0xed, 0xe6, 0x95, 0x44, 0x7d, 0x62, 0xd3, 0xe3, 0xde, 0xc0, 0x1e, 0x0c, 0x33, 0x75, 0x81, 0x34, + 0x60, 0xe3, 0xc4, 0x1e, 0x1c, 0x1e, 0x0d, 0x9e, 0x36, 0x8b, 0x89, 0x40, 0x4f, 0x07, 0x83, 0x44, + 0x28, 0x91, 0xab, 0x50, 0x7f, 0x7e, 0x7a, 0x70, 0x60, 0xdb, 0x87, 0xf6, 0x61, 0xb3, 0xbc, 0xff, + 0xed, 0x8f, 0xdf, 0x38, 0xae, 0x78, 0x19, 0x8f, 0xad, 0x49, 0x30, 0xef, 0xca, 0x36, 0x07, 0x91, + 0xa3, 0x3e, 0xba, 0xd9, 0xfb, 0xd3, 0x41, 0xbf, 0x1b, 0x8e, 0xef, 0x39, 0x41, 0x37, 0xff, 0x6c, + 0x1e, 0x57, 0xe5, 0x74, 0x1f, 0xfc, 0x1d, 0x00, 0x00, 0xff, 0xff, 0xf2, 0xd1, 0x03, 0x06, 0x4f, + 0x0b, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/admin/common.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/common.pb.go index 3703dfb22e..d5598fd1db 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/common.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/common.pb.go @@ -93,7 +93,9 @@ type NamedEntityIdentifier struct { // User provided value for the resource. 
// The combination of project + domain + name uniquely identifies the resource. // +optional - in certain contexts - like 'List API', 'Launch plans' - Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // Optional, org key applied to the resource. + Org string `protobuf:"bytes,4,opt,name=org,proto3" json:"org,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -145,6 +147,13 @@ func (m *NamedEntityIdentifier) GetName() string { return "" } +func (m *NamedEntityIdentifier) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Additional metadata around a named entity. type NamedEntityMetadata struct { // Common description across all versions of the entity @@ -329,7 +338,9 @@ type NamedEntityIdentifierListRequest struct { SortBy *Sort `protobuf:"bytes,5,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` // Indicates a list of filters passed as string. // +optional - Filters string `protobuf:"bytes,6,opt,name=filters,proto3" json:"filters,omitempty"` + Filters string `protobuf:"bytes,6,opt,name=filters,proto3" json:"filters,omitempty"` + // Optional, org key applied to the resource. + Org string `protobuf:"bytes,7,opt,name=org,proto3" json:"org,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -402,6 +413,13 @@ func (m *NamedEntityIdentifierListRequest) GetFilters() string { return "" } +func (m *NamedEntityIdentifierListRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Represents a request structure to list NamedEntity objects type NamedEntityListRequest struct { // Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. 
@@ -423,7 +441,9 @@ type NamedEntityListRequest struct { SortBy *Sort `protobuf:"bytes,6,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` // Indicates a list of filters passed as string. // +optional - Filters string `protobuf:"bytes,7,opt,name=filters,proto3" json:"filters,omitempty"` + Filters string `protobuf:"bytes,7,opt,name=filters,proto3" json:"filters,omitempty"` + // Optional, org key applied to the resource. + Org string `protobuf:"bytes,8,opt,name=org,proto3" json:"org,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -503,6 +523,13 @@ func (m *NamedEntityListRequest) GetFilters() string { return "" } +func (m *NamedEntityListRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Represents a list of NamedEntityIdentifiers. type NamedEntityIdentifierList struct { // A list of identifiers. @@ -1484,80 +1511,81 @@ func init() { func init() { proto.RegisterFile("flyteidl/admin/common.proto", fileDescriptor_7c2cf612a185829c) } var fileDescriptor_7c2cf612a185829c = []byte{ - // 1192 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x57, 0xdf, 0x73, 0xdb, 0xc4, - 0x13, 0x8f, 0x1c, 0xc7, 0xa9, 0x37, 0x6d, 0xea, 0x5e, 0xd2, 0x7e, 0xdd, 0xe4, 0x4b, 0x09, 0x62, - 0xf8, 0xd1, 0x0e, 0xb5, 0x67, 0x52, 0x0a, 0xf4, 0x37, 0x4e, 0xac, 0xb6, 0x99, 0xa6, 0x6e, 0x90, - 0xd3, 0x32, 0x65, 0x60, 0xc4, 0x59, 0x3a, 0xbb, 0x47, 0x24, 0x9d, 0xb8, 0x3b, 0xb5, 0x15, 0x2f, - 0x0c, 0xbc, 0xf1, 0xc6, 0x03, 0xff, 0x10, 0x4f, 0x0c, 0xef, 0xfc, 0x41, 0xcc, 0x9d, 0x24, 0x5b, - 0x56, 0xdc, 0x42, 0xd3, 0xce, 0xf0, 0x76, 0x7b, 0xbb, 0x7b, 0xfb, 0xd9, 0xcf, 0xed, 0xae, 0x4e, - 0xb0, 0x3e, 0xf4, 0x13, 0x49, 0xa8, 0xe7, 0xb7, 0xb1, 0x17, 0xd0, 0xb0, 0xed, 0xb2, 0x20, 0x60, - 0x61, 0x2b, 0xe2, 0x4c, 0x32, 0xb4, 0x9c, 0x2b, 0x5b, 0x5a, 0xb9, 0xf6, 0xd6, 0xd8, 0xd8, 0x65, - 0x9c, 0xb4, 0xc9, 0x73, 0xe2, 0xc6, 0x92, 0xe6, 0xe6, 
0x6b, 0xe7, 0xa6, 0xd5, 0xd4, 0x23, 0xa1, - 0xa4, 0x43, 0x4a, 0x78, 0xa6, 0xff, 0xff, 0xb4, 0xde, 0xa7, 0x92, 0x70, 0xec, 0x8b, 0x4c, 0xfb, - 0xf6, 0x88, 0xb1, 0x91, 0x4f, 0xda, 0x5a, 0x1a, 0xc4, 0xc3, 0xb6, 0xa4, 0x01, 0x11, 0x12, 0x07, - 0x51, 0x6a, 0x60, 0x7e, 0x03, 0xa7, 0x7b, 0x38, 0x20, 0x9e, 0x15, 0x4a, 0x2a, 0x93, 0x9d, 0xf1, - 0xe9, 0xa8, 0x09, 0x8b, 0x11, 0x67, 0xdf, 0x11, 0x57, 0x36, 0x8d, 0x0d, 0xe3, 0xc3, 0xba, 0x9d, - 0x8b, 0xe8, 0x0c, 0xd4, 0x3c, 0x16, 0x60, 0x1a, 0x36, 0x2b, 0x5a, 0x91, 0x49, 0x08, 0x41, 0x35, - 0xc4, 0x01, 0x69, 0xce, 0xeb, 0x5d, 0xbd, 0x36, 0x19, 0xac, 0x14, 0x8e, 0xbf, 0x4f, 0x24, 0xf6, - 0xb0, 0xc4, 0x68, 0x03, 0x96, 0x3c, 0x22, 0x5c, 0x4e, 0x23, 0x95, 0x69, 0x16, 0xa0, 0xb8, 0x85, - 0x3e, 0x81, 0x05, 0x21, 0xb1, 0x24, 0x3a, 0xc6, 0xf2, 0xe6, 0x46, 0x6b, 0x9a, 0xb5, 0x56, 0xe1, - 0xd4, 0xbe, 0xb2, 0xb3, 0x53, 0x73, 0xf3, 0x0f, 0x03, 0x96, 0x0a, 0x3a, 0xf4, 0x39, 0x9c, 0xe0, - 0x44, 0xb0, 0x98, 0xbb, 0xc4, 0x91, 0x49, 0x44, 0x74, 0xac, 0xe5, 0xcd, 0xf5, 0xc9, 0x79, 0x8a, - 0xb6, 0x96, 0x9d, 0xd9, 0xec, 0x27, 0x11, 0xb1, 0x8f, 0xf3, 0x82, 0x84, 0x2e, 0x43, 0x85, 0x7a, - 0x1a, 0xc6, 0xd2, 0xe6, 0x7b, 0x2f, 0x81, 0x31, 0xe1, 0xce, 0xae, 0x50, 0x0f, 0xdd, 0x82, 0x63, - 0x41, 0x96, 0xae, 0x66, 0x64, 0x69, 0xf3, 0xdd, 0x97, 0x38, 0xe7, 0xcc, 0xd8, 0x63, 0x27, 0xf3, - 0x67, 0x03, 0xaa, 0x7d, 0xc6, 0x25, 0x6a, 0xc0, 0xfc, 0x01, 0x49, 0x32, 0x92, 0xd4, 0x12, 0x5d, - 0x87, 0xba, 0x47, 0x39, 0x71, 0x35, 0x79, 0x29, 0x41, 0xe7, 0xca, 0x87, 0x2b, 0xd7, 0x56, 0x37, - 0xb7, 0xb2, 0x27, 0x0e, 0xe6, 0x05, 0xa8, 0x8f, 0xf7, 0xd1, 0x32, 0x40, 0xd7, 0xea, 0x6f, 0x5b, - 0xbd, 0xee, 0x4e, 0xef, 0x4e, 0x63, 0x0e, 0x9d, 0x80, 0x7a, 0x67, 0x2c, 0x1a, 0xe6, 0x9f, 0x06, - 0x6c, 0xcc, 0xcc, 0x71, 0x97, 0x0a, 0x69, 0x93, 0xef, 0x63, 0x22, 0xe4, 0x11, 0x4a, 0x65, 0x15, - 0x16, 0x7c, 0x1a, 0x50, 0xa9, 0x99, 0x39, 0x61, 0xa7, 0x82, 0xda, 0x95, 0xec, 0x80, 0x84, 0xcd, - 0xaa, 0x36, 0x4e, 0x05, 0x74, 0x11, 0x16, 0x05, 0xe3, 0xd2, 0x19, 0x24, 0xcd, 0x05, 0xcd, 
0xe3, - 0xea, 0xac, 0x54, 0xed, 0x9a, 0x32, 0xda, 0x4a, 0x14, 0x98, 0x21, 0xf5, 0x25, 0xe1, 0xa2, 0x59, - 0x4b, 0xc1, 0x64, 0xa2, 0xf9, 0x53, 0x05, 0xce, 0x14, 0x72, 0x29, 0x66, 0xf0, 0xfa, 0x55, 0x52, - 0xe0, 0xa0, 0xf2, 0x22, 0x0e, 0xe6, 0x67, 0x73, 0x50, 0x9d, 0xc9, 0xc1, 0xc2, 0x0b, 0x38, 0xa8, - 0xbd, 0x1a, 0x07, 0x8b, 0xd3, 0x1c, 0x48, 0x38, 0xfb, 0xc2, 0xeb, 0x44, 0x1d, 0x38, 0xa6, 0x64, - 0x49, 0x89, 0x68, 0x1a, 0x1b, 0xf3, 0xff, 0xbe, 0xde, 0xc7, 0x6e, 0x13, 0xf8, 0x95, 0x02, 0x7c, - 0xf3, 0x5b, 0x38, 0x59, 0x22, 0x1e, 0x7d, 0x7a, 0x28, 0xd6, 0xfa, 0x4b, 0x62, 0xfd, 0x63, 0x84, - 0x5f, 0x8d, 0xa9, 0x39, 0x76, 0x87, 0xbc, 0xc1, 0xab, 0x3d, 0xda, 0x00, 0x30, 0xff, 0x32, 0xa0, - 0x59, 0xd0, 0x3e, 0x8c, 0x3c, 0x35, 0xa6, 0xfe, 0x63, 0x54, 0xaf, 0x3f, 0x96, 0xd6, 0xa7, 0x2a, - 0x28, 0xcf, 0x4a, 0x44, 0x2c, 0x14, 0xc4, 0xbc, 0x01, 0x8d, 0x07, 0x03, 0x55, 0xf5, 0x85, 0x0b, - 0x38, 0xaf, 0x81, 0x1a, 0x3a, 0xd6, 0xd9, 0x52, 0x7e, 0x25, 0xca, 0x7e, 0x37, 0x60, 0x25, 0x4f, - 0xb9, 0xd8, 0x9e, 0x97, 0x0b, 0x47, 0xbc, 0x42, 0xae, 0xe3, 0x0e, 0xab, 0xcc, 0xec, 0xb0, 0xf9, - 0x62, 0x87, 0x15, 0x5a, 0xa6, 0x3a, 0xd5, 0x32, 0xaf, 0x38, 0x7f, 0xcc, 0x9b, 0x70, 0xca, 0x0a, - 0x30, 0xf5, 0x7b, 0x4c, 0x21, 0x71, 0xb1, 0x9e, 0xb2, 0xe7, 0xa1, 0xc1, 0x89, 0x4b, 0x23, 0x4a, - 0x42, 0x29, 0x1c, 0xa2, 0xf4, 0xba, 0xea, 0xeb, 0xf6, 0xc9, 0xc9, 0xbe, 0x76, 0x33, 0xb7, 0xe0, - 0xf4, 0x1e, 0x1e, 0x11, 0xde, 0x8d, 0x65, 0x72, 0xd4, 0x33, 0x6e, 0xc2, 0xa9, 0xbe, 0x8f, 0xdd, - 0x83, 0xa3, 0xfa, 0xff, 0x56, 0x81, 0xe3, 0x53, 0xbe, 0x37, 0xa1, 0x16, 0x3d, 0xc1, 0x22, 0xeb, - 0xd5, 0xe5, 0xcd, 0xf7, 0x4b, 0xf7, 0xf8, 0x25, 0xe3, 0x07, 0x43, 0x9f, 0x3d, 0xb3, 0xc6, 0x8f, - 0x97, 0x3d, 0x65, 0x6e, 0x67, 0x5e, 0xe8, 0x0a, 0x2c, 0xa4, 0x01, 0xd3, 0x7a, 0x7d, 0xa7, 0xcc, - 0xe0, 0x21, 0xc6, 0xee, 0xce, 0xd9, 0xa9, 0x07, 0xba, 0x0d, 0x10, 0x29, 0x3e, 0x1c, 0x2f, 0x96, - 0x49, 0x56, 0xb2, 0x87, 0x6a, 0x60, 0x26, 0x63, 0x77, 0xe7, 0xec, 0x7a, 0x94, 0x2b, 0x14, 0x04, - 0xa1, 0x38, 0xd1, 0xd7, 0x3b, 
0x03, 0xc2, 0x21, 0xc2, 0x14, 0x04, 0xed, 0xb1, 0x55, 0x83, 0xaa, - 0xea, 0x51, 0xf3, 0x32, 0x2c, 0x3e, 0xe4, 0xfe, 0x96, 0xcf, 0x06, 0xea, 0x9b, 0x1c, 0x73, 0x3f, - 0xff, 0x26, 0xc7, 0xdc, 0x57, 0x65, 0x35, 0x48, 0x24, 0x11, 0x3a, 0xc5, 0x79, 0x3b, 0x15, 0xae, - 0x56, 0x9a, 0x86, 0xf9, 0x23, 0xd4, 0x76, 0xf1, 0x80, 0xf8, 0x02, 0x5d, 0x85, 0xda, 0x53, 0xec, - 0xc7, 0xe3, 0x91, 0x67, 0x96, 0x41, 0xa4, 0x76, 0xad, 0x47, 0xda, 0xc8, 0x0a, 0x25, 0x4f, 0xec, - 0xcc, 0x63, 0xed, 0x0a, 0x2c, 0x15, 0xb6, 0x67, 0x3c, 0x0a, 0x56, 0x61, 0x41, 0x9b, 0xe6, 0x83, - 0x51, 0x0b, 0x57, 0x2b, 0x9f, 0x19, 0xe6, 0x2f, 0x06, 0x2c, 0x75, 0xc2, 0x90, 0x49, 0x9d, 0x97, - 0x40, 0xb7, 0x4a, 0x30, 0x3e, 0x28, 0xc3, 0x28, 0x18, 0xbf, 0x69, 0x2c, 0xd7, 0xa0, 0x6a, 0x85, - 0x4f, 0x05, 0xba, 0x54, 0xc2, 0x50, 0x9e, 0x7c, 0xf7, 0x48, 0xa2, 0x43, 0xec, 0x61, 0xca, 0xf3, - 0xb8, 0xe6, 0x0f, 0x70, 0xac, 0x13, 0xcb, 0x27, 0x36, 0xf3, 0x09, 0xfa, 0x08, 0x10, 0x16, 0x22, - 0x0e, 0xf0, 0xc0, 0x27, 0x0e, 0xc5, 0x81, 0xc3, 0x99, 0x4f, 0x32, 0x0c, 0x8d, 0xb1, 0x66, 0x07, - 0x07, 0xda, 0xfa, 0x3a, 0xac, 0x1d, 0xc4, 0x03, 0xc2, 0x43, 0x22, 0x89, 0x70, 0x04, 0xe1, 0x4f, - 0xa9, 0x4b, 0x1c, 0xec, 0xba, 0x2c, 0x0e, 0xf3, 0x2f, 0x76, 0x73, 0x62, 0xd1, 0x4f, 0x0d, 0x3a, - 0xa9, 0x5e, 0xdf, 0xe2, 0x3d, 0x58, 0xb1, 0xf1, 0xb3, 0x07, 0xb1, 0x8c, 0x62, 0xd9, 0xc5, 0x12, - 0x6f, 0xb3, 0x70, 0x48, 0x47, 0xe8, 0x63, 0x38, 0xc3, 0xf4, 0x9e, 0xe3, 0xb3, 0xb4, 0x6e, 0x9c, - 0x88, 0x93, 0x21, 0x7d, 0x9e, 0x41, 0x59, 0x4d, 0xb5, 0xbb, 0x99, 0x72, 0x4f, 0xeb, 0xcc, 0x2f, - 0xa0, 0x7e, 0x5b, 0xa5, 0xfb, 0xd0, 0xde, 0x15, 0xea, 0x81, 0x40, 0xc3, 0x28, 0x96, 0x22, 0x73, - 0xc9, 0x24, 0x35, 0x92, 0x52, 0x67, 0x91, 0x3f, 0x29, 0x32, 0x51, 0xbd, 0xb4, 0x3d, 0xe2, 0x1e, - 0xe4, 0x2f, 0x6d, 0xb5, 0xbe, 0xf0, 0x35, 0x34, 0xca, 0x6f, 0x62, 0xf4, 0x3f, 0x58, 0xe9, 0x75, - 0xee, 0x5b, 0x5d, 0xc7, 0xea, 0xed, 0xef, 0xec, 0x3f, 0x76, 0x3a, 0xdb, 0xfb, 0x3b, 0x8f, 0xac, - 0xc6, 0x1c, 0x3a, 0x0b, 0xa7, 0xa7, 0x15, 0xf6, 0xf6, 0xdd, 0x9d, 
0x47, 0x56, 0xb7, 0x61, 0xa0, - 0x55, 0x68, 0xf4, 0x1f, 0xf7, 0xf7, 0xad, 0xfb, 0xce, 0x1d, 0xab, 0x67, 0xd9, 0x9d, 0x7d, 0xab, - 0xdb, 0xa8, 0x6c, 0xdd, 0xf8, 0xea, 0xda, 0x88, 0xca, 0x27, 0xf1, 0xa0, 0xe5, 0xb2, 0xa0, 0xad, - 0xaf, 0x8a, 0xf1, 0x51, 0xba, 0x68, 0x8f, 0xff, 0x40, 0x46, 0x24, 0x6c, 0x47, 0x83, 0x8b, 0x23, - 0xd6, 0x9e, 0xfe, 0x01, 0x1a, 0xd4, 0xf4, 0xcf, 0xc6, 0xa5, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, - 0x5a, 0xee, 0x56, 0xed, 0x19, 0x0d, 0x00, 0x00, + // 1207 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x57, 0x5f, 0x73, 0xdb, 0x44, + 0x10, 0x8f, 0x1c, 0xc7, 0x89, 0x37, 0x6d, 0xea, 0x5e, 0xd2, 0xe2, 0x26, 0x50, 0x82, 0x18, 0xfe, + 0xb4, 0x43, 0xed, 0x99, 0x94, 0x02, 0xfd, 0x8f, 0x13, 0xab, 0x6d, 0xa6, 0xa9, 0x1b, 0xe4, 0xb4, + 0x4c, 0x19, 0x66, 0xc4, 0x59, 0x3a, 0xbb, 0x87, 0x25, 0x9d, 0xb8, 0x3b, 0xb5, 0x35, 0x2f, 0xcc, + 0xf0, 0xc6, 0x1b, 0x33, 0xf0, 0x85, 0x78, 0xe2, 0x0b, 0xf0, 0xc6, 0x97, 0x61, 0xee, 0x24, 0xd9, + 0xb2, 0xe2, 0x16, 0x92, 0x76, 0x86, 0xb7, 0xdb, 0xdb, 0xdd, 0xdb, 0xdf, 0xfe, 0x6e, 0x77, 0x75, + 0x82, 0x8d, 0xbe, 0x3f, 0x92, 0x84, 0x7a, 0x7e, 0x13, 0x7b, 0x01, 0x0d, 0x9b, 0x2e, 0x0b, 0x02, + 0x16, 0x36, 0x22, 0xce, 0x24, 0x43, 0x2b, 0x99, 0xb2, 0xa1, 0x95, 0xeb, 0xef, 0x8c, 0x8d, 0x5d, + 0xc6, 0x49, 0x93, 0xbc, 0x20, 0x6e, 0x2c, 0x69, 0x66, 0xbe, 0x7e, 0x7e, 0x5a, 0x4d, 0x3d, 0x12, + 0x4a, 0xda, 0xa7, 0x84, 0xa7, 0xfa, 0xb7, 0xa7, 0xf5, 0x3e, 0x95, 0x84, 0x63, 0x5f, 0xa4, 0xda, + 0x77, 0x07, 0x8c, 0x0d, 0x7c, 0xd2, 0xd4, 0x52, 0x2f, 0xee, 0x37, 0x25, 0x0d, 0x88, 0x90, 0x38, + 0x88, 0x12, 0x03, 0x93, 0xc1, 0x99, 0x0e, 0x0e, 0x88, 0x67, 0x85, 0x92, 0xca, 0xd1, 0xee, 0xf8, + 0x74, 0x54, 0x87, 0xc5, 0x88, 0xb3, 0xef, 0x89, 0x2b, 0xeb, 0xc6, 0xa6, 0xf1, 0x71, 0xd5, 0xce, + 0x44, 0x74, 0x16, 0x2a, 0x1e, 0x0b, 0x30, 0x0d, 0xeb, 0x25, 0xad, 0x48, 0x25, 0x84, 0xa0, 0x1c, + 0xe2, 0x80, 0xd4, 0xe7, 0xf5, 0xae, 0x5e, 0xa3, 0x1a, 0xcc, 0x33, 0x3e, 0xa8, 0x97, 0xf5, 0x96, + 0x5a, 
0x9a, 0x0c, 0x56, 0x73, 0x01, 0x1f, 0x10, 0x89, 0x3d, 0x2c, 0x31, 0xda, 0x84, 0x65, 0x8f, + 0x08, 0x97, 0xd3, 0x48, 0xe5, 0x9e, 0x86, 0xcc, 0x6f, 0xa1, 0xcf, 0x60, 0x41, 0x48, 0x2c, 0x89, + 0x8e, 0xba, 0xb2, 0xb5, 0xd9, 0x98, 0xe6, 0xb1, 0x91, 0x3b, 0xb5, 0xab, 0xec, 0xec, 0xc4, 0xdc, + 0xfc, 0xd3, 0x80, 0xe5, 0x9c, 0x0e, 0x7d, 0x09, 0x27, 0x39, 0x11, 0x2c, 0xe6, 0x2e, 0x71, 0xe4, + 0x28, 0x22, 0x3a, 0xd6, 0xca, 0xd6, 0xc6, 0xe4, 0x3c, 0x45, 0x64, 0xc3, 0x4e, 0x6d, 0x0e, 0x46, + 0x11, 0xb1, 0x4f, 0xf0, 0x9c, 0x84, 0xae, 0x40, 0x89, 0x7a, 0x1a, 0xc6, 0xf2, 0xd6, 0x07, 0xaf, + 0x80, 0x31, 0x61, 0xd3, 0x2e, 0x51, 0x0f, 0xdd, 0x86, 0xa5, 0x20, 0x4d, 0x57, 0x73, 0xb4, 0xbc, + 0xf5, 0xfe, 0x2b, 0x9c, 0x33, 0x66, 0xec, 0xb1, 0x93, 0xf9, 0xb3, 0x01, 0xe5, 0x2e, 0xe3, 0x52, + 0xb1, 0x3a, 0x24, 0xa3, 0x94, 0x24, 0xb5, 0x44, 0x37, 0xa0, 0xea, 0x51, 0x4e, 0x5c, 0x4d, 0x5e, + 0x42, 0xd0, 0xf9, 0xe2, 0xe1, 0xca, 0xb5, 0xd1, 0xce, 0xac, 0xec, 0x89, 0x83, 0x79, 0x11, 0xaa, + 0xe3, 0x7d, 0xb4, 0x02, 0xd0, 0xb6, 0xba, 0x3b, 0x56, 0xa7, 0xbd, 0xdb, 0xb9, 0x5b, 0x9b, 0x43, + 0x27, 0xa1, 0xda, 0x1a, 0x8b, 0x86, 0xf9, 0xb7, 0x01, 0x9b, 0x33, 0x73, 0xdc, 0xa3, 0x42, 0xda, + 0xe4, 0x87, 0x98, 0x08, 0x79, 0x8c, 0xe2, 0x59, 0x83, 0x05, 0x9f, 0x06, 0x54, 0x6a, 0x66, 0x4e, + 0xda, 0x89, 0xa0, 0x76, 0x25, 0x1b, 0x92, 0x30, 0x2d, 0xa0, 0x44, 0x40, 0x97, 0x60, 0x51, 0x30, + 0x2e, 0x9d, 0xde, 0xa8, 0xbe, 0xa0, 0x79, 0x5c, 0x9b, 0x95, 0xaa, 0x5d, 0x51, 0x46, 0xdb, 0x23, + 0x05, 0xa6, 0x4f, 0x7d, 0x49, 0xb8, 0xa8, 0x57, 0x12, 0x30, 0xa9, 0x98, 0x55, 0xe7, 0xe2, 0xa4, + 0x3a, 0x7f, 0x2b, 0xc1, 0xd9, 0x5c, 0x76, 0xf9, 0x9c, 0x5e, 0xbf, 0x6e, 0x72, 0xac, 0x94, 0x5e, + 0xc6, 0xca, 0xfc, 0x6c, 0x56, 0xca, 0x33, 0x59, 0x59, 0x78, 0x09, 0x2b, 0x95, 0xa3, 0xb1, 0xb2, + 0x38, 0x93, 0x95, 0xa5, 0x09, 0x2b, 0x12, 0xce, 0xbd, 0xf4, 0xca, 0x51, 0x0b, 0x96, 0x94, 0x2c, + 0x29, 0x11, 0x75, 0x63, 0x73, 0xfe, 0xbf, 0xf7, 0xc4, 0xd8, 0x6d, 0x92, 0x50, 0x29, 0x97, 0x90, + 0xf9, 0x1d, 0x9c, 0x2a, 0x5c, 0x05, 0xfa, 
0xfc, 0x50, 0xac, 0x8d, 0x57, 0xc4, 0xfa, 0xd7, 0x08, + 0xbf, 0x1a, 0x53, 0xd3, 0xef, 0x2e, 0x79, 0x83, 0x97, 0x7d, 0xbc, 0x21, 0x61, 0xfe, 0x65, 0x40, + 0x3d, 0xa7, 0x7d, 0x14, 0x79, 0x6a, 0x94, 0xfd, 0xcf, 0xa8, 0x5e, 0x7f, 0x74, 0x6d, 0x4c, 0x55, + 0x50, 0x96, 0x95, 0x88, 0x58, 0x28, 0x88, 0x79, 0x13, 0x6a, 0x0f, 0x7b, 0xaa, 0x0f, 0x72, 0x17, + 0x70, 0x41, 0x03, 0x35, 0x74, 0xac, 0x73, 0x85, 0xfc, 0x0a, 0x94, 0xfd, 0x61, 0xc0, 0x6a, 0x96, + 0x72, 0xbe, 0x61, 0xaf, 0xe4, 0x8e, 0x38, 0x42, 0xae, 0xe3, 0x9e, 0x2b, 0xcd, 0xec, 0xb9, 0xf9, + 0x7c, 0xcf, 0xe5, 0x9a, 0xa8, 0x3c, 0xdd, 0x44, 0x47, 0x9b, 0x51, 0xe6, 0x2d, 0x38, 0x6d, 0x05, + 0x98, 0xfa, 0x1d, 0xa6, 0x90, 0xb8, 0x58, 0x4f, 0xe2, 0x0b, 0x50, 0xe3, 0xc4, 0xa5, 0x11, 0x25, + 0xa1, 0x14, 0x0e, 0x51, 0x7a, 0x5d, 0xf5, 0x55, 0xfb, 0xd4, 0x64, 0x5f, 0xbb, 0x99, 0xdb, 0x70, + 0x66, 0x1f, 0x0f, 0x08, 0x6f, 0xc7, 0x72, 0x74, 0xdc, 0x33, 0x6e, 0xc1, 0xe9, 0xae, 0x8f, 0xdd, + 0xe1, 0x71, 0xfd, 0x7f, 0x2f, 0xc1, 0x89, 0x29, 0xdf, 0x5b, 0x50, 0x89, 0x9e, 0x62, 0x91, 0xf6, + 0xea, 0xca, 0xd6, 0x87, 0x85, 0x7b, 0xfc, 0x9a, 0xf1, 0x61, 0xdf, 0x67, 0xcf, 0xad, 0xf1, 0x93, + 0x67, 0x5f, 0x99, 0xdb, 0xa9, 0x17, 0xba, 0x0a, 0x0b, 0x49, 0xc0, 0xa4, 0x5e, 0xdf, 0x2b, 0x32, + 0x78, 0x88, 0xb1, 0x7b, 0x73, 0x76, 0xe2, 0x81, 0xee, 0x00, 0x44, 0x8a, 0x0f, 0xc7, 0x8b, 0xe5, + 0x28, 0x2d, 0xd9, 0x43, 0x35, 0x30, 0x93, 0xb1, 0x7b, 0x73, 0x76, 0x35, 0xca, 0x14, 0x0a, 0x82, + 0x50, 0x9c, 0xe8, 0xeb, 0x9d, 0x01, 0xe1, 0x10, 0x61, 0x0a, 0x82, 0xf6, 0xd8, 0xae, 0x40, 0x59, + 0xf5, 0xa8, 0x79, 0x05, 0x16, 0x1f, 0x71, 0x7f, 0xdb, 0x67, 0x3d, 0x35, 0x59, 0x63, 0xee, 0x67, + 0xdf, 0xed, 0x98, 0xfb, 0xaa, 0xac, 0x7a, 0x23, 0x49, 0x84, 0x4e, 0x71, 0xde, 0x4e, 0x84, 0x6b, + 0xa5, 0xba, 0x61, 0xfe, 0x04, 0x95, 0x3d, 0xdc, 0x23, 0xbe, 0x40, 0xd7, 0xa0, 0xf2, 0x0c, 0xfb, + 0xf1, 0x78, 0xe4, 0x99, 0x45, 0x10, 0x89, 0x5d, 0xe3, 0xb1, 0x36, 0xb2, 0x42, 0xc9, 0x47, 0x76, + 0xea, 0xb1, 0x7e, 0x15, 0x96, 0x73, 0xdb, 0x33, 0x1e, 0x0e, 0x6b, 0xb0, 0xa0, 
0x4d, 0xb3, 0xc1, + 0xa8, 0x85, 0x6b, 0xa5, 0x2f, 0x0c, 0xf3, 0x17, 0x03, 0x96, 0x5b, 0x61, 0xc8, 0xa4, 0xce, 0x4b, + 0xa0, 0xdb, 0x05, 0x18, 0x1f, 0x15, 0x61, 0xe4, 0x8c, 0xdf, 0x34, 0x96, 0xeb, 0x50, 0xb6, 0xc2, + 0x67, 0x02, 0x5d, 0x2e, 0x60, 0x28, 0x4e, 0xbe, 0xfb, 0x64, 0xa4, 0x43, 0xec, 0x63, 0xca, 0xb3, + 0xb8, 0xe6, 0x8f, 0xb0, 0xd4, 0x8a, 0xe5, 0x53, 0x9b, 0xf9, 0x04, 0x7d, 0x02, 0x08, 0x0b, 0x11, + 0x07, 0xb8, 0xe7, 0x13, 0x87, 0xe2, 0xc0, 0xe1, 0xcc, 0x27, 0x29, 0x86, 0xda, 0x58, 0xb3, 0x8b, + 0x03, 0x6d, 0x7d, 0x03, 0xd6, 0x87, 0x71, 0x8f, 0xf0, 0x90, 0x48, 0x22, 0x1c, 0x41, 0xf8, 0x33, + 0xea, 0x12, 0x07, 0xbb, 0x2e, 0x8b, 0xc3, 0xec, 0x1b, 0x5e, 0x9f, 0x58, 0x74, 0x13, 0x83, 0x56, + 0xa2, 0xd7, 0xb7, 0x78, 0x1f, 0x56, 0x6d, 0xfc, 0xfc, 0x61, 0x2c, 0xa3, 0x58, 0xb6, 0xb1, 0xc4, + 0x3b, 0x2c, 0xec, 0xd3, 0x01, 0xfa, 0x14, 0xce, 0x32, 0xbd, 0xe7, 0xf8, 0x2c, 0xa9, 0x1b, 0x27, + 0xe2, 0xa4, 0x4f, 0x5f, 0xa4, 0x50, 0xd6, 0x12, 0xed, 0x5e, 0xaa, 0xdc, 0xd7, 0x3a, 0xf3, 0x2b, + 0xa8, 0xde, 0x51, 0xe9, 0x3e, 0xb2, 0xf7, 0x84, 0x7a, 0x32, 0xd0, 0x30, 0x8a, 0xa5, 0x48, 0x5d, + 0x52, 0x49, 0x8d, 0xa4, 0xc4, 0x59, 0x64, 0x8f, 0x8c, 0x54, 0x54, 0xef, 0x73, 0x8f, 0xb8, 0xc3, + 0xec, 0x7d, 0xae, 0xd6, 0x17, 0xbf, 0x85, 0x5a, 0xf1, 0xdd, 0x8c, 0xde, 0x82, 0xd5, 0x4e, 0xeb, + 0x81, 0xd5, 0x76, 0xac, 0xce, 0xc1, 0xee, 0xc1, 0x13, 0xa7, 0xb5, 0x73, 0xb0, 0xfb, 0xd8, 0xaa, + 0xcd, 0xa1, 0x73, 0x70, 0x66, 0x5a, 0x61, 0xef, 0xdc, 0xdb, 0x7d, 0x6c, 0xb5, 0x6b, 0x06, 0x5a, + 0x83, 0x5a, 0xf7, 0x49, 0xf7, 0xc0, 0x7a, 0xe0, 0xdc, 0xb5, 0x3a, 0x96, 0xdd, 0x3a, 0xb0, 0xda, + 0xb5, 0xd2, 0xf6, 0xcd, 0x6f, 0xae, 0x0f, 0xa8, 0x7c, 0x1a, 0xf7, 0x1a, 0x2e, 0x0b, 0x9a, 0xfa, + 0xaa, 0x18, 0x1f, 0x24, 0x8b, 0xe6, 0xf8, 0xbf, 0x65, 0x40, 0xc2, 0x66, 0xd4, 0xbb, 0x34, 0x60, + 0xcd, 0xe9, 0xdf, 0xa6, 0x5e, 0x45, 0xff, 0xa2, 0x5c, 0xfe, 0x27, 0x00, 0x00, 0xff, 0xff, 0xa8, + 0x9c, 0xe3, 0x6a, 0x4f, 0x0d, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/admin/execution.pb.go 
b/flyteidl/gen/pb-go/flyteidl/admin/execution.pb.go index 6ee9c5dc15..a77fcce08b 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/execution.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/execution.pb.go @@ -115,10 +115,12 @@ type ExecutionCreateRequest struct { // The inputs required to start the execution. All required inputs must be // included in this map. If not required and not provided, defaults apply. // +optional - Inputs *core.LiteralMap `protobuf:"bytes,5,opt,name=inputs,proto3" json:"inputs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Inputs *core.LiteralMap `protobuf:"bytes,5,opt,name=inputs,proto3" json:"inputs,omitempty"` + // Optional, org key applied to the resource. + Org string `protobuf:"bytes,6,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ExecutionCreateRequest) Reset() { *m = ExecutionCreateRequest{} } @@ -181,6 +183,13 @@ func (m *ExecutionCreateRequest) GetInputs() *core.LiteralMap { return nil } +func (m *ExecutionCreateRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Request to relaunch the referenced execution. type ExecutionRelaunchRequest struct { // Identifier of the workflow execution to relaunch. 
@@ -1739,122 +1748,123 @@ func init() { func init() { proto.RegisterFile("flyteidl/admin/execution.proto", fileDescriptor_4e2785d91b3809ec) } var fileDescriptor_4e2785d91b3809ec = []byte{ - // 1862 bytes of a gzipped FileDescriptorProto + // 1874 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x58, 0x5b, 0x73, 0xdb, 0xc6, - 0x15, 0x16, 0x29, 0x52, 0xa2, 0x0e, 0x25, 0x8a, 0x5e, 0x29, 0x0a, 0x2c, 0x3b, 0xb6, 0x82, 0x4c, - 0x6b, 0x4f, 0x32, 0x25, 0x27, 0x4a, 0x3d, 0x9d, 0x38, 0x71, 0x26, 0x14, 0x45, 0x87, 0x4c, 0x75, - 0x71, 0x57, 0x17, 0xe7, 0x32, 0x19, 0x74, 0x09, 0x2c, 0x49, 0x34, 0xb8, 0x79, 0x77, 0x21, 0xd9, - 0xff, 0xa0, 0xd3, 0xa7, 0x3e, 0xb6, 0xff, 0xa0, 0x4f, 0x7d, 0xec, 0x2f, 0xe8, 0x8f, 0xea, 0x63, - 0x07, 0x8b, 0x05, 0x08, 0x80, 0x94, 0x25, 0x8f, 0xf5, 0x86, 0x3d, 0xe7, 0x3b, 0x67, 0xcf, 0x9e, - 0x3d, 0xb7, 0x05, 0x3c, 0x18, 0x39, 0x6f, 0x04, 0xb5, 0x2d, 0xa7, 0x4d, 0x2c, 0xd7, 0xf6, 0xda, - 0xf4, 0x35, 0x35, 0x43, 0x61, 0xfb, 0x5e, 0x2b, 0x60, 0xbe, 0xf0, 0x51, 0x23, 0xe1, 0xb7, 0x24, - 0x7f, 0xfb, 0x51, 0x01, 0x6f, 0x3a, 0x21, 0x17, 0x94, 0x19, 0x84, 0x73, 0x7b, 0xec, 0xb9, 0xd4, - 0x13, 0xb1, 0xe0, 0xf6, 0xbd, 0x22, 0xd0, 0x77, 0xdd, 0x44, 0xeb, 0xf6, 0xfd, 0x94, 0x69, 0xfa, - 0x8c, 0xb6, 0x1d, 0x5b, 0x50, 0x46, 0x1c, 0xae, 0xb8, 0x1f, 0xe5, 0xb9, 0x05, 0x93, 0xb6, 0x1f, - 0xe6, 0xd9, 0x84, 0x09, 0x7b, 0x44, 0x4c, 0x61, 0xd8, 0x96, 0x02, 0x3c, 0xc8, 0x03, 0x6c, 0x8b, - 0x7a, 0xc2, 0x1e, 0xd9, 0x94, 0xcd, 0x98, 0x26, 0xf9, 0x2e, 0x15, 0xcc, 0x36, 0xf9, 0x7c, 0xd3, - 0x38, 0x35, 0x43, 0x66, 0x8b, 0x37, 0x89, 0xea, 0xb1, 0xef, 0x8f, 0x1d, 0xda, 0x96, 0xab, 0x61, - 0x38, 0x6a, 0x5b, 0x21, 0x23, 0x59, 0xdb, 0x8a, 0x7c, 0x61, 0xbb, 0x94, 0x0b, 0xe2, 0x06, 0x57, - 0x29, 0xb8, 0x64, 0x24, 0x08, 0x28, 0x53, 0xdb, 0xeb, 0xff, 0x2d, 0xc1, 0x56, 0x2f, 0x39, 0x70, - 0x97, 0x51, 0x22, 0x28, 0xa6, 0xaf, 0x42, 0xca, 0x05, 0xd2, 0x60, 0x39, 0x60, 0xfe, 0x5f, 0xa8, - 0x29, 0xb4, 0xd2, 0x4e, 0xe9, 0xf1, 0x0a, 0x4e, 
0x96, 0x68, 0x0b, 0x96, 0x2c, 0xdf, 0x25, 0xb6, - 0xa7, 0x95, 0x25, 0x43, 0xad, 0x10, 0x82, 0x8a, 0x47, 0x5c, 0xaa, 0x2d, 0x4a, 0xaa, 0xfc, 0x46, - 0x9f, 0x43, 0x85, 0x07, 0xd4, 0xd4, 0x2a, 0x3b, 0xa5, 0xc7, 0xf5, 0xdd, 0x8f, 0x5a, 0xf9, 0xfb, - 0x6d, 0xa5, 0x7b, 0x9f, 0x04, 0xd4, 0xc4, 0x12, 0x8a, 0x3e, 0x87, 0x25, 0xdb, 0x0b, 0x42, 0xc1, - 0xb5, 0xaa, 0x14, 0xba, 0x3b, 0x15, 0x8a, 0x7c, 0xd4, 0x3a, 0x88, 0xaf, 0xef, 0x90, 0x04, 0x58, - 0x01, 0xf5, 0x7f, 0x96, 0x40, 0x4b, 0x55, 0x61, 0xea, 0x90, 0xd0, 0x33, 0x27, 0xc9, 0x41, 0x9e, - 0x42, 0xd9, 0xb6, 0xe4, 0x19, 0xea, 0xbb, 0x9f, 0x16, 0x74, 0xbd, 0xf4, 0xd9, 0xaf, 0x23, 0xc7, - 0xbf, 0x4c, 0x85, 0x07, 0xe9, 0xed, 0xe1, 0xb2, 0x6d, 0xcd, 0x3d, 0xd2, 0x23, 0x58, 0xf7, 0x2f, - 0x28, 0xbb, 0x64, 0xb6, 0xa0, 0x86, 0x49, 0xcc, 0x09, 0x95, 0xa7, 0xab, 0xe1, 0x46, 0x4a, 0xee, - 0x46, 0xd4, 0xef, 0x2b, 0xb5, 0x72, 0x73, 0x51, 0xff, 0x57, 0x09, 0x3e, 0xcc, 0xd8, 0x66, 0x46, - 0xa0, 0xdb, 0x34, 0xad, 0x9c, 0x31, 0xed, 0x19, 0xd4, 0x5c, 0x2a, 0x88, 0x45, 0x04, 0x91, 0x26, - 0xd7, 0x77, 0x3f, 0xbe, 0xd2, 0xe3, 0x87, 0x0a, 0x88, 0x53, 0x11, 0xfd, 0x2c, 0x63, 0x69, 0x12, - 0x0c, 0x3c, 0xf0, 0x3d, 0x4e, 0xdf, 0xc7, 0x52, 0xfd, 0x47, 0xb8, 0x37, 0x03, 0xf9, 0x8e, 0x8a, - 0x5b, 0x70, 0x82, 0xfe, 0x9f, 0x12, 0xac, 0xa4, 0xbc, 0xf7, 0x72, 0x67, 0x12, 0xa8, 0xe5, 0x9b, - 0x07, 0xea, 0x53, 0x58, 0x36, 0x1d, 0x9f, 0x87, 0x8c, 0x2a, 0x67, 0xef, 0x5c, 0x29, 0xd5, 0x8d, - 0x71, 0x38, 0x11, 0xd0, 0xff, 0x0c, 0x6b, 0x29, 0xf3, 0xc0, 0xe6, 0x02, 0x7d, 0x09, 0x90, 0x56, - 0x1e, 0xae, 0x95, 0x76, 0x16, 0xf3, 0x91, 0x5f, 0xd0, 0x87, 0x33, 0x60, 0xb4, 0x09, 0x55, 0xe1, - 0xff, 0x4a, 0x93, 0x74, 0x8c, 0x17, 0x3a, 0x85, 0xc6, 0x34, 0x53, 0xf6, 0x1c, 0x7f, 0x88, 0xfe, - 0x00, 0x4b, 0x17, 0xc4, 0x09, 0x29, 0x57, 0x2e, 0xba, 0x3a, 0xb1, 0xf6, 0xca, 0x5a, 0xa9, 0xbf, - 0x80, 0x15, 0x1c, 0x21, 0x58, 0x0c, 0x99, 0x1d, 0xab, 0xef, 0x2f, 0xe0, 0x68, 0xb1, 0xb7, 0x04, - 0x15, 0x19, 0x33, 0x5d, 0x58, 0xeb, 0x0c, 0x7d, 0x26, 0x92, 0x70, 0x8a, 0xac, 0x31, 
0x49, 0xc8, - 0xa9, 0xaa, 0x1a, 0xf1, 0x02, 0xdd, 0x87, 0x95, 0x80, 0xd9, 0x9e, 0x69, 0x07, 0xc4, 0x51, 0x76, - 0x4e, 0x09, 0xfa, 0x3f, 0x96, 0xa1, 0x59, 0xf4, 0x15, 0xfa, 0x06, 0x96, 0xfd, 0x50, 0xc8, 0x42, - 0x10, 0xdb, 0xfb, 0xa0, 0xe8, 0x8e, 0xfc, 0xf9, 0x94, 0xd1, 0x89, 0x10, 0x7a, 0x02, 0x55, 0xca, - 0x98, 0xcf, 0x66, 0xaf, 0x54, 0x9e, 0x36, 0xdd, 0xaf, 0x17, 0x81, 0xfa, 0x0b, 0x38, 0x46, 0xa3, - 0xdf, 0x40, 0x9d, 0x44, 0x07, 0x32, 0xe2, 0x53, 0x40, 0x64, 0xab, 0x52, 0x0d, 0x92, 0xd1, 0x95, - 0x07, 0x7a, 0x0e, 0x8d, 0x18, 0x96, 0x26, 0xdc, 0xea, 0xfc, 0xc8, 0xc9, 0x79, 0xa7, 0xbf, 0x80, - 0xd7, 0x48, 0xce, 0x5d, 0xdf, 0x42, 0x3d, 0x36, 0xd8, 0x90, 0x4a, 0xd6, 0x6e, 0x76, 0x33, 0x10, - 0xcb, 0xec, 0x47, 0x1a, 0x9e, 0xc3, 0xba, 0xe9, 0xbb, 0x41, 0x28, 0xa8, 0x65, 0xa8, 0xc2, 0xb9, - 0x78, 0x03, 0x2d, 0xb8, 0x91, 0x48, 0x0d, 0xa4, 0x10, 0xfa, 0x1a, 0xaa, 0xc1, 0x84, 0xf0, 0xb8, - 0x9a, 0x35, 0x76, 0x7f, 0x7b, 0x5d, 0x02, 0xb5, 0x5e, 0x44, 0x68, 0x1c, 0x0b, 0x45, 0xf1, 0xcb, - 0x05, 0x61, 0x91, 0x11, 0x44, 0xa8, 0xca, 0xbd, 0xdd, 0x8a, 0xdb, 0x4f, 0x2b, 0x69, 0x3f, 0xad, - 0xd3, 0xa4, 0x3f, 0xe1, 0x15, 0x85, 0xee, 0x08, 0xf4, 0x04, 0x6a, 0x49, 0x5f, 0xd3, 0x96, 0x94, - 0xe5, 0x45, 0xc1, 0x7d, 0x05, 0xc0, 0x29, 0x34, 0xda, 0xd1, 0x94, 0x45, 0x4a, 0xee, 0xb8, 0x7c, - 0xfd, 0x8e, 0x0a, 0xdd, 0x91, 0xc9, 0x16, 0x06, 0x56, 0x22, 0x5a, 0xbb, 0x5e, 0x54, 0xa1, 0x3b, - 0x02, 0xed, 0xc1, 0x9a, 0xe7, 0x47, 0x75, 0xc3, 0x24, 0x71, 0xaa, 0xae, 0xc8, 0x54, 0xbd, 0x5f, - 0xbc, 0xf6, 0xa3, 0x0c, 0x08, 0xe7, 0x45, 0xd0, 0x53, 0xa8, 0x5f, 0x2a, 0x6f, 0x1a, 0xb6, 0xa5, - 0xd5, 0xe7, 0xde, 0x56, 0xa6, 0x3e, 0x41, 0x82, 0x1e, 0x58, 0xe8, 0x17, 0xd8, 0xe4, 0x82, 0x44, - 0x9d, 0x67, 0x42, 0xbc, 0x31, 0x35, 0x2c, 0x2a, 0x88, 0xed, 0x70, 0xad, 0x21, 0x95, 0x7c, 0x76, - 0x75, 0xdd, 0x8a, 0x84, 0xba, 0x52, 0x66, 0x3f, 0x16, 0xc1, 0x88, 0xcf, 0xd0, 0xf6, 0xd6, 0x61, - 0x4d, 0x85, 0x23, 0xa3, 0x3c, 0x74, 0x84, 0xfe, 0x33, 0x34, 0x4e, 0xde, 0x70, 0x41, 0xdd, 0x34, - 0x62, 0x3f, 0x83, 0x3b, 
0x69, 0xf1, 0x31, 0xd4, 0x40, 0xa6, 0x92, 0xbd, 0x49, 0xa7, 0x49, 0x2c, - 0xe9, 0x51, 0xde, 0x47, 0x9d, 0x89, 0x07, 0xc4, 0x4c, 0x5a, 0xd5, 0x94, 0xa0, 0xff, 0xaf, 0x02, - 0x77, 0x66, 0x1a, 0x12, 0xea, 0x42, 0xc5, 0xf5, 0xad, 0xb8, 0x80, 0x34, 0x76, 0xdb, 0xd7, 0x76, - 0xb0, 0x0c, 0xc5, 0xb7, 0x28, 0x96, 0xc2, 0x6f, 0x2f, 0x38, 0xd1, 0x70, 0xe3, 0x51, 0x2e, 0x6c, - 0x6f, 0x2c, 0x73, 0x65, 0x0d, 0x27, 0x4b, 0xf4, 0x0c, 0x56, 0xb9, 0x39, 0xa1, 0x56, 0xe8, 0xc4, - 0xc1, 0x51, 0xb9, 0x36, 0x38, 0xea, 0x29, 0xbe, 0x23, 0xd0, 0x4f, 0xf0, 0x41, 0x40, 0x18, 0xf5, - 0x84, 0xe1, 0xf9, 0x16, 0x35, 0x52, 0x7f, 0xa8, 0x8c, 0x28, 0x26, 0xd5, 0x91, 0x6f, 0xd1, 0x79, - 0x1d, 0x69, 0x23, 0x56, 0x92, 0x63, 0xa3, 0x9f, 0x61, 0x83, 0xd1, 0x11, 0x65, 0xd4, 0x33, 0xb3, - 0x9a, 0x9b, 0xef, 0xdc, 0xef, 0x50, 0xaa, 0x66, 0xaa, 0xfc, 0x3b, 0x58, 0xe7, 0xf2, 0x9e, 0xa7, - 0x05, 0xed, 0xce, 0xfc, 0xaa, 0x9b, 0x0f, 0x07, 0xdc, 0xe0, 0xf9, 0xf0, 0xf8, 0x1a, 0x56, 0x33, - 0x33, 0x32, 0xd7, 0x50, 0xb1, 0x95, 0x49, 0xf3, 0x3a, 0x0a, 0x32, 0xd8, 0xc7, 0xf5, 0x04, 0x3e, - 0xb0, 0xb8, 0x3e, 0xce, 0xf4, 0xc5, 0xe8, 0x36, 0x11, 0xc0, 0xd2, 0x61, 0xe7, 0xe8, 0xac, 0x73, - 0xd0, 0x5c, 0x40, 0x6b, 0xb0, 0x72, 0xd2, 0xed, 0xf7, 0xf6, 0xcf, 0x0e, 0x7a, 0xfb, 0xcd, 0x52, - 0xc4, 0x3a, 0xf9, 0xf1, 0xe4, 0xb4, 0x77, 0xd8, 0x2c, 0xa3, 0x55, 0xa8, 0xe1, 0xde, 0x41, 0xe7, - 0xec, 0xa8, 0xdb, 0x6f, 0x2e, 0x22, 0x04, 0x8d, 0x6e, 0x7f, 0x70, 0xb0, 0x6f, 0xbc, 0x3c, 0xc6, - 0x7f, 0x7c, 0x7e, 0x70, 0xfc, 0xb2, 0x59, 0x89, 0x84, 0x71, 0xaf, 0x7b, 0x7c, 0xde, 0xc3, 0xbd, - 0xfd, 0x66, 0x55, 0x3f, 0x87, 0x66, 0x36, 0x45, 0x65, 0x0f, 0x9e, 0xc9, 0xed, 0xd2, 0x3b, 0xe7, - 0xb6, 0xfe, 0xf7, 0x5a, 0xe6, 0x04, 0x27, 0xf1, 0x98, 0x50, 0x8f, 0x07, 0x52, 0x23, 0x70, 0x88, - 0x77, 0x45, 0xef, 0xcd, 0x66, 0x7b, 0x8c, 0x7e, 0xe1, 0x10, 0x0f, 0x3d, 0x49, 0x67, 0xe1, 0xf2, - 0x4d, 0x4a, 0xba, 0x02, 0xbf, 0xe7, 0x1c, 0x88, 0xfa, 0x45, 0x3f, 0x54, 0xe7, 0x8f, 0x37, 0x45, - 0x07, 0x46, 0xdd, 0x2d, 0x5f, 0xe9, 0x3e, 0x86, 0xba, 0x65, 
0x73, 0x32, 0x74, 0xa8, 0x41, 0x1c, - 0x47, 0x56, 0xf7, 0x5a, 0xd4, 0xbe, 0x14, 0xb1, 0xe3, 0x38, 0xa8, 0x05, 0x4b, 0x0e, 0x19, 0x52, - 0x87, 0xab, 0x12, 0xbe, 0x35, 0xd3, 0xe5, 0x25, 0x17, 0x2b, 0x14, 0x7a, 0x06, 0x75, 0xe2, 0x79, - 0xbe, 0x50, 0xa6, 0xc5, 0xc5, 0xfb, 0xde, 0x4c, 0xd7, 0x9d, 0x42, 0x70, 0x16, 0x8f, 0x06, 0xd0, - 0x4c, 0x1e, 0x59, 0x86, 0xe9, 0x7b, 0x82, 0xbe, 0x16, 0xb2, 0xc7, 0xe7, 0x02, 0x5d, 0xfa, 0xf6, - 0x44, 0xc1, 0xba, 0x31, 0x0a, 0xaf, 0xf3, 0x3c, 0x01, 0x7d, 0x09, 0x2b, 0x24, 0x14, 0x13, 0x83, - 0xf9, 0x0e, 0x55, 0x59, 0xa8, 0xcd, 0xd8, 0x11, 0x8a, 0x09, 0xf6, 0x1d, 0x2a, 0xaf, 0xa7, 0x46, - 0xd4, 0x0a, 0x1d, 0x02, 0x7a, 0x15, 0x12, 0x27, 0x32, 0xc2, 0x1f, 0x19, 0x9c, 0xb2, 0x0b, 0xdb, - 0xa4, 0x2a, 0xe1, 0x1e, 0x16, 0xec, 0xf8, 0x53, 0x0c, 0x3c, 0x1e, 0x9d, 0xc4, 0x30, 0xdc, 0x7c, - 0x55, 0xa0, 0x44, 0x4f, 0x12, 0x97, 0xbc, 0x36, 0x02, 0xc2, 0x88, 0xe3, 0x50, 0xc7, 0xe6, 0xae, - 0x86, 0x76, 0x4a, 0x8f, 0xab, 0xb8, 0xe1, 0x92, 0xd7, 0x2f, 0xa6, 0x54, 0xf4, 0x03, 0x6c, 0x31, - 0x72, 0x69, 0x64, 0x26, 0x8e, 0xc8, 0x09, 0x23, 0x7b, 0xac, 0x6d, 0xc8, 0xbd, 0x3f, 0x29, 0xda, - 0x8f, 0xc9, 0xe5, 0x71, 0x3a, 0x6a, 0x74, 0x25, 0x14, 0x6f, 0xb0, 0x59, 0x22, 0x7a, 0x01, 0x68, - 0xf6, 0x71, 0xae, 0x6d, 0xce, 0x0f, 0x3e, 0xd5, 0x1d, 0x3a, 0x29, 0x10, 0xdf, 0x31, 0x8b, 0x24, - 0xf4, 0x2d, 0xac, 0xd9, 0x9e, 0xa0, 0x8c, 0x85, 0x81, 0xb0, 0x87, 0x0e, 0xd5, 0x3e, 0xb8, 0xa2, - 0x14, 0xef, 0xf9, 0xbe, 0x73, 0x1e, 0x4d, 0xaa, 0x38, 0x2f, 0x30, 0xef, 0xa5, 0xb6, 0x35, 0xef, - 0xa5, 0x86, 0x1e, 0x43, 0x85, 0x7a, 0x17, 0x5c, 0xfb, 0x50, 0xee, 0xb0, 0x39, 0x93, 0x2b, 0xde, - 0x05, 0xc7, 0x12, 0x11, 0xbd, 0xba, 0x04, 0x19, 0x73, 0x4d, 0xdb, 0x59, 0x8c, 0x5e, 0x5d, 0xd1, - 0xf7, 0x9e, 0x06, 0x5b, 0xd9, 0xa8, 0x37, 0x22, 0xe5, 0xcc, 0xb6, 0x28, 0xff, 0xbe, 0x52, 0xab, - 0x34, 0xab, 0xba, 0x0b, 0x77, 0xd3, 0x6c, 0x3b, 0xa5, 0xcc, 0xb5, 0xbd, 0xcc, 0x33, 0xfb, 0x7d, - 0xde, 0x2c, 0xe9, 0xa8, 0x5d, 0xce, 0x8c, 0xda, 0xfa, 0x7d, 0xd8, 0x9e, 0xb7, 0x5d, 0xfc, 0x90, - 
0xd3, 0x7f, 0x81, 0x87, 0xf3, 0x1e, 0x63, 0xd1, 0x4d, 0xde, 0xc6, 0x83, 0xec, 0xaf, 0x65, 0xd8, - 0xb9, 0x5a, 0xbf, 0x7a, 0x4c, 0x3e, 0x29, 0x4e, 0xf6, 0x1f, 0x16, 0x3d, 0x7e, 0xc6, 0x9c, 0x64, - 0xa4, 0x9f, 0x0e, 0xf4, 0x5f, 0x14, 0x8a, 0xe1, 0x5b, 0xa5, 0x92, 0x52, 0xf8, 0x14, 0xea, 0xa3, - 0xd0, 0x71, 0x6e, 0x3a, 0x19, 0x63, 0x88, 0xd0, 0xe9, 0x44, 0xbc, 0x2a, 0x65, 0x13, 0x63, 0x2b, - 0xd7, 0x09, 0xcb, 0xad, 0xe2, 0xd4, 0xe0, 0xfa, 0xdf, 0xb2, 0xff, 0x56, 0xce, 0xe4, 0x00, 0x79, - 0x1b, 0x97, 0xfe, 0x7b, 0xa8, 0xca, 0xb9, 0x4d, 0x3a, 0xa1, 0x31, 0xdb, 0x9e, 0xf3, 0x13, 0x1f, - 0x8e, 0xc1, 0xfa, 0xbf, 0x4b, 0x70, 0xef, 0x2d, 0xb3, 0xe0, 0x54, 0x6b, 0xe9, 0x1d, 0xb4, 0xa2, - 0xaf, 0xa0, 0xee, 0x9b, 0x66, 0xc8, 0x58, 0x3c, 0x2b, 0x95, 0xaf, 0x9d, 0x95, 0x20, 0x81, 0x77, - 0x44, 0x7e, 0x42, 0x5b, 0x2c, 0x3e, 0x09, 0xef, 0x66, 0xfe, 0x45, 0x24, 0xce, 0x53, 0x21, 0x7c, - 0x01, 0xfa, 0xbc, 0x10, 0x3b, 0x8c, 0x7f, 0xac, 0xdd, 0x52, 0x62, 0x59, 0x34, 0x10, 0x13, 0x79, - 0xa2, 0x2a, 0x8e, 0x17, 0xfa, 0x11, 0x7c, 0xf2, 0xd6, 0x7d, 0x55, 0x74, 0x3f, 0x82, 0x0a, 0x0f, - 0xd2, 0x46, 0xbf, 0x51, 0xec, 0x2a, 0x01, 0xf1, 0xb0, 0x04, 0x7c, 0xfa, 0x0d, 0x34, 0xf2, 0x6e, - 0x45, 0x9b, 0xd0, 0xec, 0xfd, 0xd0, 0xeb, 0x9e, 0x9d, 0x0e, 0x8e, 0x8f, 0x8c, 0x4e, 0xf7, 0x74, - 0x70, 0xde, 0x6b, 0x2e, 0xa0, 0x2d, 0x40, 0x19, 0x2a, 0xee, 0xf6, 0x07, 0xe7, 0xd1, 0xfc, 0xb3, - 0xf7, 0xec, 0xa7, 0xaf, 0xc6, 0xb6, 0x98, 0x84, 0xc3, 0x96, 0xe9, 0xbb, 0x6d, 0xb9, 0x8d, 0xcf, - 0xc6, 0xf1, 0x47, 0x3b, 0xfd, 0xaf, 0x38, 0xa6, 0x5e, 0x3b, 0x18, 0xfe, 0x6e, 0xec, 0xb7, 0xf3, - 0xbf, 0x48, 0x87, 0x4b, 0xf2, 0x7e, 0xbe, 0xf8, 0x7f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xae, 0xc9, - 0xe4, 0xb3, 0x94, 0x15, 0x00, 0x00, + 0x15, 0x16, 0x29, 0x52, 0xa6, 0x0e, 0x2d, 0x8a, 0x5e, 0x29, 0x0a, 0x2c, 0x3b, 0xb6, 0x82, 0x4c, + 0x6b, 0x4f, 0x32, 0x25, 0x27, 0x4a, 0x3d, 0x9d, 0x38, 0x71, 0x26, 0x14, 0x45, 0x47, 0x4c, 0x75, + 0x71, 0x57, 0x17, 0xe7, 0x32, 0x19, 0x74, 0x05, 0x2c, 0x29, 0x34, 0xb8, 0x79, 0x77, 0x21, 0xd9, + 
0xff, 0xa0, 0xd3, 0xa7, 0x3e, 0xb6, 0xff, 0xa0, 0x4f, 0x7d, 0xec, 0x4f, 0xe9, 0x6f, 0xe9, 0x63, + 0x67, 0x17, 0x0b, 0x10, 0x00, 0x29, 0x4b, 0x1e, 0xeb, 0x0d, 0x7b, 0xce, 0x77, 0xce, 0x9e, 0x3d, + 0x7b, 0x6e, 0x0b, 0x78, 0x30, 0xf2, 0xde, 0x08, 0xea, 0x3a, 0x5e, 0x97, 0x38, 0xbe, 0x1b, 0x74, + 0xe9, 0x6b, 0x6a, 0xc7, 0xc2, 0x0d, 0x83, 0x4e, 0xc4, 0x42, 0x11, 0xa2, 0x56, 0xca, 0xef, 0x28, + 0xfe, 0xfa, 0xa3, 0x12, 0xde, 0xf6, 0x62, 0x2e, 0x28, 0xb3, 0x08, 0xe7, 0xee, 0x38, 0xf0, 0x69, + 0x20, 0x12, 0xc1, 0xf5, 0x7b, 0x65, 0x60, 0xe8, 0xfb, 0xa9, 0xd6, 0xf5, 0xfb, 0x19, 0xd3, 0x0e, + 0x19, 0xed, 0x7a, 0xae, 0xa0, 0x8c, 0x78, 0x5c, 0x73, 0x3f, 0x2a, 0x72, 0x4b, 0x26, 0xad, 0x3f, + 0x2c, 0xb2, 0x09, 0x13, 0xee, 0x88, 0xd8, 0xc2, 0x72, 0x1d, 0x0d, 0x78, 0x50, 0x04, 0xb8, 0x0e, + 0x0d, 0x84, 0x3b, 0x72, 0x29, 0x9b, 0x32, 0x4d, 0xf1, 0x7d, 0x2a, 0x98, 0x6b, 0xf3, 0xd9, 0xa6, + 0x71, 0x6a, 0xc7, 0xcc, 0x15, 0x6f, 0x52, 0xd5, 0xe3, 0x30, 0x1c, 0x7b, 0xb4, 0xab, 0x56, 0xa7, + 0xf1, 0xa8, 0xeb, 0xc4, 0x8c, 0xe4, 0x6d, 0x2b, 0xf3, 0x85, 0xeb, 0x53, 0x2e, 0x88, 0x1f, 0x5d, + 0xa6, 0xe0, 0x82, 0x91, 0x28, 0xa2, 0x4c, 0x6f, 0x6f, 0xfe, 0xb7, 0x02, 0x6b, 0x83, 0xf4, 0xc0, + 0x7d, 0x46, 0x89, 0xa0, 0x98, 0xbe, 0x8a, 0x29, 0x17, 0xc8, 0x80, 0x5b, 0x11, 0x0b, 0xff, 0x42, + 0x6d, 0x61, 0x54, 0x36, 0x2a, 0x8f, 0x17, 0x71, 0xba, 0x44, 0x6b, 0xb0, 0xe0, 0x84, 0x3e, 0x71, + 0x03, 0xa3, 0xaa, 0x18, 0x7a, 0x85, 0x10, 0xd4, 0x02, 0xe2, 0x53, 0x63, 0x5e, 0x51, 0xd5, 0x37, + 0xfa, 0x1c, 0x6a, 0x3c, 0xa2, 0xb6, 0x51, 0xdb, 0xa8, 0x3c, 0x6e, 0x6e, 0x7e, 0xd4, 0x29, 0xde, + 0x6f, 0x27, 0xdb, 0xfb, 0x30, 0xa2, 0x36, 0x56, 0x50, 0xf4, 0x39, 0x2c, 0xb8, 0x41, 0x14, 0x0b, + 0x6e, 0xd4, 0x95, 0xd0, 0xdd, 0x89, 0x90, 0xf4, 0x51, 0x67, 0x37, 0xb9, 0xbe, 0x3d, 0x12, 0x61, + 0x0d, 0x44, 0x6d, 0x98, 0x0f, 0xd9, 0xd8, 0x58, 0x50, 0x1b, 0xcb, 0x4f, 0xf3, 0x9f, 0x15, 0x30, + 0x32, 0xe5, 0x98, 0x7a, 0x24, 0x0e, 0xec, 0xb3, 0xf4, 0x68, 0x4f, 0xa1, 0xea, 0x3a, 0xea, 0x54, + 0xcd, 0xcd, 0x4f, 0x4b, 0xda, 0x5f, 
0x86, 0xec, 0xd7, 0x91, 0x17, 0x5e, 0x64, 0xc2, 0xc3, 0xec, + 0x3e, 0x71, 0xd5, 0x75, 0x66, 0x1e, 0xf2, 0x11, 0x2c, 0x87, 0xe7, 0x94, 0x5d, 0x30, 0x57, 0x50, + 0xcb, 0x26, 0xf6, 0x19, 0x55, 0xe7, 0x6d, 0xe0, 0x56, 0x46, 0xee, 0x4b, 0xea, 0xf7, 0xb5, 0x46, + 0xb5, 0x3d, 0x6f, 0xfe, 0xab, 0x02, 0x1f, 0xe6, 0x6c, 0xb3, 0x25, 0xe8, 0x26, 0x4d, 0xab, 0xe6, + 0x4c, 0x7b, 0x06, 0x0d, 0x9f, 0x0a, 0xe2, 0x10, 0x41, 0x94, 0xc9, 0xcd, 0xcd, 0x8f, 0x2f, 0xbd, + 0x83, 0x3d, 0x0d, 0xc4, 0x99, 0x88, 0x79, 0x9c, 0xb3, 0x34, 0x0d, 0x0f, 0x1e, 0x85, 0x01, 0xa7, + 0xef, 0x63, 0xa9, 0xf9, 0x23, 0xdc, 0x9b, 0x82, 0x7c, 0x47, 0xc5, 0x0d, 0x38, 0xc1, 0xfc, 0x4f, + 0x05, 0x16, 0x33, 0xde, 0x7b, 0xb9, 0x33, 0x0d, 0xdd, 0xea, 0xf5, 0x43, 0xf7, 0x29, 0xdc, 0xb2, + 0xbd, 0x90, 0xc7, 0x8c, 0x6a, 0x67, 0x6f, 0x5c, 0x2a, 0xd5, 0x4f, 0x70, 0x38, 0x15, 0x30, 0xff, + 0x0c, 0x4b, 0x19, 0x73, 0xd7, 0xe5, 0x02, 0x7d, 0x09, 0x90, 0xd5, 0x22, 0x6e, 0x54, 0x36, 0xe6, + 0x8b, 0xb9, 0x50, 0xd2, 0x87, 0x73, 0x60, 0xb4, 0x0a, 0x75, 0x11, 0xfe, 0x4a, 0xd3, 0x04, 0x4d, + 0x16, 0x26, 0x85, 0xd6, 0x24, 0x77, 0xb6, 0xbc, 0xf0, 0x14, 0xfd, 0x01, 0x16, 0xce, 0x89, 0x17, + 0x53, 0xae, 0x5d, 0x74, 0x79, 0xaa, 0x6d, 0x55, 0x8d, 0xca, 0xce, 0x1c, 0xd6, 0x70, 0x84, 0x60, + 0x3e, 0x66, 0x6e, 0xa2, 0x7e, 0x67, 0x0e, 0xcb, 0xc5, 0xd6, 0x02, 0xd4, 0x54, 0xcc, 0xf4, 0x61, + 0xa9, 0x77, 0x1a, 0x32, 0x91, 0x86, 0x93, 0xb4, 0xc6, 0x26, 0x31, 0xa7, 0xba, 0x8e, 0x24, 0x0b, + 0x74, 0x1f, 0x16, 0x23, 0xe6, 0x06, 0xb6, 0x1b, 0x11, 0x4f, 0xdb, 0x39, 0x21, 0x98, 0xff, 0xb8, + 0x05, 0xed, 0xb2, 0xaf, 0xd0, 0x37, 0x70, 0x2b, 0x8c, 0x85, 0x2a, 0x0d, 0x89, 0xbd, 0x0f, 0xca, + 0xee, 0x28, 0x9e, 0x4f, 0x1b, 0x9d, 0x0a, 0xa1, 0x27, 0x50, 0xa7, 0x8c, 0x85, 0x6c, 0xfa, 0x4a, + 0xd5, 0x69, 0xb3, 0xfd, 0x06, 0x12, 0xb4, 0x33, 0x87, 0x13, 0x34, 0xfa, 0x0d, 0x34, 0x89, 0x3c, + 0x90, 0x95, 0x9c, 0x02, 0xa4, 0xad, 0x5a, 0x35, 0x28, 0x46, 0x5f, 0x1d, 0xe8, 0x39, 0xb4, 0x12, + 0x58, 0x96, 0x70, 0xb7, 0x67, 0x47, 0x4e, 0xc1, 0x3b, 0x3b, 0x73, 0x78, 
0x89, 0x14, 0xdc, 0xf5, + 0x2d, 0x34, 0x13, 0x83, 0x2d, 0xa5, 0x64, 0xe9, 0x7a, 0x37, 0x03, 0x89, 0xcc, 0xb6, 0xd4, 0xf0, + 0x1c, 0x96, 0xed, 0xd0, 0x8f, 0x62, 0x41, 0x1d, 0x4b, 0x97, 0xd2, 0xf9, 0x6b, 0x68, 0xc1, 0xad, + 0x54, 0x6a, 0x98, 0x94, 0xd5, 0xaf, 0xa1, 0x1e, 0x9d, 0x11, 0x9e, 0x54, 0xb3, 0xd6, 0xe6, 0x6f, + 0xaf, 0x4a, 0xa0, 0xce, 0x0b, 0x89, 0xc6, 0x89, 0x90, 0x8c, 0x5f, 0x2e, 0x08, 0x93, 0x46, 0x10, + 0xa1, 0x6b, 0xf9, 0x7a, 0x27, 0x69, 0x48, 0x9d, 0xb4, 0x21, 0x75, 0x8e, 0xd2, 0x8e, 0x85, 0x17, + 0x35, 0xba, 0x27, 0xd0, 0x13, 0x68, 0xa4, 0x9d, 0x4e, 0x15, 0x75, 0x69, 0x79, 0x59, 0x70, 0x5b, + 0x03, 0x70, 0x06, 0x95, 0x3b, 0xda, 0xaa, 0x48, 0xa9, 0x1d, 0x6f, 0x5d, 0xbd, 0xa3, 0x46, 0xf7, + 0x54, 0xb2, 0xc5, 0x91, 0x93, 0x8a, 0x36, 0xae, 0x16, 0xd5, 0xe8, 0x9e, 0x40, 0x5b, 0xb0, 0x14, + 0x84, 0xb2, 0x6e, 0xd8, 0x24, 0x49, 0xd5, 0x45, 0x95, 0xaa, 0xf7, 0xcb, 0xd7, 0xbe, 0x9f, 0x03, + 0xe1, 0xa2, 0x08, 0x7a, 0x0a, 0xcd, 0x0b, 0xed, 0x4d, 0xcb, 0x75, 0x8c, 0xe6, 0xcc, 0xdb, 0xca, + 0xd5, 0x27, 0x48, 0xd1, 0x43, 0x07, 0xfd, 0x02, 0xab, 0x5c, 0x10, 0xd9, 0x79, 0xce, 0x48, 0x30, + 0xa6, 0x96, 0x43, 0x05, 0x71, 0x3d, 0x6e, 0xb4, 0x94, 0x92, 0xcf, 0x2e, 0xaf, 0x5b, 0x52, 0xa8, + 0xaf, 0x64, 0xb6, 0x13, 0x11, 0x8c, 0xf8, 0x14, 0x6d, 0x6b, 0x19, 0x96, 0x74, 0x38, 0x32, 0xca, + 0x63, 0x4f, 0x98, 0x3f, 0x43, 0xeb, 0xf0, 0x0d, 0x17, 0xd4, 0xcf, 0x22, 0xf6, 0x33, 0xb8, 0x93, + 0x15, 0x1f, 0x4b, 0x8f, 0x68, 0x3a, 0xd9, 0xdb, 0x74, 0x92, 0xc4, 0x8a, 0x2e, 0xf3, 0x5e, 0x76, + 0x26, 0x1e, 0x11, 0x3b, 0x6d, 0x55, 0x13, 0x82, 0xf9, 0xbf, 0x1a, 0xdc, 0x99, 0x6a, 0x48, 0xa8, + 0x0f, 0x35, 0x3f, 0x74, 0x92, 0x02, 0xd2, 0xda, 0xec, 0x5e, 0xd9, 0xc1, 0x72, 0x94, 0xd0, 0xa1, + 0x58, 0x09, 0xbf, 0xbd, 0xe0, 0xc8, 0x71, 0x27, 0xa0, 0x5c, 0xb8, 0xc1, 0x58, 0xe5, 0xca, 0x12, + 0x4e, 0x97, 0xe8, 0x19, 0xdc, 0xe6, 0xf6, 0x19, 0x75, 0x62, 0x2f, 0x09, 0x8e, 0xda, 0x95, 0xc1, + 0xd1, 0xcc, 0xf0, 0x3d, 0x81, 0x7e, 0x82, 0x0f, 0x22, 0xc2, 0x68, 0x20, 0xac, 0x20, 0x74, 0xa8, + 0x95, 0xf9, 
0x43, 0x67, 0x44, 0x39, 0xa9, 0xf6, 0x43, 0x87, 0xce, 0xea, 0x48, 0x2b, 0x89, 0x92, + 0x02, 0x1b, 0xfd, 0x0c, 0x2b, 0x8c, 0x8e, 0x28, 0xa3, 0x81, 0x9d, 0xd7, 0xdc, 0x7e, 0xe7, 0x7e, + 0x87, 0x32, 0x35, 0x13, 0xe5, 0xdf, 0xc1, 0x32, 0x57, 0xf7, 0x3c, 0x29, 0x68, 0x77, 0x66, 0x57, + 0xdd, 0x62, 0x38, 0xe0, 0x16, 0x2f, 0x86, 0xc7, 0xd7, 0x70, 0x3b, 0x37, 0x35, 0x73, 0x03, 0x95, + 0x5b, 0x99, 0x32, 0xaf, 0xa7, 0x21, 0xc3, 0x6d, 0xdc, 0x4c, 0xe1, 0x43, 0x87, 0x9b, 0xe3, 0x5c, + 0x5f, 0x94, 0xb7, 0x89, 0x00, 0x16, 0xf6, 0x7a, 0xfb, 0xc7, 0xbd, 0xdd, 0xf6, 0x1c, 0x5a, 0x82, + 0xc5, 0xc3, 0xfe, 0xce, 0x60, 0xfb, 0x78, 0x77, 0xb0, 0xdd, 0xae, 0x48, 0xd6, 0xe1, 0x8f, 0x87, + 0x47, 0x83, 0xbd, 0x76, 0x15, 0xdd, 0x86, 0x06, 0x1e, 0xec, 0xf6, 0x8e, 0xf7, 0xfb, 0x3b, 0xed, + 0x79, 0x84, 0xa0, 0xd5, 0xdf, 0x19, 0xee, 0x6e, 0x5b, 0x2f, 0x0f, 0xf0, 0x1f, 0x9f, 0xef, 0x1e, + 0xbc, 0x6c, 0xd7, 0xa4, 0x30, 0x1e, 0xf4, 0x0f, 0x4e, 0x06, 0x78, 0xb0, 0xdd, 0xae, 0x9b, 0x27, + 0xd0, 0xce, 0xa7, 0xa8, 0xea, 0xc1, 0x53, 0xb9, 0x5d, 0x79, 0xe7, 0xdc, 0x36, 0xff, 0xde, 0xc8, + 0x9d, 0xe0, 0x30, 0x19, 0x13, 0x9a, 0xc9, 0x40, 0x6a, 0x45, 0x1e, 0x09, 0x2e, 0xe9, 0xbd, 0xf9, + 0x6c, 0x4f, 0xd0, 0x2f, 0x3c, 0x12, 0xa0, 0x27, 0xd9, 0x74, 0x5c, 0xbd, 0x4e, 0x49, 0x4f, 0x27, + 0xe4, 0xf7, 0x9b, 0x03, 0xd1, 0x4e, 0xd9, 0x0f, 0xf5, 0xd9, 0xe3, 0x4d, 0xd9, 0x81, 0xb2, 0xbb, + 0x15, 0x2b, 0xdd, 0xc7, 0xd0, 0x74, 0x5c, 0x4e, 0x4e, 0x3d, 0x6a, 0x11, 0xcf, 0x53, 0xd5, 0xbd, + 0x21, 0xdb, 0x97, 0x26, 0xf6, 0x3c, 0x0f, 0x75, 0x60, 0xc1, 0x23, 0xa7, 0xd4, 0xe3, 0xba, 0x84, + 0xaf, 0x4d, 0x75, 0x79, 0xc5, 0xc5, 0x1a, 0x85, 0x9e, 0x41, 0x93, 0x04, 0x41, 0x28, 0xb4, 0x69, + 0x49, 0xf1, 0xbe, 0x37, 0xd5, 0x75, 0x27, 0x10, 0x9c, 0xc7, 0xa3, 0x21, 0xb4, 0xd3, 0x67, 0x97, + 0x65, 0x87, 0x81, 0xa0, 0xaf, 0x85, 0xea, 0xf1, 0x85, 0x40, 0x57, 0xbe, 0x3d, 0xd4, 0xb0, 0x7e, + 0x82, 0xc2, 0xcb, 0xbc, 0x48, 0x40, 0x5f, 0xc2, 0x22, 0x89, 0xc5, 0x99, 0xc5, 0x42, 0x8f, 0xea, + 0x2c, 0x34, 0xa6, 0xec, 0x88, 0xc5, 0x19, 0x0e, 
0x3d, 0xaa, 0xae, 0xa7, 0x41, 0xf4, 0x0a, 0xed, + 0x01, 0x7a, 0x15, 0x13, 0x4f, 0x1a, 0x11, 0x8e, 0x2c, 0x4e, 0xd9, 0xb9, 0x6b, 0x53, 0x9d, 0x70, + 0x0f, 0x4b, 0x76, 0xfc, 0x29, 0x01, 0x1e, 0x8c, 0x0e, 0x13, 0x18, 0x6e, 0xbf, 0x2a, 0x51, 0xe4, + 0x93, 0xc4, 0x27, 0xaf, 0xad, 0x88, 0x30, 0xe2, 0x79, 0xd4, 0x73, 0xb9, 0x6f, 0xa0, 0x8d, 0xca, + 0xe3, 0x3a, 0x6e, 0xf9, 0xe4, 0xf5, 0x8b, 0x09, 0x15, 0xfd, 0x00, 0x6b, 0x8c, 0x5c, 0x58, 0xb9, + 0x89, 0x43, 0x3a, 0x61, 0xe4, 0x8e, 0x8d, 0x15, 0xb5, 0xf7, 0x27, 0x65, 0xfb, 0x31, 0xb9, 0x38, + 0xc8, 0x46, 0x8d, 0xbe, 0x82, 0xe2, 0x15, 0x36, 0x4d, 0x44, 0x2f, 0x00, 0x4d, 0x3f, 0xd7, 0x8d, + 0xd5, 0xd9, 0xc1, 0xa7, 0xbb, 0x43, 0x2f, 0x03, 0xe2, 0x3b, 0x76, 0x99, 0x84, 0xbe, 0x85, 0x25, + 0x37, 0x10, 0x94, 0xb1, 0x38, 0x12, 0xee, 0xa9, 0x47, 0x8d, 0x0f, 0x2e, 0x29, 0xc5, 0x5b, 0x61, + 0xe8, 0x9d, 0xc8, 0x49, 0x15, 0x17, 0x05, 0x66, 0xbd, 0xd4, 0xd6, 0x66, 0xbd, 0xd4, 0xd0, 0x63, + 0xa8, 0xd1, 0xe0, 0x9c, 0x1b, 0x1f, 0xaa, 0x1d, 0x56, 0xa7, 0x72, 0x25, 0x38, 0xe7, 0x58, 0x21, + 0xe4, 0xab, 0x4b, 0x90, 0x31, 0x37, 0x8c, 0x8d, 0x79, 0xf9, 0xea, 0x92, 0xdf, 0x5b, 0x06, 0xac, + 0xe5, 0xa3, 0xde, 0x92, 0xca, 0x99, 0xeb, 0x50, 0xfe, 0x7d, 0xad, 0x51, 0x6b, 0xd7, 0x4d, 0x1f, + 0xee, 0x66, 0xd9, 0x76, 0x44, 0x99, 0xef, 0x06, 0xb9, 0x87, 0xf7, 0xfb, 0xbc, 0x59, 0xb2, 0x51, + 0xbb, 0x9a, 0x1b, 0xb5, 0xcd, 0xfb, 0xb0, 0x3e, 0x6b, 0xbb, 0xe4, 0x21, 0x67, 0xfe, 0x02, 0x0f, + 0x67, 0x3d, 0xc6, 0xe4, 0x4d, 0xde, 0xc4, 0x83, 0xec, 0xaf, 0x55, 0xd8, 0xb8, 0x5c, 0xbf, 0x7e, + 0x4c, 0x3e, 0x29, 0x4f, 0xf6, 0x1f, 0x96, 0x3d, 0x7e, 0xcc, 0xbc, 0x74, 0xa4, 0x9f, 0x0c, 0xf4, + 0x5f, 0x94, 0x8a, 0xe1, 0x5b, 0xa5, 0xd2, 0x52, 0xf8, 0x14, 0x9a, 0xa3, 0xd8, 0xf3, 0xae, 0x3b, + 0x19, 0x63, 0x90, 0xe8, 0x6c, 0x22, 0xbe, 0xad, 0x64, 0x53, 0x63, 0x6b, 0x57, 0x09, 0xab, 0xad, + 0x92, 0xd4, 0xe0, 0xe6, 0xdf, 0xf2, 0x7f, 0x5b, 0x8e, 0xd5, 0x00, 0x79, 0x13, 0x97, 0xfe, 0x7b, + 0xa8, 0xab, 0xb9, 0x4d, 0x39, 0xa1, 0x35, 0xdd, 0x9e, 0x8b, 0x13, 0x1f, 0x4e, 0xc0, 
0xe6, 0xbf, + 0x2b, 0x70, 0xef, 0x2d, 0xb3, 0xe0, 0x44, 0x6b, 0xe5, 0x1d, 0xb4, 0xa2, 0xaf, 0xa0, 0x19, 0xda, + 0x76, 0xcc, 0x58, 0x32, 0x2b, 0x55, 0xaf, 0x9c, 0x95, 0x20, 0x85, 0xf7, 0x44, 0x71, 0x42, 0x9b, + 0x2f, 0x3f, 0x09, 0xef, 0xe6, 0xfe, 0x45, 0xa4, 0xce, 0xd3, 0x21, 0x7c, 0x0e, 0xe6, 0xac, 0x10, + 0xdb, 0x4b, 0x7e, 0xb5, 0xdd, 0x50, 0x62, 0x39, 0x34, 0x12, 0x67, 0xea, 0x44, 0x75, 0x9c, 0x2c, + 0xcc, 0x7d, 0xf8, 0xe4, 0xad, 0xfb, 0xea, 0xe8, 0x7e, 0x04, 0x35, 0x1e, 0x65, 0x8d, 0x7e, 0xa5, + 0xdc, 0x55, 0x22, 0x12, 0x60, 0x05, 0xf8, 0xf4, 0x1b, 0x68, 0x15, 0xdd, 0x8a, 0x56, 0xa1, 0x3d, + 0xf8, 0x61, 0xd0, 0x3f, 0x3e, 0x1a, 0x1e, 0xec, 0x5b, 0xbd, 0xfe, 0xd1, 0xf0, 0x64, 0xd0, 0x9e, + 0x43, 0x6b, 0x80, 0x72, 0x54, 0xdc, 0xdf, 0x19, 0x9e, 0xc8, 0xf9, 0x67, 0xeb, 0xd9, 0x4f, 0x5f, + 0x8d, 0x5d, 0x71, 0x16, 0x9f, 0x76, 0xec, 0xd0, 0xef, 0xaa, 0x6d, 0x42, 0x36, 0x4e, 0x3e, 0xba, + 0xd9, 0x9f, 0xc6, 0x31, 0x0d, 0xba, 0xd1, 0xe9, 0xef, 0xc6, 0x61, 0xb7, 0xf8, 0xd3, 0xf4, 0x74, + 0x41, 0xdd, 0xcf, 0x17, 0xff, 0x0f, 0x00, 0x00, 0xff, 0xff, 0xd6, 0xfa, 0xb5, 0xe8, 0xa6, 0x15, + 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/admin/launch_plan.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/launch_plan.pb.go index 5e7b95f8ca..1a5a48e613 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/launch_plan.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/launch_plan.pb.go @@ -757,7 +757,7 @@ func (m *ActiveLaunchPlanRequest) GetId() *NamedEntityIdentifier { return nil } -// Represents a request structure to list active launch plans within a project/domain. +// Represents a request structure to list active launch plans within a project/domain and optional org. // See :ref:`ref_flyteidl.admin.LaunchPlan` for more details type ActiveLaunchPlanListRequest struct { // Name of the project that contains the identifiers. @@ -775,7 +775,9 @@ type ActiveLaunchPlanListRequest struct { Token string `protobuf:"bytes,4,opt,name=token,proto3" json:"token,omitempty"` // Sort ordering. 
// +optional - SortBy *Sort `protobuf:"bytes,5,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` + SortBy *Sort `protobuf:"bytes,5,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` + // Optional, org key applied to the resource. + Org string `protobuf:"bytes,6,opt,name=org,proto3" json:"org,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -841,6 +843,13 @@ func (m *ActiveLaunchPlanListRequest) GetSortBy() *Sort { return nil } +func (m *ActiveLaunchPlanListRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + func init() { proto.RegisterEnum("flyteidl.admin.LaunchPlanState", LaunchPlanState_name, LaunchPlanState_value) proto.RegisterType((*LaunchPlanCreateRequest)(nil), "flyteidl.admin.LaunchPlanCreateRequest") @@ -860,79 +869,80 @@ func init() { func init() { proto.RegisterFile("flyteidl/admin/launch_plan.proto", fileDescriptor_368a863574f5e699) } var fileDescriptor_368a863574f5e699 = []byte{ - // 1183 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xdd, 0x8e, 0x1b, 0x35, - 0x14, 0x26, 0x69, 0xf6, 0xef, 0x6c, 0x37, 0x9b, 0xba, 0x65, 0x3b, 0x4d, 0x4b, 0xbb, 0x04, 0x21, - 0x16, 0x68, 0x13, 0xa9, 0xd0, 0x8b, 0xfe, 0x49, 0x64, 0xd3, 0xbd, 0x88, 0xe8, 0xcf, 0xe2, 0x2d, - 0x15, 0xe2, 0x66, 0xe4, 0xcc, 0x9c, 0x24, 0xa6, 0x9e, 0xf1, 0xd4, 0xf6, 0xec, 0x66, 0xc5, 0x0b, - 0x21, 0xde, 0x81, 0x47, 0xe0, 0x82, 0x37, 0x42, 0xe3, 0xf1, 0x4c, 0x92, 0xc9, 0x56, 0x05, 0x24, - 0xae, 0x66, 0xec, 0xf3, 0x7d, 0x9f, 0x7d, 0x8e, 0x8f, 0xcf, 0x31, 0xec, 0x8f, 0xc5, 0xb9, 0x41, - 0x1e, 0x8a, 0x1e, 0x0b, 0x23, 0x1e, 0xf7, 0x04, 0x4b, 0xe3, 0x60, 0xea, 0x27, 0x82, 0xc5, 0xdd, - 0x44, 0x49, 0x23, 0x49, 0xb3, 0x40, 0x74, 0x2d, 0xa2, 0xfd, 0x49, 0xc9, 0x08, 0xa4, 0xc2, 0x1e, - 0xce, 0x30, 0x48, 0x0d, 0x97, 0x0e, 0xde, 0xbe, 0xb5, 0x6c, 0x16, 0xdc, 0xa0, 0x62, 0x42, 0x3b, - 0xeb, 0xed, 0x65, 0x2b, 0x0f, 0x31, 
0x36, 0x7c, 0xcc, 0x51, 0x39, 0x7b, 0x45, 0x9c, 0xc7, 0x06, - 0xd5, 0x98, 0x05, 0x78, 0xb1, 0xb8, 0xc6, 0x20, 0x55, 0xdc, 0x9c, 0xaf, 0x90, 0x73, 0x5f, 0x74, - 0x30, 0xc5, 0x30, 0x15, 0x05, 0xf9, 0x66, 0xc5, 0x1c, 0xc8, 0x28, 0x2a, 0xb7, 0x7d, 0x63, 0x22, - 0xe5, 0x44, 0x60, 0xcf, 0x8e, 0x46, 0xe9, 0xb8, 0xc7, 0xe2, 0x42, 0xf6, 0x4e, 0xd5, 0x64, 0x78, - 0x84, 0xda, 0xb0, 0x28, 0x29, 0x9c, 0xaa, 0x02, 0xce, 0x14, 0x4b, 0x12, 0x54, 0xce, 0xe9, 0xce, - 0x0c, 0xae, 0x3f, 0xb7, 0x61, 0x3d, 0x16, 0x2c, 0x1e, 0x28, 0x64, 0x06, 0x29, 0xbe, 0x4b, 0x51, - 0x1b, 0xf2, 0x25, 0xd4, 0x79, 0xe8, 0xd5, 0xf6, 0x6b, 0x07, 0xdb, 0xf7, 0x6f, 0x74, 0xcb, 0x48, - 0x67, 0xde, 0x75, 0x87, 0x65, 0x70, 0x68, 0x9d, 0x87, 0xe4, 0x3e, 0x34, 0x74, 0x82, 0x81, 0x57, - 0xb7, 0xe0, 0xdb, 0xdd, 0xe5, 0x63, 0xe9, 0xce, 0x57, 0x38, 0x49, 0x30, 0xa0, 0x16, 0xdb, 0x69, - 0x83, 0xb7, 0xba, 0xb2, 0x4e, 0x64, 0xac, 0xb1, 0xf3, 0x5b, 0x0d, 0x60, 0x6e, 0xfc, 0x9f, 0x77, - 0x42, 0x1e, 0xc3, 0x46, 0x20, 0xa4, 0x4e, 0x15, 0x7a, 0x97, 0x2c, 0xed, 0xd3, 0xf7, 0xd3, 0x06, - 0x39, 0x90, 0x16, 0x8c, 0x0e, 0x42, 0x73, 0x6e, 0x7d, 0xce, 0xb5, 0x21, 0x4f, 0xe1, 0xf2, 0x42, - 0xa6, 0x6a, 0xaf, 0xb6, 0x7f, 0xe9, 0x60, 0xfb, 0x7e, 0xfb, 0xfd, 0x9a, 0x74, 0x5b, 0x94, 0xff, - 0x9a, 0x5c, 0x83, 0x35, 0x23, 0xdf, 0x62, 0x6c, 0x5d, 0xd8, 0xa2, 0xf9, 0xa0, 0x73, 0x0a, 0x8d, - 0x7e, 0x6a, 0xa6, 0xe4, 0x2e, 0x10, 0xa6, 0x75, 0x1a, 0xb1, 0x91, 0x40, 0x9f, 0xb3, 0xc8, 0x57, - 0x52, 0xa0, 0x0d, 0xcd, 0x16, 0x6d, 0x95, 0x96, 0x21, 0x8b, 0xa8, 0x14, 0x48, 0x9e, 0x40, 0xfb, - 0x6d, 0x3a, 0x42, 0x15, 0xa3, 0x41, 0xed, 0x6b, 0x54, 0xa7, 0x3c, 0x40, 0x9f, 0x05, 0x81, 0x4c, - 0x63, 0xe3, 0x16, 0xf0, 0xe6, 0x88, 0x93, 0x1c, 0xd0, 0xcf, 0xed, 0x8f, 0xea, 0x5e, 0xad, 0xf3, - 0xc7, 0xc6, 0xa2, 0x7f, 0x59, 0xd0, 0xc8, 0x23, 0xd8, 0x3e, 0x93, 0xea, 0xed, 0x58, 0xc8, 0x33, - 0xff, 0x9f, 0x1c, 0x0b, 0x14, 0xe8, 0x61, 0x48, 0xbe, 0x87, 0xdd, 0x6c, 0xde, 0x9c, 0xfb, 0x11, - 0x1a, 0x16, 0x32, 0xc3, 0xdc, 0x49, 0x75, 0xde, 0x1f, 0x9e, 0x17, 0x0e, 
0x49, 0x9b, 0x39, 0xb5, - 0x18, 0x93, 0x43, 0x68, 0x86, 0x38, 0x66, 0xa9, 0x30, 0x3e, 0x8f, 0x93, 0xd4, 0x68, 0x77, 0x7c, - 0x37, 0x2b, 0x7b, 0x39, 0x66, 0x8a, 0x45, 0x68, 0x50, 0xbd, 0x60, 0x09, 0xdd, 0x71, 0x94, 0xa1, - 0x65, 0x90, 0x27, 0x70, 0x79, 0xcc, 0x67, 0x18, 0x16, 0x0a, 0x8d, 0x0b, 0xbd, 0x79, 0x9e, 0x57, - 0x8a, 0x8c, 0xbf, 0x6d, 0xe1, 0x8e, 0xbd, 0x07, 0x0d, 0x1b, 0xff, 0xb5, 0x2c, 0x92, 0x87, 0x75, - 0xaf, 0x46, 0xed, 0x98, 0x74, 0x61, 0x5d, 0xb0, 0x11, 0x0a, 0xed, 0xad, 0x5b, 0xbd, 0xbd, 0x55, - 0xef, 0x32, 0x2b, 0x75, 0x28, 0xf2, 0x14, 0xb6, 0x59, 0x1c, 0x4b, 0xc3, 0xb2, 0x62, 0xa5, 0xbd, - 0x8d, 0xaa, 0x1b, 0x39, 0xa9, 0x3f, 0x87, 0xd0, 0x45, 0x3c, 0xb9, 0x0b, 0x0d, 0x96, 0x9a, 0xa9, - 0xb7, 0x69, 0x79, 0xd7, 0x56, 0x78, 0xa9, 0x99, 0xe6, 0x9b, 0xcb, 0x50, 0xe4, 0x21, 0x6c, 0x65, - 0xdf, 0x3c, 0x73, 0xb6, 0x2c, 0xc5, 0xbb, 0x88, 0x92, 0x65, 0x90, 0xa5, 0x6d, 0x32, 0x37, 0x22, - 0x43, 0x68, 0x15, 0x75, 0xcd, 0x0f, 0x64, 0x6c, 0x70, 0x66, 0x3c, 0xa8, 0xde, 0x34, 0x1b, 0xb1, - 0x13, 0x07, 0x1b, 0xe4, 0x28, 0xba, 0xab, 0x97, 0x27, 0xc8, 0x0b, 0x20, 0xef, 0x52, 0x26, 0x32, - 0x25, 0x39, 0x2e, 0x52, 0xd3, 0x6b, 0x59, 0xb1, 0x3b, 0x15, 0xb1, 0x1f, 0x72, 0xe0, 0xab, 0xb1, - 0x4b, 0x50, 0xda, 0x7a, 0x57, 0x99, 0x21, 0x3f, 0xc1, 0x9e, 0x62, 0x67, 0xbe, 0x4c, 0x4d, 0x92, - 0x1a, 0x3f, 0x4b, 0x8f, 0x6c, 0x83, 0x63, 0x3e, 0xf1, 0xae, 0x58, 0xc9, 0xcf, 0xaa, 0x1e, 0x52, - 0x76, 0xf6, 0xca, 0x82, 0x9f, 0x31, 0xc3, 0x06, 0x16, 0x4a, 0xaf, 0xaa, 0xd5, 0x49, 0xf2, 0x05, - 0xec, 0x46, 0x6c, 0xe6, 0x27, 0x4c, 0x31, 0x21, 0x50, 0x70, 0x1d, 0x79, 0x64, 0xbf, 0x76, 0xb0, - 0x46, 0x9b, 0x11, 0x9b, 0x1d, 0xcf, 0x67, 0xc9, 0x77, 0xb0, 0x63, 0x7b, 0x82, 0x4a, 0x13, 0xc3, - 0x47, 0x02, 0xbd, 0xab, 0x76, 0xe5, 0x76, 0x37, 0x2f, 0xc1, 0xdd, 0xa2, 0x04, 0x77, 0x0f, 0xa5, - 0x14, 0x6f, 0x98, 0x48, 0x91, 0x2e, 0x13, 0xb2, 0xa5, 0xe4, 0x29, 0xaa, 0x33, 0xc5, 0x0d, 0xfa, - 0x01, 0x0b, 0xa6, 0xe8, 0x5d, 0xdb, 0xaf, 0x1d, 0x6c, 0xd2, 0x66, 0x39, 0x3d, 0xc8, 0x66, 0xc9, - 0x01, 0x34, 
0x30, 0x3e, 0xd5, 0xde, 0xc7, 0x17, 0x1f, 0xf8, 0x51, 0x7c, 0xaa, 0xa9, 0x45, 0x74, - 0xfe, 0xac, 0xc3, 0x95, 0x95, 0xea, 0x45, 0x1e, 0xc0, 0x9a, 0x36, 0xcc, 0xe4, 0x85, 0xa3, 0xb9, - 0x18, 0xef, 0x95, 0x32, 0x99, 0xc1, 0x68, 0x8e, 0x26, 0xcf, 0x60, 0x17, 0x67, 0x09, 0x06, 0x66, - 0x7e, 0x5f, 0xea, 0x1f, 0xbe, 0x71, 0xcd, 0x82, 0xe3, 0x2e, 0xcd, 0x11, 0xb4, 0x4a, 0x95, 0xfc, - 0xbc, 0x8a, 0x8b, 0xdb, 0xae, 0xc8, 0xbc, 0x61, 0x8a, 0x67, 0xe5, 0x2c, 0x53, 0x29, 0x57, 0xce, - 0x0f, 0x48, 0x93, 0x87, 0x00, 0x81, 0xed, 0x1a, 0xa1, 0xcf, 0x8c, 0xbb, 0xb7, 0xab, 0xb1, 0x7e, - 0x5d, 0xf4, 0x43, 0xba, 0xe5, 0xd0, 0x7d, 0x93, 0x51, 0xd3, 0x24, 0x2c, 0xa8, 0x6b, 0x1f, 0xa6, - 0x3a, 0x74, 0xdf, 0x74, 0xfe, 0xaa, 0x01, 0x59, 0x2d, 0x4d, 0xe4, 0x5b, 0xd8, 0x2c, 0x3a, 0xba, - 0x2b, 0x88, 0x2b, 0x57, 0xea, 0xc4, 0xd9, 0x69, 0x89, 0x24, 0x87, 0xb0, 0x13, 0xcb, 0xac, 0x4a, - 0x06, 0xee, 0xe2, 0xd7, 0x6d, 0xab, 0xb8, 0x55, 0xa5, 0xbe, 0x5c, 0x00, 0xd1, 0x65, 0x0a, 0xe9, - 0xc3, 0x15, 0xd7, 0x6d, 0x02, 0x19, 0x87, 0x3c, 0xd7, 0xb9, 0xe4, 0xf2, 0xa2, 0xea, 0x52, 0x3f, - 0x3e, 0xa7, 0xad, 0x1c, 0x3e, 0x28, 0xd1, 0x9d, 0x5f, 0x17, 0xdf, 0x00, 0x3f, 0x5a, 0x57, 0xff, - 0xc3, 0x1b, 0xa0, 0xcc, 0xa9, 0xfa, 0xbf, 0xc9, 0xa9, 0xe5, 0x67, 0x40, 0xb1, 0xb8, 0x7b, 0x06, - 0x1c, 0xc3, 0xf5, 0x7e, 0x60, 0xf8, 0x29, 0x2e, 0xf4, 0x4a, 0xb7, 0xb1, 0x07, 0x0b, 0x1b, 0xfb, - 0x7c, 0x25, 0x5e, 0x2c, 0xc2, 0xf0, 0xc8, 0x36, 0x8b, 0xe5, 0x4d, 0x76, 0x7e, 0xaf, 0xc1, 0xcd, - 0xaa, 0x64, 0xd6, 0xb4, 0x0b, 0x59, 0x0f, 0x36, 0x12, 0x25, 0x7f, 0xc1, 0xc0, 0xb8, 0x9e, 0x5a, - 0x0c, 0xc9, 0x1e, 0xac, 0x87, 0x32, 0x62, 0xbc, 0xe8, 0xcb, 0x6e, 0x94, 0xb5, 0x6b, 0xc1, 0x23, - 0x6e, 0x6c, 0xcc, 0x77, 0x68, 0x3e, 0x98, 0x37, 0xf1, 0xc6, 0x42, 0x13, 0x27, 0xf7, 0x60, 0x43, - 0x4b, 0x65, 0xfc, 0xd1, 0xb9, 0x4b, 0xba, 0x95, 0x9b, 0x7b, 0x22, 0x95, 0xa1, 0xeb, 0x19, 0xe8, - 0xf0, 0xfc, 0xab, 0xaf, 0x61, 0xb7, 0x12, 0x34, 0x72, 0x19, 0x36, 0x87, 0x2f, 0xfb, 0x83, 0xd7, - 0xc3, 0x37, 0x47, 0xad, 0x8f, 0x08, 0xc0, 0xba, 
0xfb, 0xaf, 0x1d, 0x3e, 0xfd, 0xf9, 0xf1, 0x84, - 0x9b, 0x69, 0x3a, 0xea, 0x06, 0x32, 0xea, 0x59, 0x59, 0xa9, 0x26, 0xf9, 0x4f, 0xaf, 0x7c, 0x5d, - 0x4e, 0x30, 0xee, 0x25, 0xa3, 0x7b, 0x13, 0xd9, 0x5b, 0x7e, 0x70, 0x8e, 0xd6, 0x6d, 0x8e, 0x7c, - 0xf3, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x0d, 0xba, 0x5e, 0x97, 0x74, 0x0b, 0x00, 0x00, + // 1195 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xdb, 0x72, 0xdb, 0x36, + 0x13, 0xfe, 0x25, 0xcb, 0xa7, 0x75, 0x2c, 0x2b, 0x48, 0x7e, 0x87, 0x51, 0xf2, 0x27, 0xfe, 0xd5, + 0xe9, 0xd4, 0x6d, 0x13, 0x69, 0x26, 0x6d, 0x2e, 0x72, 0x9a, 0xa9, 0xac, 0xf8, 0x42, 0xd3, 0x1c, + 0x5c, 0x38, 0xcd, 0x74, 0x7a, 0xc3, 0x81, 0xc8, 0x95, 0x84, 0x06, 0x24, 0x18, 0x00, 0xb4, 0xe5, + 0xe9, 0x0b, 0xf5, 0x21, 0xda, 0x47, 0xe8, 0x45, 0xdf, 0xa8, 0x43, 0x10, 0xa4, 0x24, 0xca, 0x99, + 0xb4, 0x9d, 0xe9, 0x15, 0x09, 0xec, 0xf7, 0x2d, 0x76, 0x17, 0x7b, 0x00, 0x1c, 0x8c, 0xc5, 0x85, + 0x41, 0x1e, 0x8a, 0x1e, 0x0b, 0x23, 0x1e, 0xf7, 0x04, 0x4b, 0xe3, 0x60, 0xea, 0x27, 0x82, 0xc5, + 0xdd, 0x44, 0x49, 0x23, 0x49, 0xb3, 0x40, 0x74, 0x2d, 0xa2, 0xfd, 0xbf, 0x92, 0x11, 0x48, 0x85, + 0x3d, 0x9c, 0x61, 0x90, 0x1a, 0x2e, 0x1d, 0xbc, 0x7d, 0x7b, 0x59, 0x2c, 0xb8, 0x41, 0xc5, 0x84, + 0x76, 0xd2, 0x3b, 0xcb, 0x52, 0x1e, 0x62, 0x6c, 0xf8, 0x98, 0xa3, 0x72, 0xf2, 0x8a, 0x72, 0x1e, + 0x1b, 0x54, 0x63, 0x16, 0xe0, 0xe5, 0xca, 0x35, 0x06, 0xa9, 0xe2, 0xe6, 0x62, 0x85, 0x9c, 0xfb, + 0xa2, 0x83, 0x29, 0x86, 0xa9, 0x28, 0xc8, 0xb7, 0x2a, 0xe2, 0x40, 0x46, 0x51, 0x69, 0xf6, 0xcd, + 0x89, 0x94, 0x13, 0x81, 0x3d, 0xbb, 0x1a, 0xa5, 0xe3, 0x1e, 0x8b, 0x0b, 0xb5, 0x77, 0xab, 0x22, + 0xc3, 0x23, 0xd4, 0x86, 0x45, 0x49, 0xe1, 0x54, 0x15, 0x70, 0xae, 0x58, 0x92, 0xa0, 0x72, 0x4e, + 0x77, 0x66, 0x70, 0xe3, 0x85, 0x0d, 0xeb, 0x89, 0x60, 0xf1, 0x40, 0x21, 0x33, 0x48, 0xf1, 0x7d, + 0x8a, 0xda, 0x90, 0xcf, 0xa1, 0xce, 0x43, 0xaf, 0x76, 0x50, 0x3b, 0xdc, 0x79, 0x70, 0xb3, 0x5b, + 0x46, 0x3a, 0xf3, 0xae, 0x3b, 0x2c, 0x83, 
0x43, 0xeb, 0x3c, 0x24, 0x0f, 0xa0, 0xa1, 0x13, 0x0c, + 0xbc, 0xba, 0x05, 0xdf, 0xe9, 0x2e, 0x5f, 0x4b, 0x77, 0x7e, 0xc2, 0x69, 0x82, 0x01, 0xb5, 0xd8, + 0x4e, 0x1b, 0xbc, 0xd5, 0x93, 0x75, 0x22, 0x63, 0x8d, 0x9d, 0x5f, 0x6a, 0x00, 0x73, 0xe1, 0xbf, + 0x6c, 0x09, 0x79, 0x02, 0x9b, 0x81, 0x90, 0x3a, 0x55, 0xe8, 0xad, 0x59, 0xda, 0xff, 0x3f, 0x4c, + 0x1b, 0xe4, 0x40, 0x5a, 0x30, 0x3a, 0x08, 0xcd, 0xb9, 0xf4, 0x05, 0xd7, 0x86, 0x3c, 0x83, 0x2b, + 0x0b, 0x99, 0xaa, 0xbd, 0xda, 0xc1, 0xda, 0xe1, 0xce, 0x83, 0xf6, 0x87, 0x75, 0xd2, 0x1d, 0x51, + 0xfe, 0x6b, 0x72, 0x1d, 0xd6, 0x8d, 0x7c, 0x87, 0xb1, 0x75, 0x61, 0x9b, 0xe6, 0x8b, 0xce, 0x19, + 0x34, 0xfa, 0xa9, 0x99, 0x92, 0x7b, 0x40, 0x98, 0xd6, 0x69, 0xc4, 0x46, 0x02, 0x7d, 0xce, 0x22, + 0x5f, 0x49, 0x81, 0x36, 0x34, 0xdb, 0xb4, 0x55, 0x4a, 0x86, 0x2c, 0xa2, 0x52, 0x20, 0x79, 0x0a, + 0xed, 0x77, 0xe9, 0x08, 0x55, 0x8c, 0x06, 0xb5, 0xaf, 0x51, 0x9d, 0xf1, 0x00, 0x7d, 0x16, 0x04, + 0x32, 0x8d, 0x8d, 0x3b, 0xc0, 0x9b, 0x23, 0x4e, 0x73, 0x40, 0x3f, 0x97, 0x3f, 0xae, 0x7b, 0xb5, + 0xce, 0x6f, 0x9b, 0x8b, 0xfe, 0x65, 0x41, 0x23, 0x8f, 0x61, 0xe7, 0x5c, 0xaa, 0x77, 0x63, 0x21, + 0xcf, 0xfd, 0xbf, 0x72, 0x2d, 0x50, 0xa0, 0x87, 0x21, 0xf9, 0x16, 0xf6, 0xb2, 0x7d, 0x73, 0xe1, + 0x47, 0x68, 0x58, 0xc8, 0x0c, 0x73, 0x37, 0xd5, 0xf9, 0x70, 0x78, 0x5e, 0x3a, 0x24, 0x6d, 0xe6, + 0xd4, 0x62, 0x4d, 0x8e, 0xa0, 0x19, 0xe2, 0x98, 0xa5, 0xc2, 0xf8, 0x3c, 0x4e, 0x52, 0xa3, 0xdd, + 0xf5, 0xdd, 0xaa, 0xd8, 0x72, 0xc2, 0x14, 0x8b, 0xd0, 0xa0, 0x7a, 0xc9, 0x12, 0xba, 0xeb, 0x28, + 0x43, 0xcb, 0x20, 0x4f, 0xe1, 0xca, 0x98, 0xcf, 0x30, 0x2c, 0x34, 0x34, 0x2e, 0xf5, 0xe6, 0x45, + 0xde, 0x29, 0x32, 0xfe, 0x8e, 0x85, 0x3b, 0xf6, 0x3e, 0x34, 0x6c, 0xfc, 0xd7, 0xb3, 0x48, 0x1e, + 0xd5, 0xbd, 0x1a, 0xb5, 0x6b, 0xd2, 0x85, 0x0d, 0xc1, 0x46, 0x28, 0xb4, 0xb7, 0x61, 0xf5, 0xed, + 0xaf, 0x7a, 0x97, 0x49, 0xa9, 0x43, 0x91, 0x67, 0xb0, 0xc3, 0xe2, 0x58, 0x1a, 0x96, 0x35, 0x2b, + 0xed, 0x6d, 0x56, 0xdd, 0xc8, 0x49, 0xfd, 0x39, 0x84, 0x2e, 0xe2, 0xc9, 0x3d, 
0x68, 0xb0, 0xd4, + 0x4c, 0xbd, 0x2d, 0xcb, 0xbb, 0xbe, 0xc2, 0x4b, 0xcd, 0x34, 0x37, 0x2e, 0x43, 0x91, 0x47, 0xb0, + 0x9d, 0x7d, 0xf3, 0xcc, 0xd9, 0xb6, 0x14, 0xef, 0x32, 0x4a, 0x96, 0x41, 0x96, 0xb6, 0xc5, 0xdc, + 0x8a, 0x0c, 0xa1, 0x55, 0xf4, 0x35, 0x3f, 0x90, 0xb1, 0xc1, 0x99, 0xf1, 0xa0, 0x5a, 0x69, 0x36, + 0x62, 0xa7, 0x0e, 0x36, 0xc8, 0x51, 0x74, 0x4f, 0x2f, 0x6f, 0x90, 0x97, 0x40, 0xde, 0xa7, 0x4c, + 0x64, 0x9a, 0xe4, 0xb8, 0x48, 0x4d, 0xaf, 0x65, 0x95, 0xdd, 0xad, 0x28, 0xfb, 0x2e, 0x07, 0xbe, + 0x1e, 0xbb, 0x04, 0xa5, 0xad, 0xf7, 0x95, 0x1d, 0xf2, 0x03, 0xec, 0x2b, 0x76, 0xee, 0xcb, 0xd4, + 0x24, 0xa9, 0xf1, 0xb3, 0xf4, 0xc8, 0x0c, 0x1c, 0xf3, 0x89, 0x77, 0xd5, 0xaa, 0xfc, 0xa4, 0xea, + 0x21, 0x65, 0xe7, 0xaf, 0x2d, 0xf8, 0x39, 0x33, 0x6c, 0x60, 0xa1, 0xf4, 0x9a, 0x5a, 0xdd, 0x24, + 0x9f, 0xc1, 0x5e, 0xc4, 0x66, 0x7e, 0xc2, 0x14, 0x13, 0x02, 0x05, 0xd7, 0x91, 0x47, 0x0e, 0x6a, + 0x87, 0xeb, 0xb4, 0x19, 0xb1, 0xd9, 0xc9, 0x7c, 0x97, 0x7c, 0x03, 0xbb, 0x76, 0x26, 0xa8, 0x34, + 0x31, 0x7c, 0x24, 0xd0, 0xbb, 0x66, 0x4f, 0x6e, 0x77, 0xf3, 0x16, 0xdc, 0x2d, 0x5a, 0x70, 0xf7, + 0x48, 0x4a, 0xf1, 0x96, 0x89, 0x14, 0xe9, 0x32, 0x21, 0x3b, 0x4a, 0x9e, 0xa1, 0x3a, 0x57, 0xdc, + 0xa0, 0x1f, 0xb0, 0x60, 0x8a, 0xde, 0xf5, 0x83, 0xda, 0xe1, 0x16, 0x6d, 0x96, 0xdb, 0x83, 0x6c, + 0x97, 0x1c, 0x42, 0x03, 0xe3, 0x33, 0xed, 0xfd, 0xf7, 0xf2, 0x0b, 0x3f, 0x8e, 0xcf, 0x34, 0xb5, + 0x88, 0xce, 0xef, 0x75, 0xb8, 0xba, 0xd2, 0xbd, 0xc8, 0x43, 0x58, 0xd7, 0x86, 0x99, 0xbc, 0x71, + 0x34, 0x17, 0xe3, 0xbd, 0xd2, 0x26, 0x33, 0x18, 0xcd, 0xd1, 0xe4, 0x39, 0xec, 0xe1, 0x2c, 0xc1, + 0xc0, 0xcc, 0xeb, 0xa5, 0xfe, 0xf1, 0x8a, 0x6b, 0x16, 0x1c, 0x57, 0x34, 0xc7, 0xd0, 0x2a, 0xb5, + 0xe4, 0xf7, 0x55, 0x14, 0x6e, 0xbb, 0xa2, 0xe6, 0x2d, 0x53, 0x3c, 0x6b, 0x67, 0x99, 0x96, 0xf2, + 0xe4, 0xfc, 0x82, 0x34, 0x79, 0x04, 0x10, 0xd8, 0xa9, 0x11, 0xfa, 0xcc, 0xb8, 0xba, 0x5d, 0x8d, + 0xf5, 0x9b, 0x62, 0x1e, 0xd2, 0x6d, 0x87, 0xee, 0x9b, 0x8c, 0x9a, 0x26, 0x61, 0x41, 0x5d, 0xff, + 0x38, 0xd5, 0xa1, 
0xfb, 0xa6, 0xf3, 0x47, 0x0d, 0xc8, 0x6a, 0x6b, 0x22, 0x5f, 0xc3, 0x56, 0x31, + 0xd1, 0x5d, 0x43, 0x5c, 0x29, 0xa9, 0x53, 0x27, 0xa7, 0x25, 0x92, 0x1c, 0xc1, 0x6e, 0x2c, 0xb3, + 0x2e, 0x19, 0xb8, 0xc2, 0xaf, 0xdb, 0x51, 0x71, 0xbb, 0x4a, 0x7d, 0xb5, 0x00, 0xa2, 0xcb, 0x14, + 0xd2, 0x87, 0xab, 0x6e, 0xda, 0x04, 0x32, 0x0e, 0x79, 0xae, 0x67, 0xcd, 0xe5, 0x45, 0xd5, 0xa5, + 0x7e, 0x7c, 0x41, 0x5b, 0x39, 0x7c, 0x50, 0xa2, 0x3b, 0x3f, 0x2f, 0xbe, 0x01, 0xbe, 0xb7, 0xae, + 0xfe, 0x83, 0x37, 0x40, 0x99, 0x53, 0xf5, 0xbf, 0x93, 0x53, 0xcb, 0xcf, 0x80, 0xe2, 0x70, 0xf7, + 0x0c, 0x38, 0x81, 0x1b, 0xfd, 0xc0, 0xf0, 0x33, 0x5c, 0x98, 0x95, 0xce, 0xb0, 0x87, 0x0b, 0x86, + 0x7d, 0xba, 0x12, 0x2f, 0x16, 0x61, 0x78, 0x6c, 0x87, 0xc5, 0xb2, 0x91, 0x9d, 0x5f, 0x6b, 0x70, + 0xab, 0xaa, 0x32, 0x1b, 0xda, 0x85, 0x5a, 0x0f, 0x36, 0x13, 0x25, 0x7f, 0xc2, 0xc0, 0xb8, 0x99, + 0x5a, 0x2c, 0xc9, 0x3e, 0x6c, 0x84, 0x32, 0x62, 0xbc, 0x98, 0xcb, 0x6e, 0x95, 0x8d, 0x6b, 0xc1, + 0x23, 0x6e, 0x6c, 0xcc, 0x77, 0x69, 0xbe, 0x98, 0x0f, 0xf1, 0xc6, 0xc2, 0x10, 0x27, 0xf7, 0x61, + 0x53, 0x4b, 0x65, 0xfc, 0xd1, 0x85, 0x4b, 0xba, 0x95, 0xca, 0x3d, 0x95, 0xca, 0xd0, 0x8d, 0x0c, + 0x74, 0x74, 0x41, 0x5a, 0xb0, 0x26, 0xd5, 0xc4, 0x8e, 0x90, 0x6d, 0x9a, 0xfd, 0x7e, 0xf1, 0x25, + 0xec, 0x55, 0xc2, 0x48, 0xae, 0xc0, 0xd6, 0xf0, 0x55, 0x7f, 0xf0, 0x66, 0xf8, 0xf6, 0xb8, 0xf5, + 0x1f, 0x02, 0xb0, 0xe1, 0xfe, 0x6b, 0x47, 0xcf, 0x7e, 0x7c, 0x32, 0xe1, 0x66, 0x9a, 0x8e, 0xba, + 0x81, 0x8c, 0x7a, 0xf6, 0x20, 0xa9, 0x26, 0xf9, 0x4f, 0xaf, 0x7c, 0x6f, 0x4e, 0x30, 0xee, 0x25, + 0xa3, 0xfb, 0x13, 0xd9, 0x5b, 0x7e, 0x82, 0x8e, 0x36, 0x6c, 0xd6, 0x7c, 0xf5, 0x67, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x47, 0x12, 0xae, 0xfa, 0x86, 0x0b, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/admin/matchable_resource.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/matchable_resource.pb.go index b721e43d73..1ab204ede3 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/matchable_resource.pb.go +++ 
b/flyteidl/gen/pb-go/flyteidl/admin/matchable_resource.pb.go @@ -724,19 +724,21 @@ func (*MatchingAttributes) XXX_OneofWrappers() []interface{} { } } -// Represents a custom set of attributes applied for either a domain; a domain and project; or -// domain, project and workflow name. +// Represents a custom set of attributes applied for either a domain (and optional org); a domain and project (and optional org); +// or domain, project and workflow name (and optional org). // These are used to override system level defaults for kubernetes cluster resource management, // default execution values, and more all across different levels of specificity. type MatchableAttributesConfiguration struct { - Attributes *MatchingAttributes `protobuf:"bytes,1,opt,name=attributes,proto3" json:"attributes,omitempty"` - Domain string `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` - Project string `protobuf:"bytes,3,opt,name=project,proto3" json:"project,omitempty"` - Workflow string `protobuf:"bytes,4,opt,name=workflow,proto3" json:"workflow,omitempty"` - LaunchPlan string `protobuf:"bytes,5,opt,name=launch_plan,json=launchPlan,proto3" json:"launch_plan,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Attributes *MatchingAttributes `protobuf:"bytes,1,opt,name=attributes,proto3" json:"attributes,omitempty"` + Domain string `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` + Project string `protobuf:"bytes,3,opt,name=project,proto3" json:"project,omitempty"` + Workflow string `protobuf:"bytes,4,opt,name=workflow,proto3" json:"workflow,omitempty"` + LaunchPlan string `protobuf:"bytes,5,opt,name=launch_plan,json=launchPlan,proto3" json:"launch_plan,omitempty"` + // Optional, org key applied to the resource. 
+ Org string `protobuf:"bytes,6,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *MatchableAttributesConfiguration) Reset() { *m = MatchableAttributesConfiguration{} } @@ -799,14 +801,23 @@ func (m *MatchableAttributesConfiguration) GetLaunchPlan() string { return "" } +func (m *MatchableAttributesConfiguration) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Request all matching resource attributes for a resource type. // See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details type ListMatchableAttributesRequest struct { // +required - ResourceType MatchableResource `protobuf:"varint,1,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ResourceType MatchableResource `protobuf:"varint,1,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` + // Optional, org filter applied to list project requests. + Org string `protobuf:"bytes,2,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ListMatchableAttributesRequest) Reset() { *m = ListMatchableAttributesRequest{} } @@ -841,6 +852,13 @@ func (m *ListMatchableAttributesRequest) GetResourceType() MatchableResource { return MatchableResource_TASK_RESOURCE } +func (m *ListMatchableAttributesRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Response for a request for all matching resource attributes for a resource type. 
// See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details type ListMatchableAttributesResponse struct { @@ -905,89 +923,90 @@ func init() { } var fileDescriptor_1d15bcabb02640f4 = []byte{ - // 1338 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x56, 0x59, 0x73, 0x1a, 0x47, - 0x10, 0x66, 0x75, 0x20, 0xd4, 0xc4, 0xb0, 0x1a, 0x5b, 0x12, 0x92, 0x62, 0x5b, 0xd9, 0x1c, 0x56, - 0x92, 0x32, 0xa4, 0x48, 0x1e, 0x9c, 0xc3, 0x55, 0x41, 0x78, 0x31, 0x94, 0x91, 0x90, 0x06, 0xf0, - 0x91, 0x97, 0xad, 0x61, 0x19, 0x96, 0x8d, 0xf6, 0xd2, 0xec, 0xac, 0x10, 0x95, 0x3f, 0x91, 0x87, - 0xe4, 0xa7, 0xe4, 0xa7, 0xe4, 0x3d, 0xef, 0xc9, 0x8f, 0x48, 0xed, 0x09, 0xac, 0xc0, 0xe5, 0xb7, - 0x99, 0x9e, 0xaf, 0x8f, 0xe9, 0xe9, 0xfe, 0x7a, 0xe0, 0xc9, 0xc8, 0x98, 0x72, 0xaa, 0x0f, 0x8d, - 0x0a, 0x19, 0x9a, 0xba, 0x55, 0x31, 0x09, 0x57, 0xc7, 0x64, 0x60, 0x50, 0x85, 0x51, 0xd7, 0xf6, - 0x98, 0x4a, 0xcb, 0x0e, 0xb3, 0xb9, 0x8d, 0x0a, 0x31, 0xb0, 0x1c, 0x00, 0x0f, 0x8f, 0x52, 0x8a, - 0xaa, 0x6d, 0x9a, 0xb6, 0x15, 0x82, 0x0f, 0xd3, 0x56, 0x55, 0xc3, 0x73, 0x39, 0x65, 0x0a, 0x71, - 0x5d, 0x5d, 0xb3, 0x4c, 0x6a, 0xf1, 0x08, 0xf8, 0x30, 0x01, 0xaa, 0x36, 0xa3, 0x15, 0x7a, 0x4b, - 0x55, 0x8f, 0xeb, 0x89, 0x9d, 0x8f, 0x17, 0x8f, 0x5d, 0xaa, 0x7a, 0x4c, 0xe7, 0xd3, 0xe8, 0xf4, - 0x91, 0x66, 0xdb, 0x9a, 0x41, 0x2b, 0xc1, 0x6e, 0xe0, 0x8d, 0x2a, 0x13, 0x46, 0x1c, 0x87, 0x32, - 0x37, 0x3c, 0x97, 0xfe, 0x14, 0x40, 0xec, 0x11, 0xf7, 0x0a, 0x47, 0x37, 0xe9, 0x3a, 0x54, 0x45, - 0x22, 0xac, 0xab, 0x8e, 0x57, 0x12, 0x8e, 0x85, 0x93, 0x6d, 0xec, 0x2f, 0x7d, 0x89, 0xe6, 0x78, - 0xa5, 0xb5, 0x50, 0xa2, 0x39, 0x1e, 0xda, 0x83, 0xac, 0x49, 0x4d, 0x9b, 0x4d, 0x4b, 0xeb, 0x81, - 0x30, 0xda, 0xa1, 0x12, 0x6c, 0xb9, 0xdc, 0x66, 0x44, 0xa3, 0xa5, 0x8d, 0xe0, 0x20, 0xde, 0xa2, - 0xaf, 0x61, 0x87, 0x3a, 0x63, 0x6a, 0x52, 0x46, 0x0c, 0x25, 0xc6, 0x6c, 0x06, 0x18, 0x31, 0x39, - 0xe8, 0x86, 0x72, 0xe9, 0x77, 0x01, 0xf6, 0xe6, 0xe3, 0xaa, 0x71, 
0xce, 0xf4, 0x81, 0xc7, 0xa9, - 0x8b, 0x7e, 0x82, 0xdc, 0x90, 0x8e, 0x88, 0x67, 0x70, 0x37, 0x08, 0x31, 0x5f, 0x3d, 0x2e, 0x2f, - 0x26, 0xbe, 0x9c, 0xbe, 0x11, 0x4e, 0x34, 0xd0, 0x33, 0xc8, 0x1a, 0xba, 0xa9, 0x73, 0x37, 0xb8, - 0xcc, 0x87, 0xe8, 0x46, 0x78, 0xe9, 0x2f, 0x01, 0x0e, 0xea, 0xe1, 0x23, 0x2d, 0x89, 0xea, 0x1d, - 0x00, 0x49, 0x76, 0x25, 0xe1, 0x78, 0xfd, 0x24, 0x5f, 0xfd, 0x3e, 0x6d, 0x7b, 0xa5, 0x7a, 0x79, - 0xb6, 0x94, 0x2d, 0xce, 0xa6, 0x78, 0xce, 0xd8, 0xe1, 0x73, 0x28, 0xa6, 0x8e, 0xfd, 0xf7, 0xb8, - 0xa2, 0xd3, 0xf8, 0x85, 0xae, 0xe8, 0x14, 0x3d, 0x80, 0xcd, 0x1b, 0x62, 0x78, 0x34, 0x7a, 0xa3, - 0x70, 0xf3, 0xc3, 0xda, 0x33, 0x41, 0x2a, 0x43, 0x49, 0x8e, 0x6b, 0xe6, 0xd2, 0xa3, 0xde, 0x7c, - 0xd4, 0x08, 0x36, 0x38, 0xd1, 0xc2, 0x78, 0xb7, 0x71, 0xb0, 0x96, 0x9e, 0xc2, 0x6e, 0x82, 0x8f, - 0x02, 0x6e, 0x93, 0x01, 0x35, 0x66, 0x2e, 0x84, 0x39, 0x17, 0xd2, 0x7f, 0x02, 0x14, 0x2e, 0x0c, - 0x4f, 0xd3, 0xad, 0xce, 0x0d, 0x65, 0x4c, 0x1f, 0x52, 0x74, 0x04, 0xdb, 0x9c, 0xb8, 0x57, 0x0a, - 0x9f, 0x3a, 0x31, 0x38, 0xe7, 0x0b, 0x7a, 0x53, 0x27, 0x38, 0x74, 0x02, 0xb8, 0xa2, 0x0f, 0x4b, - 0x6b, 0x81, 0xdf, 0x5c, 0x28, 0x68, 0x0d, 0x91, 0x01, 0xfb, 0xa6, 0xee, 0xba, 0xba, 0xa5, 0x29, - 0x11, 0x68, 0x40, 0xc7, 0xe4, 0x46, 0xb7, 0x59, 0x50, 0x4d, 0x85, 0xea, 0x77, 0xe9, 0x94, 0x2e, - 0xba, 0x2e, 0x9f, 0x85, 0xda, 0xa1, 0xf4, 0x34, 0xd2, 0xc5, 0xbb, 0xe6, 0x32, 0xb1, 0x54, 0x85, - 0xdd, 0xa5, 0x78, 0x94, 0x83, 0x8d, 0x46, 0xad, 0xd5, 0x16, 0x33, 0xa8, 0x08, 0xf9, 0x7e, 0x57, - 0x56, 0x5e, 0xc8, 0x8d, 0x5a, 0xbf, 0xdd, 0x13, 0x05, 0xa9, 0x03, 0xc5, 0x45, 0x97, 0x7e, 0x41, - 0x6e, 0xdb, 0xf1, 0x26, 0x7a, 0xf9, 0x47, 0xef, 0x0f, 0x13, 0xcf, 0x14, 0xa4, 0x7f, 0xd7, 0x61, - 0xff, 0x8d, 0xcd, 0xae, 0x46, 0x86, 0x3d, 0x99, 0xe5, 0xdd, 0xb6, 0x46, 0xba, 0x86, 0x9e, 0x40, - 0xd1, 0x24, 0xb7, 0x8a, 0x43, 0x18, 0x31, 0x0c, 0x6a, 0xe8, 0xae, 0x19, 0xa4, 0x73, 0x13, 0x17, - 0x4c, 0x72, 0x7b, 0x31, 0x93, 0xa2, 0x16, 0x88, 0x71, 0xe3, 0x2b, 0xaa, 0x6d, 0x71, 0x7a, 0xcb, - 0xa3, 
0xfa, 0x9e, 0x8b, 0xc4, 0xe7, 0x87, 0x72, 0x37, 0x82, 0xd5, 0x43, 0x14, 0x2e, 0xba, 0x8b, - 0x02, 0xf4, 0x16, 0xf6, 0x18, 0x99, 0x28, 0xb6, 0xc7, 0x1d, 0x8f, 0x2b, 0x43, 0xc2, 0x89, 0x6f, - 0x71, 0xa4, 0x6b, 0x41, 0xa3, 0xe7, 0xab, 0x9f, 0xa6, 0xaf, 0x86, 0xc9, 0xa4, 0x13, 0x80, 0x5f, - 0x10, 0x4e, 0xc2, 0xc0, 0xf1, 0x7d, 0x76, 0x57, 0x88, 0xca, 0x90, 0x35, 0xfc, 0x42, 0x72, 0x83, - 0xb7, 0xcc, 0x57, 0xf7, 0xd2, 0x96, 0x82, 0x32, 0x73, 0x71, 0x84, 0x42, 0xcf, 0x21, 0x4f, 0x2c, - 0xcb, 0xe6, 0xc4, 0xcf, 0x88, 0x1b, 0x50, 0x45, 0xbe, 0x7a, 0x94, 0x56, 0xaa, 0xcd, 0x20, 0x78, - 0x1e, 0x8f, 0x7e, 0x86, 0x7b, 0xba, 0xc5, 0x29, 0x63, 0x9e, 0xc3, 0xf5, 0x81, 0x41, 0x4b, 0xd9, - 0xc0, 0xc0, 0x61, 0x39, 0xa4, 0xc4, 0x72, 0x4c, 0x89, 0xe5, 0x53, 0xdb, 0x36, 0x5e, 0xfb, 0xb5, - 0x8c, 0x17, 0x15, 0xfc, 0xf4, 0xfb, 0xef, 0x34, 0x61, 0x3a, 0xa7, 0x8a, 0x4a, 0xd4, 0x31, 0x2d, - 0x6d, 0x1d, 0x0b, 0x27, 0x39, 0x5c, 0x48, 0xc4, 0x75, 0x5f, 0x8a, 0x4e, 0x60, 0x83, 0x5a, 0x37, - 0x6e, 0x29, 0x17, 0x78, 0x78, 0x90, 0x0e, 0x51, 0xb6, 0x6e, 0x5c, 0x1c, 0x20, 0xa4, 0x3f, 0xb2, - 0x80, 0xce, 0xfc, 0xf9, 0xa1, 0x5b, 0xda, 0x5c, 0x1f, 0x0e, 0xa0, 0x14, 0x74, 0x4c, 0x3c, 0x50, - 0x94, 0x05, 0x2e, 0xf1, 0x8d, 0x7e, 0xf1, 0x3e, 0x9e, 0x9a, 0x59, 0x6a, 0x66, 0xf0, 0x1e, 0x5f, - 0xce, 0x9b, 0x57, 0x70, 0x14, 0xcf, 0x98, 0x65, 0x6e, 0xc2, 0x72, 0xf9, 0xf2, 0x83, 0x29, 0xab, - 0x99, 0xc1, 0x07, 0xea, 0x4a, 0x3a, 0x1c, 0xc3, 0x61, 0x32, 0xa8, 0x94, 0x6b, 0x9f, 0x75, 0xe6, - 0x7d, 0x85, 0x95, 0x74, 0x72, 0x27, 0x4f, 0x2b, 0x68, 0xaa, 0x99, 0xc1, 0x25, 0xba, 0x8a, 0xc2, - 0x14, 0xd8, 0x9f, 0x79, 0x8a, 0x2f, 0x18, 0x54, 0x50, 0x54, 0x66, 0x9f, 0xaf, 0x74, 0x33, 0xcf, - 0x6e, 0xcd, 0x0c, 0xde, 0xa5, 0x4b, 0x69, 0xaf, 0x03, 0xe8, 0xda, 0x23, 0x86, 0xdf, 0x5a, 0xf6, - 0x48, 0x71, 0x29, 0xbb, 0xd1, 0x55, 0x1a, 0x55, 0xe3, 0xe3, 0x54, 0x77, 0x5d, 0x86, 0xc0, 0xce, - 0xa8, 0x1b, 0xc2, 0x9a, 0x19, 0x2c, 0x5e, 0xa7, 0x64, 0xa8, 0x0d, 0x62, 0x44, 0x6e, 0x33, 0xda, - 0xc8, 0xa6, 0xcd, 0x2d, 0xa3, 0x0d, 0x3f, 
0x11, 0x45, 0x27, 0xc5, 0x3e, 0x14, 0x0e, 0x26, 0x11, - 0x7d, 0x28, 0x73, 0x89, 0x08, 0x5b, 0x76, 0x2b, 0x30, 0xfb, 0x24, 0x6d, 0x76, 0x05, 0xdf, 0x34, - 0x33, 0x78, 0x7f, 0xb2, 0x82, 0x8a, 0x30, 0xa0, 0xbb, 0x3f, 0x94, 0xa8, 0xe0, 0x3f, 0x59, 0x51, - 0x34, 0xb5, 0x04, 0xd8, 0xcc, 0xe0, 0x1d, 0x35, 0x2d, 0x3c, 0xcd, 0x41, 0x96, 0x13, 0xa6, 0x51, - 0x2e, 0xfd, 0x2d, 0xc0, 0xf1, 0x59, 0xfc, 0xad, 0x9a, 0x3d, 0x6e, 0xe8, 0xdb, 0x63, 0x41, 0x47, - 0xa3, 0xd3, 0xd4, 0x88, 0xf5, 0x5d, 0x4b, 0x69, 0xd7, 0x77, 0x9b, 0x6b, 0x7e, 0x96, 0xfa, 0xdf, - 0x96, 0xa1, 0x6d, 0x12, 0xdd, 0x8a, 0xe6, 0x64, 0xb4, 0xf3, 0xbf, 0x2d, 0x0e, 0xb3, 0x7f, 0xa5, - 0x2a, 0x8f, 0xfe, 0x33, 0xf1, 0x16, 0x1d, 0x42, 0x2e, 0xce, 0x49, 0xf4, 0xa3, 0x49, 0xf6, 0xe8, - 0x31, 0xe4, 0x0d, 0xe2, 0x59, 0xea, 0x58, 0x71, 0x0c, 0x62, 0x45, 0x9f, 0x19, 0x08, 0x45, 0x17, - 0x06, 0xb1, 0xa4, 0x31, 0x3c, 0x6a, 0xeb, 0x2e, 0x5f, 0x72, 0x35, 0x4c, 0xaf, 0x3d, 0xea, 0x72, - 0xd4, 0x80, 0x7b, 0x49, 0x37, 0x26, 0xf3, 0xb2, 0x70, 0x37, 0xa5, 0x89, 0x89, 0xb8, 0xd9, 0xf0, - 0x47, 0xb1, 0x9e, 0x3f, 0x56, 0xa5, 0xdf, 0xe0, 0xf1, 0x4a, 0x4f, 0xae, 0x63, 0x5b, 0x2e, 0x45, - 0x6f, 0xa1, 0xa0, 0xce, 0x27, 0x34, 0x1e, 0x56, 0xdf, 0xac, 0xf4, 0xb5, 0xe2, 0x25, 0x70, 0xca, - 0xce, 0x57, 0xff, 0x08, 0xb0, 0x73, 0x27, 0x40, 0xb4, 0x03, 0xf7, 0x7a, 0xb5, 0xee, 0x2b, 0x05, - 0xcb, 0xdd, 0x4e, 0x1f, 0xd7, 0x65, 0x31, 0x83, 0x1e, 0x80, 0x58, 0x6f, 0xf7, 0xbb, 0x3d, 0x19, - 0xcf, 0xa4, 0x02, 0xba, 0x0f, 0x45, 0xf9, 0xad, 0x5c, 0xef, 0xf7, 0x5a, 0x9d, 0x73, 0xe5, 0xb2, - 0x2f, 0xf7, 0x65, 0x71, 0x0d, 0x1d, 0xc1, 0xfe, 0x4c, 0x18, 0x2b, 0xb5, 0x6b, 0xa7, 0x72, 0x5b, - 0x5c, 0x47, 0x9f, 0xc1, 0xf1, 0x65, 0xbf, 0xd6, 0x6e, 0xf5, 0xde, 0x29, 0x9d, 0x86, 0xd2, 0x95, - 0xf1, 0xeb, 0x56, 0x5d, 0x56, 0xba, 0x17, 0x72, 0xbd, 0xd5, 0x68, 0xd5, 0x6b, 0xbe, 0x8e, 0xb8, - 0xe1, 0xdb, 0xbd, 0x68, 0xf7, 0x5f, 0xb6, 0xce, 0x95, 0xce, 0x6b, 0x19, 0xe3, 0xd6, 0x0b, 0x59, - 0xdc, 0x44, 0x0f, 0xe1, 0xe0, 0x4d, 0x07, 0xbf, 0x6a, 0xb4, 0x3b, 0x6f, 0x94, 
0x39, 0x07, 0x9d, - 0xf3, 0x46, 0xeb, 0xa5, 0x98, 0x45, 0x7b, 0x80, 0x62, 0x67, 0xb5, 0x6e, 0xb7, 0xf5, 0xf2, 0xfc, - 0x4c, 0x3e, 0xef, 0x89, 0x5b, 0xa7, 0xcf, 0x7f, 0xf9, 0x51, 0xd3, 0xf9, 0xd8, 0x1b, 0x94, 0x55, - 0xdb, 0xac, 0x04, 0x09, 0xb3, 0x99, 0x16, 0x2e, 0x2a, 0xc9, 0x17, 0x5c, 0xa3, 0x56, 0xc5, 0x19, - 0x3c, 0xd5, 0xec, 0xca, 0xe2, 0xef, 0x7e, 0x90, 0x0d, 0xa6, 0xcd, 0xb7, 0xff, 0x07, 0x00, 0x00, - 0xff, 0xff, 0x9e, 0xa4, 0xff, 0x06, 0x4c, 0x0c, 0x00, 0x00, + // 1357 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x57, 0x5b, 0x53, 0xdb, 0xc6, + 0x17, 0xb7, 0xb8, 0x18, 0x73, 0xfc, 0x0f, 0x88, 0x4d, 0x00, 0x03, 0xff, 0x24, 0xd4, 0xbd, 0x84, + 0xb6, 0x13, 0xbb, 0x43, 0xfb, 0x90, 0x5e, 0x32, 0x53, 0xe3, 0x88, 0xe0, 0x89, 0xc1, 0xb0, 0xb6, + 0x73, 0xe9, 0x8b, 0x66, 0x2d, 0xaf, 0x85, 0x8a, 0xa4, 0x15, 0xab, 0x15, 0xc6, 0xed, 0x97, 0xe8, + 0x43, 0xfb, 0x51, 0xfa, 0x7d, 0xfa, 0xd6, 0x87, 0xf6, 0x43, 0x74, 0xb4, 0xba, 0xd8, 0x16, 0x76, + 0x26, 0x6f, 0xda, 0xb3, 0xbf, 0x73, 0xd9, 0xb3, 0xe7, 0xfc, 0xce, 0x0a, 0x9e, 0x0c, 0xec, 0x91, + 0xa0, 0x56, 0xdf, 0xae, 0x92, 0xbe, 0x63, 0xb9, 0x55, 0x87, 0x08, 0xe3, 0x92, 0xf4, 0x6c, 0xaa, + 0x73, 0xea, 0xb3, 0x80, 0x1b, 0xb4, 0xe2, 0x71, 0x26, 0x18, 0x5a, 0x4b, 0x80, 0x15, 0x09, 0xdc, + 0xdd, 0xcb, 0x28, 0x1a, 0xcc, 0x71, 0x98, 0x1b, 0x81, 0x77, 0xb3, 0x56, 0x0d, 0x3b, 0xf0, 0x05, + 0xe5, 0x3a, 0xf1, 0x7d, 0xcb, 0x74, 0x1d, 0xea, 0x8a, 0x18, 0xf8, 0x30, 0x05, 0x1a, 0x8c, 0xd3, + 0x2a, 0xbd, 0xa5, 0x46, 0x20, 0xac, 0xd4, 0xce, 0xff, 0xa7, 0xb7, 0x7d, 0x6a, 0x04, 0xdc, 0x12, + 0xa3, 0x78, 0xf7, 0x91, 0xc9, 0x98, 0x69, 0xd3, 0xaa, 0x5c, 0xf5, 0x82, 0x41, 0x75, 0xc8, 0x89, + 0xe7, 0x51, 0xee, 0x47, 0xfb, 0xe5, 0x3f, 0x14, 0x50, 0x3b, 0xc4, 0xbf, 0xc2, 0xf1, 0x49, 0xda, + 0x1e, 0x35, 0x90, 0x0a, 0x8b, 0x86, 0x17, 0x94, 0x94, 0x7d, 0xe5, 0x60, 0x15, 0x87, 0x9f, 0xa1, + 0xc4, 0xf4, 0x82, 0xd2, 0x42, 0x24, 0x31, 0xbd, 0x00, 0x6d, 0x41, 0xde, 0xa1, 0x0e, 0xe3, 0xa3, + 0xd2, 
0xa2, 0x14, 0xc6, 0x2b, 0x54, 0x82, 0x15, 0x5f, 0x30, 0x4e, 0x4c, 0x5a, 0x5a, 0x92, 0x1b, + 0xc9, 0x12, 0x7d, 0x09, 0x1b, 0xd4, 0xbb, 0xa4, 0x0e, 0xe5, 0xc4, 0xd6, 0x13, 0xcc, 0xb2, 0xc4, + 0xa8, 0xe9, 0x46, 0x3b, 0x92, 0x97, 0x7f, 0x53, 0x60, 0x6b, 0x32, 0xae, 0x9a, 0x10, 0xdc, 0xea, + 0x05, 0x82, 0xfa, 0xe8, 0x07, 0x28, 0xf4, 0xe9, 0x80, 0x04, 0xb6, 0xf0, 0x65, 0x88, 0xc5, 0xc3, + 0xfd, 0xca, 0x74, 0xe2, 0x2b, 0xd9, 0x13, 0xe1, 0x54, 0x03, 0x3d, 0x83, 0xbc, 0x6d, 0x39, 0x96, + 0xf0, 0xe5, 0x61, 0x3e, 0x44, 0x37, 0xc6, 0x97, 0xff, 0x54, 0x60, 0xa7, 0x1e, 0x5d, 0xd2, 0x8c, + 0xa8, 0xde, 0x01, 0x90, 0x74, 0x55, 0x52, 0xf6, 0x17, 0x0f, 0x8a, 0x87, 0xdf, 0x66, 0x6d, 0xcf, + 0x55, 0xaf, 0x8c, 0x3f, 0x35, 0x57, 0xf0, 0x11, 0x9e, 0x30, 0xb6, 0xfb, 0x1c, 0xd6, 0x33, 0xdb, + 0xe1, 0x7d, 0x5c, 0xd1, 0x51, 0x72, 0x43, 0x57, 0x74, 0x84, 0x1e, 0xc0, 0xf2, 0x0d, 0xb1, 0x03, + 0x1a, 0xdf, 0x51, 0xb4, 0xf8, 0x6e, 0xe1, 0x99, 0x52, 0xae, 0x40, 0x49, 0x4b, 0x6a, 0xe6, 0x22, + 0xa0, 0xc1, 0x64, 0xd4, 0x08, 0x96, 0x04, 0x31, 0xa3, 0x78, 0x57, 0xb1, 0xfc, 0x2e, 0x3f, 0x85, + 0xcd, 0x14, 0x1f, 0x07, 0xdc, 0x24, 0x3d, 0x6a, 0x8f, 0x5d, 0x28, 0x13, 0x2e, 0xca, 0xff, 0x2a, + 0xb0, 0x76, 0x6e, 0x07, 0xa6, 0xe5, 0xb6, 0x6e, 0x28, 0xe7, 0x56, 0x9f, 0xa2, 0x3d, 0x58, 0x15, + 0xc4, 0xbf, 0xd2, 0xc5, 0xc8, 0x4b, 0xc0, 0x85, 0x50, 0xd0, 0x19, 0x79, 0x72, 0xd3, 0x93, 0x70, + 0xdd, 0xea, 0x97, 0x16, 0xa4, 0xdf, 0x42, 0x24, 0x68, 0xf4, 0x91, 0x0d, 0xdb, 0x8e, 0xe5, 0xfb, + 0x96, 0x6b, 0xea, 0x31, 0xa8, 0x47, 0x2f, 0xc9, 0x8d, 0xc5, 0xb8, 0xac, 0xa6, 0xb5, 0xc3, 0x6f, + 0xb2, 0x29, 0x9d, 0x76, 0x5d, 0x39, 0x8d, 0xb4, 0x23, 0xe9, 0x51, 0xac, 0x8b, 0x37, 0x9d, 0x59, + 0xe2, 0xf2, 0x21, 0x6c, 0xce, 0xc4, 0xa3, 0x02, 0x2c, 0x1d, 0xd7, 0x1a, 0x4d, 0x35, 0x87, 0xd6, + 0xa1, 0xd8, 0x6d, 0x6b, 0xfa, 0x0b, 0xed, 0xb8, 0xd6, 0x6d, 0x76, 0x54, 0xa5, 0xdc, 0x82, 0xf5, + 0x69, 0x97, 0x61, 0x41, 0xae, 0xb2, 0x64, 0x11, 0xdf, 0xfc, 0xa3, 0xf7, 0x87, 0x89, 0xc7, 0x0a, + 0xe5, 0x7f, 0x16, 0x61, 0xfb, 0x0d, 0xe3, 
0x57, 0x03, 0x9b, 0x0d, 0xc7, 0x79, 0x67, 0xee, 0xc0, + 0x32, 0xd1, 0x13, 0x58, 0x77, 0xc8, 0xad, 0xee, 0x11, 0x4e, 0x6c, 0x9b, 0xda, 0x96, 0xef, 0xc8, + 0x74, 0x2e, 0xe3, 0x35, 0x87, 0xdc, 0x9e, 0x8f, 0xa5, 0xa8, 0x01, 0x6a, 0xd2, 0xf8, 0xba, 0xc1, + 0x5c, 0x41, 0x6f, 0x45, 0x5c, 0xdf, 0x13, 0x91, 0x84, 0xfc, 0x50, 0x69, 0xc7, 0xb0, 0x7a, 0x84, + 0xc2, 0xeb, 0xfe, 0xb4, 0x00, 0xbd, 0x85, 0x2d, 0x4e, 0x86, 0x3a, 0x0b, 0x84, 0x17, 0x08, 0xbd, + 0x4f, 0x04, 0x09, 0x2d, 0x0e, 0x2c, 0x53, 0x36, 0x7a, 0xf1, 0xf0, 0xe3, 0xec, 0xd1, 0x30, 0x19, + 0xb6, 0x24, 0xf8, 0x05, 0x11, 0x24, 0x0a, 0x1c, 0xdf, 0xe7, 0x77, 0x85, 0xa8, 0x02, 0x79, 0x3b, + 0x2c, 0x24, 0x5f, 0xde, 0x65, 0xf1, 0x70, 0x2b, 0x6b, 0x49, 0x96, 0x99, 0x8f, 0x63, 0x14, 0x7a, + 0x0e, 0x45, 0xe2, 0xba, 0x4c, 0x90, 0x30, 0x23, 0xbe, 0xa4, 0x8a, 0xe2, 0xe1, 0x5e, 0x56, 0xa9, + 0x36, 0x86, 0xe0, 0x49, 0x3c, 0xfa, 0x11, 0xee, 0x59, 0xae, 0xa0, 0x9c, 0x07, 0x9e, 0xb0, 0x7a, + 0x36, 0x2d, 0xe5, 0xa5, 0x81, 0xdd, 0x4a, 0x44, 0x89, 0x95, 0x84, 0x12, 0x2b, 0x47, 0x8c, 0xd9, + 0xaf, 0xc3, 0x5a, 0xc6, 0xd3, 0x0a, 0x61, 0xfa, 0xc3, 0x7b, 0x1a, 0x72, 0x4b, 0x50, 0xdd, 0x20, + 0xc6, 0x25, 0x2d, 0xad, 0xec, 0x2b, 0x07, 0x05, 0xbc, 0x96, 0x8a, 0xeb, 0xa1, 0x14, 0x1d, 0xc0, + 0x12, 0x75, 0x6f, 0xfc, 0x52, 0x41, 0x7a, 0x78, 0x90, 0x0d, 0x51, 0x73, 0x6f, 0x7c, 0x2c, 0x11, + 0xe5, 0xdf, 0xf3, 0x80, 0x4e, 0xc3, 0xf9, 0x61, 0xb9, 0xe6, 0x44, 0x1f, 0xf6, 0xa0, 0x24, 0x3b, + 0x26, 0x19, 0x28, 0xfa, 0x14, 0x97, 0x84, 0x46, 0x3f, 0x7b, 0x1f, 0x4f, 0x8d, 0x2d, 0x9d, 0xe4, + 0xf0, 0x96, 0x98, 0xcd, 0x9b, 0x57, 0xb0, 0x97, 0xcc, 0x98, 0x59, 0x6e, 0xa2, 0x72, 0xf9, 0xfc, + 0x83, 0x29, 0xeb, 0x24, 0x87, 0x77, 0x8c, 0xb9, 0x74, 0x78, 0x09, 0xbb, 0xe9, 0xa0, 0xd2, 0xaf, + 0x43, 0xd6, 0x99, 0xf4, 0x15, 0x55, 0xd2, 0xc1, 0x9d, 0x3c, 0xcd, 0xa1, 0xa9, 0x93, 0x1c, 0x2e, + 0xd1, 0x79, 0x14, 0xa6, 0xc3, 0xf6, 0xd8, 0x53, 0x72, 0x40, 0x59, 0x41, 0x71, 0x99, 0x7d, 0x3a, + 0xd7, 0xcd, 0x24, 0xbb, 0x9d, 0xe4, 0xf0, 0x26, 0x9d, 0x49, 0x7b, 0x2d, 0x40, 
0xd7, 0x01, 0xb1, + 0xc3, 0xd6, 0x62, 0x03, 0xdd, 0xa7, 0xfc, 0xc6, 0x32, 0x68, 0x5c, 0x8d, 0x8f, 0x33, 0xdd, 0x75, + 0x11, 0x01, 0x5b, 0x83, 0x76, 0x04, 0x3b, 0xc9, 0x61, 0xf5, 0x3a, 0x23, 0x43, 0x4d, 0x50, 0x63, + 0x72, 0x1b, 0xd3, 0x46, 0x3e, 0x6b, 0x6e, 0x16, 0x6d, 0x84, 0x89, 0x58, 0xf7, 0x32, 0xec, 0x43, + 0x61, 0x67, 0x18, 0xd3, 0x87, 0x3e, 0x91, 0x88, 0xa8, 0x65, 0x57, 0xa4, 0xd9, 0x27, 0x59, 0xb3, + 0x73, 0xf8, 0xe6, 0x24, 0x87, 0xb7, 0x87, 0x73, 0xa8, 0x08, 0x03, 0xba, 0xfb, 0x42, 0x89, 0x0b, + 0xfe, 0xa3, 0x39, 0x45, 0x53, 0x4b, 0x81, 0x27, 0x39, 0xbc, 0x61, 0x64, 0x85, 0x47, 0x05, 0xc8, + 0x0b, 0xc2, 0x4d, 0x2a, 0xca, 0x7f, 0x2b, 0xb0, 0x7f, 0x9a, 0x3c, 0xab, 0xc6, 0x97, 0x1b, 0xf9, + 0x0e, 0xb8, 0xec, 0x68, 0x74, 0x94, 0x19, 0xb1, 0xa1, 0xeb, 0x72, 0xd6, 0xf5, 0xdd, 0xe6, 0x9a, + 0x9c, 0xa5, 0xe1, 0xb3, 0xa5, 0xcf, 0x1c, 0x62, 0xb9, 0xf1, 0x9c, 0x8c, 0x57, 0xe1, 0xb3, 0xc5, + 0xe3, 0xec, 0x67, 0x6a, 0x88, 0xf8, 0x3d, 0x93, 0x2c, 0xd1, 0x2e, 0x14, 0x92, 0x9c, 0xc4, 0x2f, + 0x9a, 0x74, 0x8d, 0x1e, 0x43, 0xd1, 0x26, 0x81, 0x6b, 0x5c, 0xea, 0x9e, 0x4d, 0xdc, 0xf8, 0x31, + 0x03, 0x91, 0xe8, 0xdc, 0x26, 0x6e, 0x38, 0xa7, 0x19, 0x37, 0xe5, 0xed, 0xae, 0xe2, 0xf0, 0xb3, + 0xfc, 0x0b, 0x3c, 0x6a, 0x5a, 0xbe, 0x98, 0x71, 0x58, 0x4c, 0xaf, 0x03, 0xea, 0x0b, 0x74, 0x0c, + 0xf7, 0xd2, 0xfe, 0x4c, 0x27, 0xe8, 0xda, 0xdd, 0x24, 0xa7, 0x26, 0x92, 0xf6, 0xc3, 0xff, 0x4b, + 0xf4, 0xe4, 0xa0, 0x8d, 0x7d, 0x2f, 0x8c, 0x7d, 0xff, 0x0a, 0x8f, 0xe7, 0xfa, 0xf6, 0x3d, 0xe6, + 0xfa, 0x14, 0xbd, 0x85, 0x35, 0x63, 0x32, 0xe9, 0xc9, 0x40, 0xfb, 0x6a, 0xae, 0xf7, 0x39, 0xb7, + 0x85, 0x33, 0x76, 0xbe, 0xf8, 0x4b, 0x81, 0x8d, 0x3b, 0x21, 0xa3, 0x0d, 0xb8, 0xd7, 0xa9, 0xb5, + 0x5f, 0xe9, 0x58, 0x6b, 0xb7, 0xba, 0xb8, 0xae, 0xa9, 0x39, 0xf4, 0x00, 0xd4, 0x7a, 0xb3, 0xdb, + 0xee, 0x68, 0x78, 0x2c, 0x55, 0xd0, 0x7d, 0x58, 0xd7, 0xde, 0x6a, 0xf5, 0x6e, 0xa7, 0xd1, 0x3a, + 0xd3, 0x2f, 0xba, 0x5a, 0x57, 0x53, 0x17, 0xd0, 0x1e, 0x6c, 0x8f, 0x85, 0x89, 0x52, 0xb3, 0x76, + 0xa4, 0x35, 0xd5, 
0x45, 0xf4, 0x09, 0xec, 0x5f, 0x74, 0x6b, 0xcd, 0x46, 0xe7, 0x9d, 0xde, 0x3a, + 0xd6, 0xdb, 0x1a, 0x7e, 0xdd, 0xa8, 0x6b, 0x7a, 0xfb, 0x5c, 0xab, 0x37, 0x8e, 0x1b, 0xf5, 0x5a, + 0xa8, 0xa3, 0x2e, 0x85, 0x76, 0xcf, 0x9b, 0xdd, 0x97, 0x8d, 0x33, 0xbd, 0xf5, 0x5a, 0xc3, 0xb8, + 0xf1, 0x42, 0x53, 0x97, 0xd1, 0x43, 0xd8, 0x79, 0xd3, 0xc2, 0xaf, 0x8e, 0x9b, 0xad, 0x37, 0xfa, + 0x84, 0x83, 0xd6, 0xd9, 0x71, 0xe3, 0xa5, 0x9a, 0x47, 0x5b, 0x80, 0x12, 0x67, 0xb5, 0x76, 0xbb, + 0xf1, 0xf2, 0xec, 0x54, 0x3b, 0xeb, 0xa8, 0x2b, 0x47, 0xcf, 0x7f, 0xfa, 0xde, 0xb4, 0xc4, 0x65, + 0xd0, 0xab, 0x18, 0xcc, 0xa9, 0xca, 0x84, 0x31, 0x6e, 0x46, 0x1f, 0xd5, 0xf4, 0x99, 0x6e, 0x52, + 0xb7, 0xea, 0xf5, 0x9e, 0x9a, 0xac, 0x3a, 0xfd, 0x07, 0xd0, 0xcb, 0xcb, 0x89, 0xf4, 0xf5, 0x7f, + 0x01, 0x00, 0x00, 0xff, 0xff, 0xe3, 0xc1, 0x68, 0x9a, 0x70, 0x0c, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/admin/node_execution.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/node_execution.pb.go index f1e7505a38..0e19a4af16 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/node_execution.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/node_execution.pb.go @@ -950,6 +950,84 @@ func (m *NodeExecutionGetDataResponse) GetFlyteUrls() *FlyteURLs { return nil } +type GetDynamicNodeWorkflowRequest struct { + Id *core.NodeExecutionIdentifier `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDynamicNodeWorkflowRequest) Reset() { *m = GetDynamicNodeWorkflowRequest{} } +func (m *GetDynamicNodeWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*GetDynamicNodeWorkflowRequest) ProtoMessage() {} +func (*GetDynamicNodeWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_f73b3eae493fd736, []int{12} +} + +func (m *GetDynamicNodeWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDynamicNodeWorkflowRequest.Unmarshal(m, 
b) +} +func (m *GetDynamicNodeWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDynamicNodeWorkflowRequest.Marshal(b, m, deterministic) +} +func (m *GetDynamicNodeWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDynamicNodeWorkflowRequest.Merge(m, src) +} +func (m *GetDynamicNodeWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_GetDynamicNodeWorkflowRequest.Size(m) +} +func (m *GetDynamicNodeWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDynamicNodeWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDynamicNodeWorkflowRequest proto.InternalMessageInfo + +func (m *GetDynamicNodeWorkflowRequest) GetId() *core.NodeExecutionIdentifier { + if m != nil { + return m.Id + } + return nil +} + +type DynamicNodeWorkflowResponse struct { + CompiledWorkflow *core.CompiledWorkflowClosure `protobuf:"bytes,1,opt,name=compiled_workflow,json=compiledWorkflow,proto3" json:"compiled_workflow,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DynamicNodeWorkflowResponse) Reset() { *m = DynamicNodeWorkflowResponse{} } +func (m *DynamicNodeWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*DynamicNodeWorkflowResponse) ProtoMessage() {} +func (*DynamicNodeWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_f73b3eae493fd736, []int{13} +} + +func (m *DynamicNodeWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DynamicNodeWorkflowResponse.Unmarshal(m, b) +} +func (m *DynamicNodeWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DynamicNodeWorkflowResponse.Marshal(b, m, deterministic) +} +func (m *DynamicNodeWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DynamicNodeWorkflowResponse.Merge(m, src) +} +func (m *DynamicNodeWorkflowResponse) XXX_Size() int { 
+ return xxx_messageInfo_DynamicNodeWorkflowResponse.Size(m) +} +func (m *DynamicNodeWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DynamicNodeWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DynamicNodeWorkflowResponse proto.InternalMessageInfo + +func (m *DynamicNodeWorkflowResponse) GetCompiledWorkflow() *core.CompiledWorkflowClosure { + if m != nil { + return m.CompiledWorkflow + } + return nil +} + func init() { proto.RegisterType((*NodeExecutionGetRequest)(nil), "flyteidl.admin.NodeExecutionGetRequest") proto.RegisterType((*NodeExecutionListRequest)(nil), "flyteidl.admin.NodeExecutionListRequest") @@ -963,6 +1041,8 @@ func init() { proto.RegisterType((*DynamicWorkflowNodeMetadata)(nil), "flyteidl.admin.DynamicWorkflowNodeMetadata") proto.RegisterType((*NodeExecutionGetDataRequest)(nil), "flyteidl.admin.NodeExecutionGetDataRequest") proto.RegisterType((*NodeExecutionGetDataResponse)(nil), "flyteidl.admin.NodeExecutionGetDataResponse") + proto.RegisterType((*GetDynamicNodeWorkflowRequest)(nil), "flyteidl.admin.GetDynamicNodeWorkflowRequest") + proto.RegisterType((*DynamicNodeWorkflowResponse)(nil), "flyteidl.admin.DynamicNodeWorkflowResponse") } func init() { @@ -970,81 +1050,83 @@ func init() { } var fileDescriptor_f73b3eae493fd736 = []byte{ - // 1208 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x57, 0x6d, 0x6f, 0x1b, 0xc5, - 0x13, 0xef, 0x25, 0x4d, 0x62, 0x8f, 0x13, 0x27, 0xd9, 0xbf, 0xfb, 0xaf, 0xdb, 0xf4, 0xc1, 0x5c, - 0x5b, 0x14, 0x40, 0xb5, 0x25, 0x57, 0x45, 0xe5, 0x99, 0x38, 0xe9, 0x43, 0x20, 0x85, 0xb2, 0xa9, - 0x41, 0x42, 0x88, 0xd3, 0xfa, 0x6e, 0xed, 0x2c, 0x3e, 0xdf, 0x5e, 0x77, 0xf7, 0x14, 0xfc, 0x45, - 0x90, 0xf8, 0x2a, 0xbc, 0xe2, 0x13, 0xf0, 0x35, 0x90, 0xf8, 0x00, 0xbc, 0x46, 0xbb, 0xb7, 0x77, - 0xf6, 0x5d, 0xdc, 0x44, 0x2a, 0x2f, 0x78, 0xe7, 0x9d, 0xf9, 0xcd, 0xcc, 0xce, 0xcc, 0x6f, 0x66, - 0xcf, 0x70, 0x67, 0x18, 0x4e, 0x15, 0x65, 0x41, 0xd8, 0x21, 0xc1, 
0x84, 0x45, 0x9d, 0x88, 0x07, - 0xd4, 0xa3, 0x3f, 0x53, 0x3f, 0x51, 0x8c, 0x47, 0xed, 0x58, 0x70, 0xc5, 0x51, 0x3d, 0x03, 0xb5, - 0x0d, 0xe8, 0xfa, 0x4e, 0xc9, 0xc8, 0xe7, 0x93, 0x49, 0x06, 0xbe, 0x7e, 0x33, 0x57, 0xfa, 0x5c, - 0xd0, 0x4e, 0xc9, 0xd7, 0x9c, 0xad, 0x51, 0xfb, 0x44, 0x91, 0x90, 0x8f, 0xac, 0xf2, 0x46, 0x49, - 0xc9, 0x27, 0x31, 0x0b, 0xa9, 0xb0, 0xda, 0x5b, 0x45, 0x2d, 0x0b, 0x68, 0xa4, 0xd8, 0x90, 0xe5, - 0xfa, 0x92, 0x75, 0xc8, 0x14, 0x15, 0x24, 0x94, 0x56, 0x7b, 0x7b, 0xc4, 0xf9, 0x28, 0xa4, 0x1d, - 0x73, 0x1a, 0x24, 0xc3, 0x8e, 0x62, 0x13, 0x2a, 0x15, 0x99, 0xc4, 0x99, 0xfb, 0x32, 0x20, 0x48, - 0x04, 0x99, 0xdd, 0xdc, 0xfd, 0x06, 0xae, 0x7e, 0xc5, 0x03, 0xfa, 0x38, 0x4b, 0xe8, 0x29, 0x55, - 0x98, 0xbe, 0x4a, 0xa8, 0x54, 0xe8, 0x7d, 0x58, 0x62, 0x41, 0xd3, 0x69, 0x39, 0xbb, 0xb5, 0xee, - 0xdb, 0xed, 0xbc, 0x5a, 0xfa, 0x1a, 0xed, 0x82, 0xcd, 0x61, 0x7e, 0x67, 0xbc, 0xc4, 0x02, 0xf7, - 0xd7, 0x25, 0x68, 0x16, 0xf4, 0x47, 0x4c, 0xe6, 0x4e, 0x7f, 0x84, 0x2b, 0xa7, 0x5c, 0x8c, 0x87, - 0x21, 0x3f, 0x9d, 0x75, 0xc4, 0xcb, 0xe3, 0xbc, 0x5b, 0x8a, 0xf3, 0x9d, 0xc5, 0x2e, 0x8a, 0xf5, - 0xbf, 0xd3, 0xb3, 0x4a, 0xd4, 0x80, 0x95, 0x90, 0x4d, 0x98, 0x6a, 0x2e, 0xb5, 0x9c, 0xdd, 0x0d, - 0x9c, 0x1e, 0xb4, 0x54, 0xf1, 0x31, 0x8d, 0x9a, 0xcb, 0x2d, 0x67, 0xb7, 0x8a, 0xd3, 0x03, 0x6a, - 0xc2, 0xda, 0x90, 0x85, 0x8a, 0x0a, 0xd9, 0xbc, 0x6c, 0xe4, 0xd9, 0x11, 0xdd, 0x87, 0x35, 0xc9, - 0x85, 0xf2, 0x06, 0xd3, 0xe6, 0x8a, 0xb9, 0x57, 0xa3, 0x5d, 0x64, 0x4b, 0xfb, 0x98, 0x0b, 0x85, - 0x57, 0x35, 0xa8, 0x37, 0x45, 0xbb, 0xb0, 0x95, 0x44, 0xec, 0x55, 0x42, 0xbd, 0x98, 0x08, 0x1a, - 0x29, 0x9d, 0xcf, 0xaa, 0xf1, 0x58, 0x4f, 0xe5, 0x2f, 0x8c, 0xf8, 0x30, 0x70, 0xff, 0x72, 0xe0, - 0x76, 0xa1, 0x36, 0x4f, 0xb8, 0x78, 0x49, 0xe4, 0x78, 0xbe, 0x44, 0x18, 0xb6, 0x15, 0x91, 0xe3, - 0x45, 0xe5, 0x29, 0xb7, 0x41, 0x9b, 0x2e, 0x2a, 0xcd, 0xa6, 0x2a, 0x2a, 0xfe, 0x93, 0xb2, 0xb8, - 0x7f, 0x3a, 0xb0, 0x51, 0x48, 0xf6, 0x4d, 0x29, 0x85, 0x76, 0xa0, 0xca, 0xa2, 0x38, 0x51, 0x5e, - 0x22, 
0x98, 0x49, 0xa1, 0x8a, 0x2b, 0x46, 0xd0, 0x17, 0x0c, 0x7d, 0x0a, 0x6b, 0x7e, 0xc8, 0x65, - 0x22, 0xa8, 0xc9, 0xa3, 0xd6, 0xbd, 0x5b, 0xbe, 0x55, 0xc1, 0xf5, 0x7e, 0x8a, 0xc5, 0x99, 0x11, - 0xda, 0x83, 0xca, 0x84, 0x2a, 0x12, 0x10, 0x45, 0x4c, 0xc2, 0xb5, 0xee, 0xbd, 0x73, 0x1d, 0x3c, - 0xa7, 0x8a, 0x1c, 0x10, 0x45, 0x70, 0x6e, 0xe6, 0xfe, 0xe6, 0xc0, 0x95, 0x85, 0x18, 0x74, 0x1b, - 0x6a, 0x82, 0x2a, 0x31, 0xf5, 0x46, 0x82, 0x27, 0xb1, 0x49, 0xbd, 0x8a, 0xc1, 0x88, 0x9e, 0x6a, - 0x09, 0xba, 0x0b, 0x75, 0x26, 0x33, 0xde, 0xe8, 0x45, 0x65, 0xf2, 0xab, 0xe0, 0x75, 0x26, 0x53, - 0xd6, 0x68, 0xbf, 0xa8, 0x05, 0xeb, 0x32, 0xa6, 0xbe, 0x01, 0x68, 0x3a, 0xa4, 0x0d, 0x03, 0x2d, - 0xd3, 0xfa, 0xc3, 0x00, 0xdd, 0x04, 0x60, 0xd2, 0x0b, 0xa6, 0x11, 0x99, 0x30, 0xdf, 0xe4, 0x51, - 0xc1, 0x55, 0x26, 0x0f, 0x52, 0x01, 0xba, 0x06, 0x15, 0x26, 0x3d, 0x22, 0x04, 0x49, 0x7b, 0x57, - 0xc1, 0x6b, 0x4c, 0xee, 0xe9, 0xa3, 0xfb, 0x0a, 0xb6, 0xcf, 0x8c, 0x2b, 0x7a, 0x02, 0x9b, 0xc5, - 0xad, 0x29, 0x9b, 0x4e, 0x6b, 0x79, 0xb7, 0xd6, 0xbd, 0x79, 0x6e, 0x6d, 0x70, 0x3d, 0x9a, 0x3f, - 0xca, 0x19, 0xc5, 0x96, 0xe6, 0x28, 0xe6, 0xfe, 0xbd, 0x02, 0x8d, 0x45, 0x4d, 0x41, 0x77, 0x00, - 0x78, 0xa2, 0xb2, 0x4e, 0x9b, 0x6a, 0xf5, 0x96, 0x9a, 0xce, 0xb3, 0x4b, 0xb8, 0x9a, 0xca, 0x75, - 0xc3, 0x1f, 0xc2, 0x0a, 0x15, 0x82, 0x0b, 0xe3, 0xb3, 0x70, 0x23, 0x43, 0xa4, 0xdc, 0xe9, 0x63, - 0x0d, 0x7a, 0x76, 0x09, 0xa7, 0x68, 0xf4, 0x39, 0xd4, 0xac, 0x6f, 0xd3, 0x6a, 0x30, 0xc6, 0xd7, - 0x4a, 0xc6, 0x47, 0xe9, 0x7e, 0x7d, 0x4e, 0x62, 0x1b, 0xd7, 0xde, 0xc7, 0x34, 0xf3, 0x11, 0xac, - 0xc4, 0x27, 0x44, 0xa6, 0x3c, 0xab, 0x77, 0xdd, 0xf3, 0x18, 0xdc, 0x7e, 0xa1, 0x91, 0x38, 0x35, - 0x40, 0x1f, 0x00, 0x48, 0x45, 0x84, 0xa2, 0x81, 0x47, 0x94, 0x65, 0xd9, 0xf5, 0x76, 0xba, 0x9b, - 0xdb, 0xd9, 0x6e, 0x6e, 0xbf, 0xcc, 0x96, 0x37, 0xae, 0x5a, 0xf4, 0x9e, 0x42, 0x0f, 0xa1, 0x92, - 0xed, 0x6c, 0x3b, 0x75, 0xd7, 0xce, 0x18, 0x1e, 0x58, 0x00, 0xce, 0xa1, 0x3a, 0xa2, 0x2f, 0x28, - 0xb1, 0x11, 0x57, 0x2f, 0x8e, 0x68, 0xd1, 
0x7b, 0x4a, 0x9b, 0x26, 0x71, 0x90, 0x99, 0xae, 0x5d, - 0x6c, 0x6a, 0xd1, 0x7b, 0x0a, 0xfd, 0x00, 0xff, 0xcf, 0xd7, 0xbb, 0xe1, 0x4f, 0x3e, 0x59, 0x95, - 0xc5, 0xa3, 0x99, 0x2d, 0x78, 0x5d, 0xbb, 0xe7, 0x16, 0xfb, 0xcc, 0xc1, 0x8d, 0xd3, 0x05, 0x72, - 0xf4, 0x02, 0x90, 0xd9, 0x8c, 0x45, 0xcf, 0x55, 0xe3, 0xb9, 0x55, 0xf6, 0xac, 0x77, 0x63, 0xc9, - 0xeb, 0x96, 0x2a, 0xc9, 0xf4, 0x58, 0x04, 0xd4, 0x1f, 0x1b, 0xb6, 0xd5, 0xd2, 0x65, 0xa7, 0xcf, - 0x9a, 0x65, 0x1d, 0x68, 0xd8, 0x69, 0xf2, 0x7e, 0xe2, 0x03, 0xcf, 0x8c, 0x9f, 0x86, 0xad, 0x1b, - 0xd8, 0xb6, 0xd5, 0x7d, 0xc1, 0x07, 0xc7, 0x31, 0xf5, 0xfb, 0x82, 0xf5, 0x36, 0x61, 0xc3, 0xf2, - 0x4b, 0x50, 0x99, 0x84, 0xaa, 0xb7, 0x0d, 0x9b, 0x8a, 0x88, 0x11, 0x55, 0xf9, 0x5d, 0xdd, 0x00, - 0x1a, 0x8b, 0x32, 0x46, 0x47, 0x50, 0xa3, 0xb3, 0xdd, 0xf7, 0x06, 0x8f, 0xe1, 0xbc, 0xb9, 0xfb, - 0xbb, 0x03, 0x5b, 0xe5, 0xf4, 0xd1, 0x01, 0xac, 0xfb, 0xc4, 0x3f, 0xa1, 0x9e, 0x54, 0x44, 0x25, - 0xd2, 0xc4, 0xa8, 0x77, 0xdf, 0x2a, 0xc5, 0xd8, 0x4f, 0x3f, 0x5d, 0xf6, 0x35, 0xf2, 0xd8, 0x00, - 0x71, 0xcd, 0x9f, 0x1d, 0xd0, 0x67, 0x50, 0xb3, 0x5f, 0x37, 0xde, 0x98, 0x4e, 0xed, 0x04, 0xde, - 0x5a, 0xec, 0x24, 0x0b, 0x8d, 0xc1, 0x9a, 0x7c, 0x49, 0xa7, 0xe8, 0x1e, 0xd4, 0xfd, 0x13, 0xea, - 0x8f, 0x63, 0xce, 0xa2, 0x74, 0xca, 0xd3, 0x47, 0x66, 0x63, 0x26, 0xed, 0x0b, 0xe6, 0xfe, 0xe1, - 0xc0, 0x8e, 0xdd, 0x5d, 0x0b, 0x0b, 0xf6, 0xce, 0xdc, 0x4b, 0x52, 0x9e, 0xe1, 0xd2, 0xe3, 0x71, - 0x0c, 0xdb, 0xf6, 0x9b, 0x2b, 0xf0, 0x32, 0x5a, 0xd9, 0x8b, 0x97, 0xdf, 0xa0, 0x7d, 0x8b, 0xcb, - 0x42, 0x66, 0x6f, 0xc5, 0x96, 0x5f, 0x52, 0xbc, 0x96, 0x1d, 0xcb, 0xaf, 0x61, 0x87, 0xdb, 0x87, - 0x9d, 0xf2, 0x87, 0x96, 0x79, 0x44, 0xfe, 0xe5, 0xc7, 0xd6, 0x2f, 0xcb, 0x70, 0x63, 0xb1, 0x5f, - 0x19, 0xf3, 0x48, 0x52, 0xf4, 0x00, 0x56, 0xcd, 0x4b, 0x29, 0xad, 0xf3, 0xab, 0xe5, 0x39, 0xe9, - 0x8b, 0xb0, 0x17, 0xf2, 0x81, 0x5e, 0x77, 0xd8, 0x42, 0xd1, 0x43, 0x58, 0x4b, 0xa9, 0x2c, 0x6d, - 0xa1, 0xce, 0xb5, 0xca, 0xb0, 0xe8, 0x43, 0xa8, 0x0d, 0x93, 0x30, 0xf4, 0x6c, 
0xc0, 0xe5, 0x0b, - 0x36, 0x2c, 0x06, 0x8d, 0x3e, 0x4c, 0x43, 0x7e, 0x0c, 0xeb, 0xc6, 0x36, 0x8b, 0x7b, 0xf9, 0x22, - 0x63, 0x13, 0xea, 0x6b, 0x1b, 0xf9, 0x5b, 0xd8, 0xca, 0xda, 0x91, 0xb7, 0x78, 0xcb, 0x78, 0x78, - 0xaf, 0x7c, 0xf3, 0x73, 0x58, 0x85, 0x37, 0x83, 0xa2, 0x12, 0x3d, 0x02, 0x30, 0xe6, 0x5e, 0x22, - 0x42, 0xd9, 0xdc, 0x2e, 0xdf, 0x29, 0xf5, 0xf8, 0x44, 0x1f, 0xfb, 0xf8, 0x48, 0xe2, 0xaa, 0xd1, - 0xf4, 0x45, 0x28, 0x7b, 0x9f, 0x7c, 0xff, 0xd1, 0x88, 0xa9, 0x93, 0x64, 0xd0, 0xf6, 0xf9, 0xa4, - 0x63, 0xe4, 0x5c, 0x8c, 0xd2, 0x1f, 0x9d, 0xfc, 0x9b, 0x7e, 0x44, 0xa3, 0x4e, 0x3c, 0xb8, 0x3f, - 0xe2, 0x9d, 0xe2, 0xbf, 0x8f, 0xc1, 0xaa, 0xd9, 0xb3, 0x0f, 0xfe, 0x09, 0x00, 0x00, 0xff, 0xff, - 0xe3, 0xde, 0x8f, 0xda, 0xcb, 0x0c, 0x00, 0x00, + // 1236 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x57, 0xdf, 0x6e, 0xdb, 0xb6, + 0x17, 0xae, 0x92, 0x26, 0xb1, 0x8f, 0x13, 0x27, 0xe1, 0x2f, 0xfd, 0xd5, 0x6d, 0x9a, 0x36, 0x53, + 0xdb, 0x21, 0xdb, 0x50, 0x1b, 0x48, 0xd1, 0xa1, 0xfb, 0xbf, 0x38, 0xe9, 0x9f, 0x6c, 0xe9, 0xd6, + 0x31, 0xf5, 0x0a, 0x0c, 0xc3, 0x04, 0x5a, 0xa2, 0x1d, 0xce, 0xb2, 0xa8, 0x92, 0x14, 0x32, 0xbf, + 0xc8, 0x80, 0xbd, 0xca, 0xae, 0xf6, 0x04, 0x7b, 0x8d, 0x01, 0x7b, 0x80, 0x5d, 0x0f, 0xa4, 0x28, + 0xd9, 0x52, 0xd5, 0x04, 0x68, 0x2e, 0x76, 0x67, 0x9e, 0xf3, 0x9d, 0xef, 0x90, 0x3c, 0xdf, 0x39, + 0x94, 0xe1, 0xf6, 0x20, 0x9c, 0x28, 0xca, 0x82, 0xb0, 0x43, 0x82, 0x31, 0x8b, 0x3a, 0x11, 0x0f, + 0xa8, 0x47, 0x7f, 0xa1, 0x7e, 0xa2, 0x18, 0x8f, 0xda, 0xb1, 0xe0, 0x8a, 0xa3, 0x66, 0x06, 0x6a, + 0x1b, 0xd0, 0xf5, 0xcd, 0x52, 0x90, 0xcf, 0xc7, 0xe3, 0x0c, 0x7c, 0x7d, 0x2b, 0x77, 0xfa, 0x5c, + 0xd0, 0x4e, 0x89, 0x6b, 0x26, 0xd6, 0xb8, 0x7d, 0xa2, 0x48, 0xc8, 0x87, 0xd6, 0x79, 0xa3, 0xe4, + 0xe4, 0xe3, 0x98, 0x85, 0x54, 0x58, 0xef, 0xcd, 0xa2, 0x97, 0x05, 0x34, 0x52, 0x6c, 0xc0, 0x72, + 0x7f, 0x29, 0x3a, 0x64, 0x8a, 0x0a, 0x12, 0x4a, 0xeb, 0xbd, 0x35, 0xe4, 0x7c, 0x18, 0xd2, 0x8e, + 0x59, 0xf5, 0x93, 
0x41, 0x47, 0xb1, 0x31, 0x95, 0x8a, 0x8c, 0xe3, 0x8c, 0xbe, 0x0c, 0x08, 0x12, + 0x41, 0xa6, 0x3b, 0x77, 0xbf, 0x83, 0xab, 0xdf, 0xf0, 0x80, 0x3e, 0xca, 0x0e, 0xf4, 0x84, 0x2a, + 0x4c, 0x5f, 0x25, 0x54, 0x2a, 0xf4, 0x21, 0xcc, 0xb1, 0xa0, 0xe5, 0x6c, 0x3b, 0x3b, 0x8d, 0xdd, + 0x77, 0xdb, 0xf9, 0x6d, 0xe9, 0x6d, 0xb4, 0x0b, 0x31, 0x87, 0xf9, 0x9e, 0xf1, 0x1c, 0x0b, 0xdc, + 0xdf, 0xe6, 0xa0, 0x55, 0xf0, 0x1f, 0x31, 0x99, 0x93, 0xfe, 0x04, 0x57, 0x4e, 0xb9, 0x18, 0x0d, + 0x42, 0x7e, 0x3a, 0xad, 0x88, 0x97, 0xe7, 0x79, 0xbf, 0x94, 0xe7, 0xa5, 0xc5, 0x56, 0xe5, 0xfa, + 0xdf, 0xe9, 0xeb, 0x4e, 0xb4, 0x01, 0x0b, 0x21, 0x1b, 0x33, 0xd5, 0x9a, 0xdb, 0x76, 0x76, 0x56, + 0x70, 0xba, 0xd0, 0x56, 0xc5, 0x47, 0x34, 0x6a, 0xcd, 0x6f, 0x3b, 0x3b, 0x75, 0x9c, 0x2e, 0x50, + 0x0b, 0x96, 0x06, 0x2c, 0x54, 0x54, 0xc8, 0xd6, 0x65, 0x63, 0xcf, 0x96, 0xe8, 0x1e, 0x2c, 0x49, + 0x2e, 0x94, 0xd7, 0x9f, 0xb4, 0x16, 0xcc, 0xbe, 0x36, 0xda, 0x45, 0xb5, 0xb4, 0x8f, 0xb9, 0x50, + 0x78, 0x51, 0x83, 0xba, 0x13, 0xb4, 0x03, 0x6b, 0x49, 0xc4, 0x5e, 0x25, 0xd4, 0x8b, 0x89, 0xa0, + 0x91, 0xd2, 0xe7, 0x59, 0x34, 0x8c, 0xcd, 0xd4, 0xfe, 0xdc, 0x98, 0x0f, 0x03, 0xf7, 0x6f, 0x07, + 0x6e, 0x15, 0xee, 0xe6, 0x31, 0x17, 0x2f, 0x88, 0x1c, 0xcd, 0x5e, 0x11, 0x86, 0x75, 0x45, 0xe4, + 0xa8, 0xea, 0x7a, 0xca, 0x65, 0xd0, 0xa1, 0x55, 0x57, 0xb3, 0xaa, 0x8a, 0x8e, 0xff, 0xe4, 0x5a, + 0xdc, 0xbf, 0x1c, 0x58, 0x29, 0x1c, 0xf6, 0x6d, 0x25, 0x85, 0x36, 0xa1, 0xce, 0xa2, 0x38, 0x51, + 0x5e, 0x22, 0x98, 0x39, 0x42, 0x1d, 0xd7, 0x8c, 0xa1, 0x27, 0x18, 0xfa, 0x1c, 0x96, 0xfc, 0x90, + 0xcb, 0x44, 0x50, 0x73, 0x8e, 0xc6, 0xee, 0x9d, 0xf2, 0xae, 0x0a, 0xd4, 0xfb, 0x29, 0x16, 0x67, + 0x41, 0x68, 0x0f, 0x6a, 0x63, 0xaa, 0x48, 0x40, 0x14, 0x31, 0x07, 0x6e, 0xec, 0xde, 0x3d, 0x93, + 0xe0, 0x19, 0x55, 0xe4, 0x80, 0x28, 0x82, 0xf3, 0x30, 0xf7, 0x77, 0x07, 0xae, 0x54, 0x62, 0xd0, + 0x2d, 0x68, 0x08, 0xaa, 0xc4, 0xc4, 0x1b, 0x0a, 0x9e, 0xc4, 0xe6, 0xe8, 0x75, 0x0c, 0xc6, 0xf4, + 0x44, 0x5b, 0xd0, 0x1d, 0x68, 0x32, 0x99, 0xe9, 0x46, 
0x0f, 0x2a, 0x73, 0xbe, 0x1a, 0x5e, 0x66, + 0x32, 0x55, 0x8d, 0xe6, 0x45, 0xdb, 0xb0, 0x2c, 0x63, 0xea, 0x1b, 0x80, 0x96, 0x43, 0x5a, 0x30, + 0xd0, 0x36, 0xed, 0x3f, 0x0c, 0xd0, 0x16, 0x00, 0x93, 0x5e, 0x30, 0x89, 0xc8, 0x98, 0xf9, 0xe6, + 0x1c, 0x35, 0x5c, 0x67, 0xf2, 0x20, 0x35, 0xa0, 0x6b, 0x50, 0x63, 0xd2, 0x23, 0x42, 0x90, 0xb4, + 0x76, 0x35, 0xbc, 0xc4, 0xe4, 0x9e, 0x5e, 0xba, 0xaf, 0x60, 0xfd, 0xb5, 0x76, 0x45, 0x8f, 0x61, + 0xb5, 0x38, 0x35, 0x65, 0xcb, 0xd9, 0x9e, 0xdf, 0x69, 0xec, 0x6e, 0x9d, 0x79, 0x37, 0xb8, 0x19, + 0xcd, 0x2e, 0xe5, 0x54, 0x62, 0x73, 0x33, 0x12, 0x73, 0xff, 0x59, 0x80, 0x8d, 0xaa, 0xa2, 0xa0, + 0xdb, 0x00, 0x3c, 0x51, 0x59, 0xa5, 0xcd, 0x6d, 0x75, 0xe7, 0x5a, 0xce, 0xd3, 0x4b, 0xb8, 0x9e, + 0xda, 0x75, 0xc1, 0x1f, 0xc0, 0x02, 0x15, 0x82, 0x0b, 0xc3, 0x59, 0xd8, 0x91, 0x11, 0x52, 0x4e, + 0xfa, 0x48, 0x83, 0x9e, 0x5e, 0xc2, 0x29, 0x1a, 0x7d, 0x09, 0x0d, 0xcb, 0x6d, 0x4a, 0x0d, 0x26, + 0xf8, 0x5a, 0x29, 0xf8, 0x28, 0x9d, 0xaf, 0xcf, 0x48, 0x6c, 0xf3, 0xda, 0xfd, 0x98, 0x62, 0x3e, + 0x84, 0x85, 0xf8, 0x84, 0xc8, 0x54, 0x67, 0xcd, 0x5d, 0xf7, 0x2c, 0x05, 0xb7, 0x9f, 0x6b, 0x24, + 0x4e, 0x03, 0xd0, 0x47, 0x00, 0x52, 0x11, 0xa1, 0x68, 0xe0, 0x11, 0x65, 0x55, 0x76, 0xbd, 0x9d, + 0xce, 0xe6, 0x76, 0x36, 0x9b, 0xdb, 0x2f, 0xb2, 0xe1, 0x8d, 0xeb, 0x16, 0xbd, 0xa7, 0xd0, 0x03, + 0xa8, 0x65, 0x33, 0xdb, 0x76, 0xdd, 0xb5, 0xd7, 0x02, 0x0f, 0x2c, 0x00, 0xe7, 0x50, 0x9d, 0xd1, + 0x17, 0x94, 0xd8, 0x8c, 0x8b, 0xe7, 0x67, 0xb4, 0xe8, 0x3d, 0xa5, 0x43, 0x93, 0x38, 0xc8, 0x42, + 0x97, 0xce, 0x0f, 0xb5, 0xe8, 0x3d, 0x85, 0x7e, 0x84, 0xff, 0xe7, 0xe3, 0xdd, 0xe8, 0x27, 0xef, + 0xac, 0x5a, 0x75, 0x6b, 0x66, 0x03, 0x5e, 0xdf, 0xdd, 0x33, 0x8b, 0x7d, 0xea, 0xe0, 0x8d, 0xd3, + 0x0a, 0x3b, 0x7a, 0x0e, 0xc8, 0x4c, 0xc6, 0x22, 0x73, 0xdd, 0x30, 0x6f, 0x97, 0x99, 0xf5, 0x6c, + 0x2c, 0xb1, 0xae, 0xa9, 0x92, 0x4d, 0xb7, 0x45, 0x40, 0xfd, 0x91, 0x51, 0x5b, 0x23, 0x1d, 0x76, + 0x7a, 0xad, 0x55, 0xd6, 0x81, 0x0d, 0xdb, 0x4d, 0xde, 0xcf, 0xbc, 0xef, 0x99, 0xf6, 0xd3, 
0xb0, + 0x65, 0x03, 0x5b, 0xb7, 0xbe, 0xaf, 0x78, 0xff, 0x38, 0xa6, 0x7e, 0x4f, 0xb0, 0xee, 0x2a, 0xac, + 0x58, 0x7d, 0x09, 0x2a, 0x93, 0x50, 0x75, 0xd7, 0x61, 0x55, 0x11, 0x31, 0xa4, 0x2a, 0xdf, 0xab, + 0x1b, 0xc0, 0x46, 0xd5, 0x89, 0xd1, 0x11, 0x34, 0xe8, 0x74, 0xf6, 0xbd, 0xc5, 0x63, 0x38, 0x1b, + 0xee, 0xfe, 0xe1, 0xc0, 0x5a, 0xf9, 0xf8, 0xe8, 0x00, 0x96, 0x7d, 0xe2, 0x9f, 0x50, 0x4f, 0x2a, + 0xa2, 0x12, 0x69, 0x72, 0x34, 0x77, 0xdf, 0x29, 0xe5, 0xd8, 0x4f, 0x3f, 0x5d, 0xf6, 0x35, 0xf2, + 0xd8, 0x00, 0x71, 0xc3, 0x9f, 0x2e, 0xd0, 0x17, 0xd0, 0xb0, 0x5f, 0x37, 0xde, 0x88, 0x4e, 0x6c, + 0x07, 0xde, 0xac, 0x26, 0xc9, 0x52, 0x63, 0xb0, 0x21, 0x5f, 0xd3, 0x09, 0xba, 0x0b, 0x4d, 0xff, + 0x84, 0xfa, 0xa3, 0x98, 0xb3, 0x28, 0xed, 0xf2, 0xf4, 0x91, 0x59, 0x99, 0x5a, 0x7b, 0x82, 0xb9, + 0x7f, 0x3a, 0xb0, 0x69, 0x67, 0x57, 0xe5, 0x85, 0xbd, 0x37, 0xf3, 0x92, 0x94, 0x7b, 0xb8, 0xf4, + 0x78, 0x1c, 0xc3, 0xba, 0xfd, 0xe6, 0x0a, 0xbc, 0x4c, 0x56, 0x76, 0xe3, 0xe5, 0x37, 0x68, 0xdf, + 0xe2, 0xb2, 0x94, 0xd9, 0x5b, 0xb1, 0xe6, 0x97, 0x1c, 0x6f, 0x54, 0xc7, 0xfc, 0x1b, 0xd4, 0xe1, + 0xf6, 0x60, 0xb3, 0xfc, 0xa1, 0x65, 0x1e, 0x91, 0x0b, 0x7e, 0x6c, 0xfd, 0x3a, 0x0f, 0x37, 0xaa, + 0x79, 0x65, 0xcc, 0x23, 0x49, 0xd1, 0x7d, 0x58, 0x34, 0x2f, 0xa5, 0xb4, 0xe4, 0x57, 0xcb, 0x7d, + 0xd2, 0x13, 0x61, 0x37, 0xe4, 0x7d, 0x3d, 0xee, 0xb0, 0x85, 0xa2, 0x07, 0xb0, 0x94, 0x4a, 0x59, + 0xda, 0x8b, 0x3a, 0x33, 0x2a, 0xc3, 0xa2, 0x8f, 0xa1, 0x31, 0x48, 0xc2, 0xd0, 0xb3, 0x09, 0xe7, + 0xcf, 0x99, 0xb0, 0x18, 0x34, 0xfa, 0x30, 0x4d, 0xf9, 0x29, 0x2c, 0x9b, 0xd8, 0x2c, 0xef, 0xe5, + 0xf3, 0x82, 0x4d, 0xaa, 0x6f, 0x6d, 0xe6, 0xef, 0x61, 0x2d, 0x2b, 0x47, 0x5e, 0xe2, 0x35, 0xc3, + 0xf0, 0x41, 0x79, 0xe7, 0x67, 0xa8, 0x0a, 0xaf, 0x06, 0x45, 0x27, 0x7a, 0x08, 0x60, 0xc2, 0xbd, + 0x44, 0x84, 0xb2, 0xb5, 0x5e, 0xde, 0x53, 0xca, 0xf8, 0x58, 0x2f, 0x7b, 0xf8, 0x48, 0xe2, 0xba, + 0xf1, 0xf4, 0x44, 0x28, 0xdd, 0x97, 0xb0, 0xa5, 0x4b, 0x91, 0xf2, 0xe9, 0x24, 0x19, 0xe7, 0x45, + 0x2b, 0x2e, 0xf2, 0xc6, 0x28, 
0xb2, 0xda, 0x7a, 0x57, 0xaa, 0xdd, 0xb9, 0x98, 0xda, 0xbb, 0x9f, + 0xfd, 0xf0, 0xc9, 0x90, 0xa9, 0x93, 0xa4, 0xdf, 0xf6, 0xf9, 0xb8, 0x63, 0x58, 0xb8, 0x18, 0xa6, + 0x3f, 0x3a, 0xf9, 0x1f, 0x94, 0x21, 0x8d, 0x3a, 0x71, 0xff, 0xde, 0x90, 0x77, 0x8a, 0x7f, 0xa5, + 0xfa, 0x8b, 0xe6, 0xd1, 0xb8, 0xff, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x75, 0xb4, 0x2b, 0xe9, + 0x98, 0x0d, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/admin/project.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/project.pb.go index 662d2311ac..7864459c54 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/project.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/project.pb.go @@ -113,11 +113,13 @@ type Project struct { Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` // Leverage Labels from flyteidl.admin.common.proto to // tag projects with ownership information. - Labels *Labels `protobuf:"bytes,5,opt,name=labels,proto3" json:"labels,omitempty"` - State Project_ProjectState `protobuf:"varint,6,opt,name=state,proto3,enum=flyteidl.admin.Project_ProjectState" json:"state,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Labels *Labels `protobuf:"bytes,5,opt,name=labels,proto3" json:"labels,omitempty"` + State Project_ProjectState `protobuf:"varint,6,opt,name=state,proto3,enum=flyteidl.admin.Project_ProjectState" json:"state,omitempty"` + // Optional, org key applied to the resource. + Org string `protobuf:"bytes,7,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Project) Reset() { *m = Project{} } @@ -187,6 +189,13 @@ func (m *Project) GetState() Project_ProjectState { return Project_ACTIVE } +func (m *Project) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Represents a list of projects. 
// See :ref:`ref_flyteidl.admin.Project` for more details type Projects struct { @@ -254,7 +263,9 @@ type ProjectListRequest struct { Filters string `protobuf:"bytes,3,opt,name=filters,proto3" json:"filters,omitempty"` // Sort ordering. // +optional - SortBy *Sort `protobuf:"bytes,4,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` + SortBy *Sort `protobuf:"bytes,4,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` + // Optional, org filter applied to list project requests. + Org string `protobuf:"bytes,5,opt,name=org,proto3" json:"org,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -313,6 +324,13 @@ func (m *ProjectListRequest) GetSortBy() *Sort { return nil } +func (m *ProjectListRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Adds a new user-project within the Flyte deployment. // See :ref:`ref_flyteidl.admin.Project` for more details type ProjectRegisterRequest struct { @@ -433,34 +451,35 @@ func init() { func init() { proto.RegisterFile("flyteidl/admin/project.proto", fileDescriptor_2db065ce03bf106d) } var fileDescriptor_2db065ce03bf106d = []byte{ - // 459 bytes of a gzipped FileDescriptorProto + // 474 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xcd, 0x6e, 0xd3, 0x40, - 0x10, 0xc7, 0xb1, 0xdb, 0xd8, 0x61, 0x52, 0xa2, 0x68, 0x14, 0x1a, 0xf3, 0x71, 0xb0, 0x2c, 0x0e, - 0x39, 0x50, 0x1b, 0xd2, 0x1b, 0x08, 0xa4, 0xb4, 0xb1, 0xa0, 0xa2, 0x20, 0xb4, 0x49, 0x2b, 0xc1, - 0xa5, 0xf2, 0xc7, 0xd6, 0x2c, 0xd8, 0x5e, 0xe3, 0xdd, 0x1e, 0xf2, 0x08, 0x3c, 0x0c, 0xef, 0x88, - 0xbc, 0x5e, 0x57, 0x6d, 0x08, 0xa8, 0x27, 0xef, 0xcc, 0xfc, 0x76, 0xe6, 0x3f, 0x33, 0x5e, 0x78, - 0x7a, 0x99, 0xaf, 0x25, 0x65, 0x69, 0x1e, 0x44, 0x69, 0xc1, 0xca, 0xa0, 0xaa, 0xf9, 0x77, 0x9a, - 0x48, 0xbf, 0xaa, 0xb9, 0xe4, 0x38, 0xec, 0xa2, 0xbe, 0x8a, 0x3e, 0x7e, 0xb2, 0x41, 0x27, 0xbc, - 0x28, 0x78, 
0xd9, 0xc2, 0xde, 0x73, 0xb0, 0x16, 0xbc, 0x88, 0x58, 0x89, 0x43, 0x30, 0x59, 0xea, - 0x18, 0xae, 0x31, 0xbd, 0x4f, 0x4c, 0x96, 0x22, 0xc2, 0x6e, 0x19, 0x15, 0xd4, 0x31, 0x95, 0x47, - 0x9d, 0xbd, 0xdf, 0x26, 0xd8, 0x9f, 0xdb, 0x62, 0x77, 0xe1, 0xf1, 0x05, 0xd8, 0xa9, 0xca, 0x2e, - 0x9c, 0x1d, 0x77, 0x67, 0x3a, 0x98, 0xed, 0xfb, 0xb7, 0xc5, 0xf9, 0x6d, 0x71, 0xd2, 0x61, 0xe8, - 0xc2, 0x20, 0xa5, 0x22, 0xa9, 0x59, 0x25, 0x19, 0x2f, 0x9d, 0x5d, 0x95, 0xec, 0xa6, 0x0b, 0x7d, - 0xb0, 0xf2, 0x28, 0xa6, 0xb9, 0x70, 0x7a, 0xae, 0xb1, 0x2d, 0xe5, 0xa9, 0x8a, 0x12, 0x4d, 0xe1, - 0x2b, 0xe8, 0x09, 0x19, 0x49, 0xea, 0x58, 0xae, 0x31, 0x1d, 0xce, 0x9e, 0x6d, 0xe2, 0xba, 0x9f, - 0xee, 0xbb, 0x6c, 0x58, 0xd2, 0x5e, 0xf1, 0xde, 0xc2, 0xde, 0x4d, 0x37, 0x02, 0x58, 0xf3, 0xe3, - 0xd5, 0xc9, 0x79, 0x38, 0xba, 0x87, 0x7b, 0xd0, 0x9f, 0x93, 0xe3, 0xf7, 0x27, 0xe7, 0xe1, 0x62, - 0x64, 0xe0, 0x18, 0x46, 0xcb, 0x2f, 0xcb, 0x55, 0xf8, 0xf1, 0xe2, 0x5d, 0xf8, 0x29, 0x24, 0xf3, - 0x55, 0xb8, 0x18, 0x99, 0xde, 0x19, 0xf4, 0xf5, 0x7d, 0x81, 0x87, 0xd0, 0xd7, 0x7b, 0x12, 0x8e, - 0xa1, 0x86, 0x31, 0xf9, 0x87, 0x14, 0x72, 0x0d, 0xe2, 0x18, 0x7a, 0x92, 0xff, 0xa0, 0xa5, 0x9e, - 0x6a, 0x6b, 0x78, 0xbf, 0x0c, 0x40, 0xcd, 0x9e, 0x32, 0x21, 0x09, 0xfd, 0x79, 0x45, 0x85, 0x6c, - 0xe0, 0x9c, 0x15, 0x4c, 0xaa, 0xa5, 0x3c, 0x20, 0xad, 0xb1, 0x3d, 0x05, 0x3a, 0x60, 0x5f, 0xb2, - 0x5c, 0xd2, 0xba, 0xd9, 0x4c, 0xe3, 0xef, 0x4c, 0x3c, 0x00, 0x5b, 0xf0, 0x5a, 0x5e, 0xc4, 0x6b, - 0x35, 0xfd, 0xc1, 0x6c, 0xbc, 0x29, 0x73, 0xc9, 0x6b, 0x49, 0xac, 0x06, 0x3a, 0x5a, 0x7b, 0x1f, - 0x60, 0xbf, 0x93, 0x4d, 0x33, 0x26, 0x24, 0xad, 0x3b, 0x39, 0x2f, 0xc1, 0xd6, 0x7d, 0x28, 0x41, - 0xff, 0xe9, 0xb7, 0xe3, 0xbc, 0x47, 0x30, 0xf9, 0x2b, 0x99, 0xa8, 0x78, 0x29, 0xa8, 0x37, 0x81, - 0x87, 0x3a, 0x74, 0x56, 0xa5, 0xcd, 0x8a, 0x74, 0xe0, 0xe8, 0xcd, 0xd7, 0xd7, 0x19, 0x93, 0xdf, - 0xae, 0x62, 0x3f, 0xe1, 0x45, 0xa0, 0x2a, 0xf0, 0x3a, 0x6b, 0x0f, 0xc1, 0xf5, 0xaf, 0x9f, 0xd1, - 0x32, 0xa8, 0xe2, 0x83, 0x8c, 0x07, 0xb7, 0x5f, 
0x43, 0x6c, 0xa9, 0x77, 0x70, 0xf8, 0x27, 0x00, - 0x00, 0xff, 0xff, 0x33, 0x79, 0x69, 0x68, 0x54, 0x03, 0x00, 0x00, + 0x10, 0xc7, 0xb1, 0xd3, 0xd8, 0x61, 0x52, 0x22, 0x6b, 0x15, 0x9a, 0xe5, 0xe3, 0x60, 0x59, 0x1c, + 0x72, 0xa0, 0x36, 0xa4, 0x37, 0x10, 0x48, 0x69, 0x63, 0x41, 0x45, 0x41, 0x68, 0x93, 0x56, 0x82, + 0x4b, 0x65, 0xc7, 0x5b, 0xb3, 0x60, 0x7b, 0xcd, 0xee, 0xf6, 0x90, 0x67, 0xe1, 0x61, 0x78, 0x35, + 0xe4, 0xf5, 0xba, 0xb4, 0xa1, 0x20, 0x4e, 0xde, 0x99, 0xf9, 0x79, 0xe6, 0x3f, 0x33, 0x1a, 0x78, + 0x7c, 0x51, 0x6c, 0x14, 0x65, 0x59, 0x11, 0x25, 0x59, 0xc9, 0xaa, 0xa8, 0x16, 0xfc, 0x2b, 0x5d, + 0xab, 0xb0, 0x16, 0x5c, 0x71, 0x34, 0xea, 0xa2, 0xa1, 0x8e, 0x3e, 0x7c, 0xb4, 0x45, 0xaf, 0x79, + 0x59, 0xf2, 0xaa, 0x85, 0x83, 0xa7, 0xe0, 0x2c, 0x78, 0x99, 0xb0, 0x0a, 0x8d, 0xc0, 0x66, 0x19, + 0xb6, 0x7c, 0x6b, 0x7a, 0x97, 0xd8, 0x2c, 0x43, 0x08, 0x76, 0xaa, 0xa4, 0xa4, 0xd8, 0xd6, 0x1e, + 0xfd, 0x0e, 0x7e, 0xda, 0xe0, 0x7e, 0x6c, 0x8b, 0xfd, 0x0f, 0x8f, 0x9e, 0x81, 0x9b, 0xe9, 0xec, + 0x12, 0xf7, 0xfc, 0xde, 0x74, 0x38, 0xdb, 0x0b, 0x6f, 0x8a, 0x0b, 0xdb, 0xe2, 0xa4, 0xc3, 0x90, + 0x0f, 0xc3, 0x8c, 0xca, 0xb5, 0x60, 0xb5, 0x62, 0xbc, 0xc2, 0x3b, 0x3a, 0xd9, 0x75, 0x17, 0x0a, + 0xc1, 0x29, 0x92, 0x94, 0x16, 0x12, 0xf7, 0x7d, 0xeb, 0xb6, 0x94, 0x27, 0x3a, 0x4a, 0x0c, 0x85, + 0x5e, 0x40, 0x5f, 0xaa, 0x44, 0x51, 0xec, 0xf8, 0xd6, 0x74, 0x34, 0x7b, 0xb2, 0x8d, 0x9b, 0x7e, + 0xba, 0xef, 0xb2, 0x61, 0x49, 0xfb, 0x0b, 0xf2, 0xa0, 0xc7, 0x45, 0x8e, 0x5d, 0xad, 0xa2, 0x79, + 0x06, 0xaf, 0x61, 0xf7, 0x3a, 0x88, 0x00, 0x9c, 0xf9, 0xd1, 0xea, 0xf8, 0x2c, 0xf6, 0xee, 0xa0, + 0x5d, 0x18, 0xcc, 0xc9, 0xd1, 0xdb, 0xe3, 0xb3, 0x78, 0xe1, 0x59, 0x68, 0x0c, 0xde, 0xf2, 0xd3, + 0x72, 0x15, 0xbf, 0x3f, 0x7f, 0x13, 0x7f, 0x88, 0xc9, 0x7c, 0x15, 0x2f, 0x3c, 0x3b, 0x38, 0x85, + 0x81, 0xf9, 0x5f, 0xa2, 0x03, 0x18, 0x98, 0xcd, 0x49, 0x6c, 0xe9, 0xf1, 0x4c, 0xfe, 0x22, 0x8e, + 0x5c, 0x81, 0x68, 0x0c, 0x7d, 0xc5, 0xbf, 0xd1, 0xca, 0xcc, 0xb9, 0x35, 0x82, 0x1f, 0x16, 0x20, + 0xc3, 0x9e, 0x30, 
0xa9, 0x08, 0xfd, 0x7e, 0x49, 0xa5, 0x6a, 0xe0, 0x82, 0x95, 0x4c, 0xe9, 0x35, + 0xdd, 0x23, 0xad, 0x71, 0x7b, 0x0a, 0x84, 0xc1, 0xbd, 0x60, 0x85, 0xa2, 0xa2, 0xd9, 0x55, 0xe3, + 0xef, 0x4c, 0xb4, 0x0f, 0xae, 0xe4, 0x42, 0x9d, 0xa7, 0x1b, 0xbd, 0x8f, 0xe1, 0x6c, 0xbc, 0x2d, + 0x73, 0xc9, 0x85, 0x22, 0x4e, 0x03, 0x1d, 0x6e, 0xba, 0xa1, 0xf5, 0x7f, 0x0f, 0xed, 0x1d, 0xec, + 0x75, 0x8d, 0xd0, 0x9c, 0x49, 0x45, 0x45, 0x27, 0xf0, 0x39, 0xb8, 0xa6, 0x33, 0x2d, 0xf1, 0x1f, + 0x13, 0xe8, 0xb8, 0xe0, 0x01, 0x4c, 0xfe, 0x48, 0x26, 0x6b, 0x5e, 0x49, 0x1a, 0x4c, 0xe0, 0xbe, + 0x09, 0x9d, 0xd6, 0x59, 0xb3, 0x46, 0x13, 0x38, 0x7c, 0xf5, 0xf9, 0x65, 0xce, 0xd4, 0x97, 0xcb, + 0x34, 0x5c, 0xf3, 0x32, 0xd2, 0x15, 0xb8, 0xc8, 0xdb, 0x47, 0x74, 0x75, 0x1e, 0x39, 0xad, 0xa2, + 0x3a, 0xdd, 0xcf, 0x79, 0x74, 0xf3, 0x62, 0x52, 0x47, 0xdf, 0xca, 0xc1, 0xaf, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xed, 0xc0, 0x53, 0xec, 0x78, 0x03, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/admin/project_attributes.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/project_attributes.pb.go index 349d2cfd85..befb738eb7 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/project_attributes.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/project_attributes.pb.go @@ -24,11 +24,13 @@ const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` type ProjectAttributes struct { // Unique project id for which this set of attributes will be applied. 
- Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` - MatchingAttributes *MatchingAttributes `protobuf:"bytes,2,opt,name=matching_attributes,json=matchingAttributes,proto3" json:"matching_attributes,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` + MatchingAttributes *MatchingAttributes `protobuf:"bytes,2,opt,name=matching_attributes,json=matchingAttributes,proto3" json:"matching_attributes,omitempty"` + // Optional, org key applied to the project. + Org string `protobuf:"bytes,3,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ProjectAttributes) Reset() { *m = ProjectAttributes{} } @@ -70,6 +72,13 @@ func (m *ProjectAttributes) GetMatchingAttributes() *MatchingAttributes { return nil } +func (m *ProjectAttributes) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Sets custom attributes for a project // For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` type ProjectAttributesUpdateRequest struct { @@ -152,10 +161,12 @@ type ProjectAttributesGetRequest struct { Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` // Which type of matchable attributes to return. 
// +required - ResourceType MatchableResource `protobuf:"varint,2,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ResourceType MatchableResource `protobuf:"varint,2,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` + // Optional, org key applied to the project. + Org string `protobuf:"bytes,3,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ProjectAttributesGetRequest) Reset() { *m = ProjectAttributesGetRequest{} } @@ -197,6 +208,13 @@ func (m *ProjectAttributesGetRequest) GetResourceType() MatchableResource { return MatchableResource_TASK_RESOURCE } +func (m *ProjectAttributesGetRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Response to get an individual project level attribute override. // For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` type ProjectAttributesGetResponse struct { @@ -246,10 +264,12 @@ type ProjectAttributesDeleteRequest struct { Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` // Which type of matchable attributes to delete. // +required - ResourceType MatchableResource `protobuf:"varint,2,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ResourceType MatchableResource `protobuf:"varint,2,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` + // Optional, org key applied to the project. 
+ Org string `protobuf:"bytes,3,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ProjectAttributesDeleteRequest) Reset() { *m = ProjectAttributesDeleteRequest{} } @@ -291,6 +311,13 @@ func (m *ProjectAttributesDeleteRequest) GetResourceType() MatchableResource { return MatchableResource_TASK_RESOURCE } +func (m *ProjectAttributesDeleteRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Purposefully empty, may be populated in the future. type ProjectAttributesDeleteResponse struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -338,25 +365,26 @@ func init() { } var fileDescriptor_cb8dc9faa9640256 = []byte{ - // 317 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x92, 0x31, 0x4f, 0xc3, 0x30, - 0x10, 0x85, 0x65, 0x06, 0x10, 0x06, 0x2a, 0x11, 0x96, 0x0a, 0x10, 0xb4, 0x5e, 0xe8, 0x42, 0x2c, - 0xc1, 0x88, 0x18, 0x8a, 0x10, 0x4c, 0x95, 0x90, 0x81, 0x85, 0xa5, 0x72, 0xd2, 0x23, 0x0d, 0x4a, - 0x62, 0xe3, 0x5c, 0x86, 0x4c, 0x48, 0xfd, 0xe5, 0x48, 0x4e, 0xdc, 0xa6, 0x4d, 0xba, 0xc1, 0x96, - 0x58, 0xef, 0xde, 0xfb, 0xfc, 0x7c, 0xf4, 0xea, 0x33, 0x29, 0x11, 0xe2, 0x59, 0xc2, 0xe5, 0x2c, - 0x8d, 0x33, 0xae, 0x8d, 0xfa, 0x82, 0x10, 0xa7, 0x12, 0xd1, 0xc4, 0x41, 0x81, 0x90, 0xfb, 0xda, - 0x28, 0x54, 0x5e, 0xcf, 0x09, 0x7d, 0x2b, 0x3c, 0xdd, 0x1c, 0x4c, 0x25, 0x86, 0x73, 0x19, 0x24, - 0x30, 0x35, 0x90, 0xab, 0xc2, 0x84, 0x50, 0x0d, 0xb2, 0x05, 0xa1, 0xc7, 0x2f, 0x95, 0xeb, 0x78, - 0x69, 0xea, 0xf5, 0xe9, 0x5e, 0x1d, 0xd5, 0x27, 0x03, 0x32, 0xda, 0x17, 0xee, 0xd7, 0x7b, 0xa5, - 0x27, 0xd6, 0x2b, 0xce, 0xa2, 0x06, 0x45, 0x7f, 0x67, 0x40, 0x46, 0x07, 0x37, 0xcc, 0x5f, 0xc7, - 0xf0, 0x27, 0xb5, 0x74, 0x65, 0x2d, 0xbc, 0xb4, 0x75, 0xc6, 0x42, 0x7a, 0xd1, 0x62, 0x78, 0xd7, - 0x33, 0x89, 0x20, 0xe0, 0xbb, 0x80, 0x1c, 0xbd, 0x31, 0xa5, 0x8d, 0x34, 0x62, 0xd3, 0x86, 0x9b, - 0x69, 0x2d, 
0x0f, 0xd1, 0x18, 0x62, 0x43, 0x7a, 0xb9, 0x35, 0x24, 0xd7, 0x2a, 0xcb, 0x81, 0xfd, - 0xd0, 0xb3, 0x96, 0xe4, 0x19, 0xd0, 0x41, 0x6c, 0x6f, 0xe5, 0x89, 0x1e, 0xb9, 0x5e, 0xa7, 0x58, - 0x6a, 0xb0, 0x7d, 0xf4, 0xda, 0x84, 0x13, 0xf7, 0x0c, 0xa2, 0x56, 0x8b, 0x43, 0x37, 0xf7, 0x56, - 0x6a, 0x60, 0x92, 0x9e, 0x77, 0x03, 0x54, 0x80, 0x7f, 0x51, 0xc3, 0x82, 0x74, 0x94, 0xfd, 0x08, - 0x09, 0xac, 0xca, 0xfe, 0xff, 0x7b, 0x76, 0xbd, 0x85, 0x63, 0xa8, 0xae, 0xfa, 0x70, 0xff, 0x71, - 0x17, 0xc5, 0x38, 0x2f, 0x02, 0x3f, 0x54, 0x29, 0xb7, 0xfe, 0xca, 0x44, 0xd5, 0x07, 0x5f, 0x6e, - 0x77, 0x04, 0x19, 0xd7, 0xc1, 0x75, 0xa4, 0xf8, 0xfa, 0xc2, 0x07, 0xbb, 0x76, 0xbd, 0x6f, 0x7f, - 0x03, 0x00, 0x00, 0xff, 0xff, 0xda, 0x8d, 0xf5, 0x81, 0x42, 0x03, 0x00, 0x00, + // 332 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x93, 0x4f, 0x4b, 0xc3, 0x40, + 0x10, 0xc5, 0x59, 0x0b, 0x8a, 0xa3, 0x16, 0x8d, 0x97, 0xa0, 0xa2, 0x6d, 0x2e, 0xf6, 0x62, 0x02, + 0x7a, 0x14, 0x0f, 0x15, 0xd1, 0x53, 0x41, 0xa2, 0x5e, 0xbc, 0x94, 0x4d, 0x3a, 0x6e, 0x23, 0x49, + 0x76, 0xdd, 0x4c, 0x0e, 0xfd, 0x18, 0x42, 0x3f, 0xb0, 0xb8, 0xe9, 0xf6, 0x5f, 0xd2, 0x9b, 0xe0, + 0x6d, 0x13, 0xde, 0xbc, 0xf7, 0xdb, 0xb7, 0x0c, 0x5c, 0x7e, 0xa4, 0x13, 0xc2, 0x64, 0x94, 0x06, + 0x7c, 0x94, 0x25, 0x79, 0xa0, 0xb4, 0xfc, 0xc4, 0x98, 0x86, 0x9c, 0x48, 0x27, 0x51, 0x49, 0x58, + 0xf8, 0x4a, 0x4b, 0x92, 0x4e, 0xdb, 0x0a, 0x7d, 0x23, 0x3c, 0x59, 0x1f, 0xcc, 0x38, 0xc5, 0x63, + 0x1e, 0xa5, 0x38, 0xd4, 0x58, 0xc8, 0x52, 0xc7, 0x58, 0x0d, 0x7a, 0x53, 0x06, 0x47, 0xcf, 0x95, + 0x6b, 0x7f, 0x6e, 0xea, 0xb8, 0xb0, 0x33, 0x8b, 0x72, 0x59, 0x87, 0xf5, 0x76, 0x43, 0xfb, 0xe9, + 0xbc, 0xc0, 0xb1, 0xf1, 0x4a, 0x72, 0xb1, 0x44, 0xe1, 0x6e, 0x75, 0x58, 0x6f, 0xef, 0xda, 0xf3, + 0x57, 0x31, 0xfc, 0xc1, 0x4c, 0xba, 0xb0, 0x0e, 0x9d, 0xac, 0xf6, 0xcf, 0x39, 0x84, 0x96, 0xd4, + 0xc2, 0x6d, 0x99, 0xa8, 0xdf, 0xa3, 0x17, 0xc3, 0x79, 0x8d, 0xea, 0x4d, 0x8d, 0x38, 0x61, 0x88, + 0x5f, 0x25, 0x16, 
0xe4, 0xf4, 0x01, 0x96, 0xf2, 0x99, 0xc9, 0xef, 0xae, 0xe7, 0xd7, 0x3c, 0xc2, + 0xa5, 0x21, 0xaf, 0x0b, 0x17, 0x1b, 0x43, 0x0a, 0x25, 0xf3, 0x02, 0xbd, 0x6f, 0x06, 0xa7, 0x35, + 0xcd, 0x13, 0x92, 0xa5, 0xd8, 0x5c, 0xd4, 0x23, 0x1c, 0xd8, 0xaa, 0x87, 0x34, 0x51, 0x68, 0x2a, + 0x6a, 0xd7, 0x11, 0x07, 0xf6, 0x65, 0xc2, 0x99, 0x3a, 0xdc, 0xb7, 0x73, 0xaf, 0x13, 0x85, 0x0d, + 0xdd, 0x70, 0x38, 0x6b, 0x46, 0xaa, 0x98, 0xff, 0xa2, 0x99, 0x29, 0x6b, 0xe8, 0xff, 0x01, 0x53, + 0x5c, 0xf4, 0xff, 0x1f, 0x37, 0x6f, 0x7a, 0x30, 0x4b, 0x55, 0x5d, 0xfe, 0xfe, 0xee, 0xfd, 0x56, + 0x24, 0x34, 0x2e, 0x23, 0x3f, 0x96, 0x59, 0x60, 0x12, 0xa5, 0x16, 0xd5, 0x21, 0x98, 0x2f, 0x85, + 0xc0, 0x3c, 0x50, 0xd1, 0x95, 0x90, 0xc1, 0xea, 0x9e, 0x44, 0xdb, 0x66, 0x2b, 0x6e, 0x7e, 0x02, + 0x00, 0x00, 0xff, 0xff, 0x54, 0xae, 0x47, 0x2e, 0x79, 0x03, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/admin/project_domain_attributes.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/project_domain_attributes.pb.go index 4708ef1e5e..610eb7d08f 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/project_domain_attributes.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/project_domain_attributes.pb.go @@ -26,11 +26,13 @@ type ProjectDomainAttributes struct { // Unique project id for which this set of attributes will be applied. Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` // Unique domain id for which this set of attributes will be applied. 
- Domain string `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` - MatchingAttributes *MatchingAttributes `protobuf:"bytes,3,opt,name=matching_attributes,json=matchingAttributes,proto3" json:"matching_attributes,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Domain string `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` + MatchingAttributes *MatchingAttributes `protobuf:"bytes,3,opt,name=matching_attributes,json=matchingAttributes,proto3" json:"matching_attributes,omitempty"` + // Optional, org key applied to the attributes. + Org string `protobuf:"bytes,4,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ProjectDomainAttributes) Reset() { *m = ProjectDomainAttributes{} } @@ -79,6 +81,13 @@ func (m *ProjectDomainAttributes) GetMatchingAttributes() *MatchingAttributes { return nil } +func (m *ProjectDomainAttributes) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Sets custom attributes for a project-domain combination. // For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` type ProjectDomainAttributesUpdateRequest struct { @@ -164,10 +173,12 @@ type ProjectDomainAttributesGetRequest struct { Domain string `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` // Which type of matchable attributes to return. 
// +required - ResourceType MatchableResource `protobuf:"varint,3,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ResourceType MatchableResource `protobuf:"varint,3,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` + // Optional, org key applied to the attributes. + Org string `protobuf:"bytes,4,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ProjectDomainAttributesGetRequest) Reset() { *m = ProjectDomainAttributesGetRequest{} } @@ -216,6 +227,13 @@ func (m *ProjectDomainAttributesGetRequest) GetResourceType() MatchableResource return MatchableResource_TASK_RESOURCE } +func (m *ProjectDomainAttributesGetRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Response to get an individual project domain attribute override. // For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` type ProjectDomainAttributesGetResponse struct { @@ -268,10 +286,12 @@ type ProjectDomainAttributesDeleteRequest struct { Domain string `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` // Which type of matchable attributes to delete. 
// +required - ResourceType MatchableResource `protobuf:"varint,3,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ResourceType MatchableResource `protobuf:"varint,3,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` + // Optional, org key applied to the attributes. + Org string `protobuf:"bytes,4,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ProjectDomainAttributesDeleteRequest) Reset() { *m = ProjectDomainAttributesDeleteRequest{} } @@ -320,6 +340,13 @@ func (m *ProjectDomainAttributesDeleteRequest) GetResourceType() MatchableResour return MatchableResource_TASK_RESOURCE } +func (m *ProjectDomainAttributesDeleteRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Purposefully empty, may be populated in the future. 
type ProjectDomainAttributesDeleteResponse struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -367,27 +394,28 @@ func init() { } var fileDescriptor_e8ab0b551a649f05 = []byte{ - // 342 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x53, 0x4f, 0x4b, 0xfb, 0x40, - 0x10, 0x65, 0x7f, 0x3f, 0xa8, 0x38, 0x6a, 0x0f, 0x11, 0x34, 0x78, 0x6a, 0x17, 0xa5, 0xbd, 0x98, - 0x40, 0x3d, 0x8a, 0x07, 0xa5, 0xd8, 0x93, 0x20, 0x51, 0x2f, 0x5e, 0x42, 0xfe, 0x8c, 0x69, 0x24, - 0xc9, 0xae, 0xc9, 0xe4, 0xd0, 0x0f, 0x23, 0xf4, 0xa3, 0x8a, 0xbb, 0xd9, 0xfe, 0xd3, 0x28, 0x82, - 0xe0, 0x2d, 0x93, 0xbc, 0x79, 0xef, 0xe5, 0x3d, 0x06, 0x9c, 0xa7, 0x6c, 0x46, 0x98, 0xc6, 0x99, - 0x1b, 0xc4, 0x79, 0x5a, 0xb8, 0xb2, 0x14, 0xcf, 0x18, 0x91, 0x1f, 0x8b, 0x3c, 0x48, 0x0b, 0x3f, - 0x20, 0x2a, 0xd3, 0xb0, 0x26, 0xac, 0x1c, 0x59, 0x0a, 0x12, 0x56, 0xd7, 0xe0, 0x1d, 0x85, 0x3f, - 0x1a, 0x6c, 0xec, 0xe7, 0x01, 0x45, 0xd3, 0x20, 0xcc, 0xd0, 0x2f, 0xb1, 0x12, 0x75, 0x19, 0xa1, - 0x5e, 0xe4, 0x73, 0x06, 0x87, 0xb7, 0x9a, 0x7c, 0xac, 0xb8, 0x2f, 0x17, 0xd4, 0x96, 0x0d, 0x5b, - 0x8d, 0xae, 0xcd, 0x7a, 0x6c, 0xb8, 0xed, 0x99, 0xd1, 0x3a, 0x80, 0x8e, 0x76, 0x62, 0xff, 0x53, - 0x1f, 0x9a, 0xc9, 0xba, 0x83, 0x7d, 0xa5, 0x94, 0x16, 0xc9, 0x8a, 0x47, 0xfb, 0x7f, 0x8f, 0x0d, - 0x77, 0x46, 0xdc, 0x59, 0x37, 0xe9, 0xdc, 0x34, 0xd0, 0xa5, 0xa4, 0x67, 0xe5, 0x1f, 0xde, 0x71, - 0x01, 0xc7, 0x2d, 0x0e, 0x1f, 0x64, 0x1c, 0x10, 0x7a, 0xf8, 0x52, 0x63, 0x45, 0xd6, 0x04, 0x60, - 0x45, 0x93, 0x29, 0xcd, 0xc1, 0xa6, 0x66, 0x0b, 0x93, 0xb7, 0xb2, 0xca, 0x07, 0x70, 0xf2, 0x8d, - 0x60, 0x25, 0x45, 0x51, 0x21, 0x7f, 0x65, 0xd0, 0x6f, 0x41, 0x4e, 0x90, 0x8c, 0xaf, 0x9f, 0xc7, - 0x78, 0x0d, 0x7b, 0xa6, 0x26, 0x9f, 0x66, 0x12, 0x55, 0x80, 0xdd, 0x51, 0xff, 0xd3, 0x00, 0xdf, - 0x5b, 0xf5, 0x1a, 0xb4, 0xb7, 0x6b, 0xf6, 0xee, 0x67, 0x12, 0x79, 0x0e, 0xfc, 0x2b, 0x7b, 0xfa, - 0x2f, 0x7e, 0x2f, 0xb7, 0x39, 0x6b, 0x6d, 0x6a, 0x8c, 0x19, 0x2e, 0x9b, 0xfa, 0xbb, 0x44, 0xda, - 0xab, 
0x35, 0x0e, 0x75, 0x28, 0x57, 0x17, 0x8f, 0xe7, 0x49, 0x4a, 0xd3, 0x3a, 0x74, 0x22, 0x91, - 0xbb, 0x4a, 0x45, 0x94, 0x89, 0x7e, 0x70, 0x17, 0xc7, 0x95, 0x60, 0xe1, 0xca, 0xf0, 0x34, 0x11, - 0xee, 0xfa, 0xbd, 0x85, 0x1d, 0x75, 0x5d, 0x67, 0x6f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xd5, 0x3f, - 0x53, 0x68, 0xc8, 0x03, 0x00, 0x00, + // 355 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x93, 0x4f, 0x4b, 0xc3, 0x40, + 0x10, 0xc5, 0x59, 0x2b, 0x15, 0x47, 0x2d, 0xb2, 0x82, 0x06, 0x4f, 0x6d, 0x50, 0xda, 0x8b, 0x09, + 0xd4, 0xa3, 0x78, 0x50, 0x8a, 0x3d, 0x09, 0x12, 0xf5, 0xe2, 0x25, 0xe4, 0xcf, 0x98, 0x46, 0x92, + 0xec, 0xba, 0xd9, 0x1c, 0xfa, 0xa9, 0x84, 0x7e, 0x42, 0xe9, 0x26, 0xdb, 0xa6, 0xb5, 0x51, 0x04, + 0x0f, 0xde, 0x76, 0x93, 0x99, 0x79, 0xbf, 0x7d, 0x8f, 0x01, 0xeb, 0x35, 0x99, 0x4a, 0x8c, 0xc3, + 0xc4, 0xf6, 0xc2, 0x34, 0xce, 0x6c, 0x2e, 0xd8, 0x1b, 0x06, 0xd2, 0x0d, 0x59, 0xea, 0xc5, 0x99, + 0xeb, 0x49, 0x29, 0x62, 0xbf, 0x90, 0x98, 0x5b, 0x5c, 0x30, 0xc9, 0x68, 0x47, 0xd7, 0x5b, 0xaa, + 0xfe, 0xb4, 0xbf, 0xd6, 0x9f, 0x7a, 0x32, 0x98, 0x78, 0x7e, 0x82, 0xae, 0xc0, 0x9c, 0x15, 0x22, + 0xc0, 0xb2, 0xd1, 0x9c, 0x11, 0x38, 0x79, 0x28, 0x87, 0x8f, 0xd4, 0xec, 0x9b, 0xc5, 0x68, 0x6a, + 0xc0, 0x4e, 0xa5, 0x6b, 0x90, 0x2e, 0x19, 0xec, 0x3a, 0xfa, 0x4a, 0x8f, 0xa1, 0x5d, 0x92, 0x18, + 0x5b, 0xea, 0x47, 0x75, 0xa3, 0x8f, 0x70, 0xa4, 0x94, 0xe2, 0x2c, 0xaa, 0x31, 0x1a, 0xad, 0x2e, + 0x19, 0xec, 0x0d, 0x4d, 0x6b, 0x15, 0xd2, 0xba, 0xaf, 0x4a, 0x97, 0x92, 0x0e, 0x4d, 0xbf, 0x7c, + 0xa3, 0x87, 0xd0, 0x62, 0x22, 0x32, 0xb6, 0x95, 0xd2, 0xfc, 0x68, 0x32, 0x38, 0x6b, 0x60, 0x7e, + 0xe6, 0xa1, 0x27, 0xd1, 0xc1, 0xf7, 0x02, 0x73, 0x49, 0xc7, 0x00, 0x35, 0x0a, 0xa2, 0x28, 0xfa, + 0xeb, 0x14, 0x0d, 0x93, 0x9c, 0x5a, 0xab, 0xd9, 0x87, 0xf3, 0x1f, 0x04, 0x73, 0xce, 0xb2, 0x1c, + 0xcd, 0x0f, 0x02, 0xbd, 0x86, 0xca, 0x31, 0x4a, 0xcd, 0xf5, 0x7b, 0x63, 0xef, 0xe0, 0x40, 0x07, + 0xe7, 0xca, 0x29, 0x47, 0x65, 0x69, 0x67, 0xd8, 0xdb, 
0x68, 0xe9, 0x3c, 0x67, 0xa7, 0xaa, 0x76, + 0xf6, 0x75, 0xdf, 0xd3, 0x94, 0xe3, 0x06, 0x2f, 0x53, 0x30, 0xbf, 0x03, 0x2e, 0xdf, 0xf5, 0x77, + 0x4e, 0xce, 0x48, 0x63, 0x76, 0x23, 0x4c, 0x70, 0x99, 0xdd, 0x7f, 0xf2, 0xa8, 0x39, 0x7e, 0xcd, + 0x5c, 0xda, 0x74, 0x7b, 0xfd, 0x72, 0x15, 0xc5, 0x72, 0x52, 0xf8, 0x56, 0xc0, 0x52, 0x5b, 0xe9, + 0x32, 0x11, 0x95, 0x07, 0x7b, 0xb1, 0x92, 0x11, 0x66, 0x36, 0xf7, 0x2f, 0x22, 0x66, 0xaf, 0x6e, + 0xa9, 0xdf, 0x56, 0x3b, 0x79, 0xf9, 0x19, 0x00, 0x00, 0xff, 0xff, 0x21, 0xef, 0xc1, 0x38, 0xfe, + 0x03, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/admin/workflow_attributes.pb.go b/flyteidl/gen/pb-go/flyteidl/admin/workflow_attributes.pb.go index 5b3d82dce6..7d7b831b1a 100644 --- a/flyteidl/gen/pb-go/flyteidl/admin/workflow_attributes.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/admin/workflow_attributes.pb.go @@ -28,11 +28,13 @@ type WorkflowAttributes struct { // Unique domain id for which this set of attributes will be applied. Domain string `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` // Workflow name for which this set of attributes will be applied. - Workflow string `protobuf:"bytes,3,opt,name=workflow,proto3" json:"workflow,omitempty"` - MatchingAttributes *MatchingAttributes `protobuf:"bytes,4,opt,name=matching_attributes,json=matchingAttributes,proto3" json:"matching_attributes,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Workflow string `protobuf:"bytes,3,opt,name=workflow,proto3" json:"workflow,omitempty"` + MatchingAttributes *MatchingAttributes `protobuf:"bytes,4,opt,name=matching_attributes,json=matchingAttributes,proto3" json:"matching_attributes,omitempty"` + // Optional, org key applied to the attributes. 
+ Org string `protobuf:"bytes,5,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *WorkflowAttributes) Reset() { *m = WorkflowAttributes{} } @@ -88,6 +90,13 @@ func (m *WorkflowAttributes) GetMatchingAttributes() *MatchingAttributes { return nil } +func (m *WorkflowAttributes) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Sets custom attributes for a project, domain and workflow combination. // For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` type WorkflowAttributesUpdateRequest struct { @@ -175,10 +184,12 @@ type WorkflowAttributesGetRequest struct { Workflow string `protobuf:"bytes,3,opt,name=workflow,proto3" json:"workflow,omitempty"` // Which type of matchable attributes to return. // +required - ResourceType MatchableResource `protobuf:"varint,4,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ResourceType MatchableResource `protobuf:"varint,4,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` + // Optional, org key applied to the attributes. + Org string `protobuf:"bytes,5,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *WorkflowAttributesGetRequest) Reset() { *m = WorkflowAttributesGetRequest{} } @@ -234,6 +245,13 @@ func (m *WorkflowAttributesGetRequest) GetResourceType() MatchableResource { return MatchableResource_TASK_RESOURCE } +func (m *WorkflowAttributesGetRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Response to get an individual workflow attribute override. 
type WorkflowAttributesGetResponse struct { Attributes *WorkflowAttributes `protobuf:"bytes,1,opt,name=attributes,proto3" json:"attributes,omitempty"` @@ -288,10 +306,12 @@ type WorkflowAttributesDeleteRequest struct { Workflow string `protobuf:"bytes,3,opt,name=workflow,proto3" json:"workflow,omitempty"` // Which type of matchable attributes to delete. // +required - ResourceType MatchableResource `protobuf:"varint,4,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ResourceType MatchableResource `protobuf:"varint,4,opt,name=resource_type,json=resourceType,proto3,enum=flyteidl.admin.MatchableResource" json:"resource_type,omitempty"` + // Optional, org key applied to the attributes. + Org string `protobuf:"bytes,5,opt,name=org,proto3" json:"org,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *WorkflowAttributesDeleteRequest) Reset() { *m = WorkflowAttributesDeleteRequest{} } @@ -347,6 +367,13 @@ func (m *WorkflowAttributesDeleteRequest) GetResourceType() MatchableResource { return MatchableResource_TASK_RESOURCE } +func (m *WorkflowAttributesDeleteRequest) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Purposefully empty, may be populated in the future. 
type WorkflowAttributesDeleteResponse struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -394,27 +421,28 @@ func init() { } var fileDescriptor_8ba8a51ab86bc38c = []byte{ - // 349 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x93, 0x4f, 0x4b, 0xc3, 0x40, - 0x10, 0xc5, 0x59, 0x95, 0xaa, 0xa3, 0xf6, 0xb0, 0x82, 0x84, 0xa2, 0x58, 0xf7, 0x62, 0x2f, 0x26, - 0x50, 0x8f, 0xe2, 0xc1, 0x22, 0x7a, 0xf2, 0x12, 0x15, 0xc1, 0x4b, 0xc9, 0x9f, 0x69, 0x1a, 0x4d, - 0xb2, 0xeb, 0x66, 0x43, 0xe9, 0xf7, 0x12, 0xfa, 0xf5, 0xc4, 0x4d, 0xb6, 0x7f, 0x4c, 0x7a, 0xeb, - 0xc1, 0x5b, 0xa7, 0xfb, 0x66, 0xe6, 0x97, 0xf7, 0x18, 0xe8, 0x8d, 0x92, 0xa9, 0xc2, 0x38, 0x4c, - 0x1c, 0x2f, 0x4c, 0xe3, 0xcc, 0x99, 0x70, 0xf9, 0x39, 0x4a, 0xf8, 0x64, 0xe8, 0x29, 0x25, 0x63, - 0xbf, 0x50, 0x98, 0xdb, 0x42, 0x72, 0xc5, 0x69, 0xdb, 0x28, 0x6d, 0xad, 0xec, 0x5c, 0xfe, 0xe9, - 0x4c, 0x3d, 0x15, 0x8c, 0x3d, 0x3f, 0xc1, 0xa1, 0xc4, 0x9c, 0x17, 0x32, 0xc0, 0xb2, 0x91, 0xcd, - 0x08, 0xd0, 0xb7, 0x6a, 0xec, 0xdd, 0x7c, 0x2a, 0xb5, 0x60, 0x57, 0x48, 0xfe, 0x81, 0x81, 0xb2, - 0x48, 0x97, 0xf4, 0xf6, 0x5d, 0x53, 0xd2, 0x13, 0x68, 0x85, 0x3c, 0xf5, 0xe2, 0xcc, 0xda, 0xd2, - 0x0f, 0x55, 0x45, 0x3b, 0xb0, 0x67, 0xf0, 0xac, 0x6d, 0xfd, 0x32, 0xaf, 0xe9, 0x33, 0x1c, 0x6b, - 0x80, 0x38, 0x8b, 0x96, 0xd0, 0xad, 0x9d, 0x2e, 0xe9, 0x1d, 0xf4, 0x99, 0xbd, 0xca, 0x6e, 0x3f, - 0x55, 0xd2, 0x05, 0x8e, 0x4b, 0xd3, 0xda, 0x7f, 0x0c, 0xe1, 0xbc, 0x0e, 0xfe, 0x2a, 0x42, 0x4f, - 0xa1, 0x8b, 0x5f, 0x05, 0xe6, 0x8a, 0x0e, 0x00, 0x96, 0xd6, 0x91, 0xe6, 0x75, 0xf5, 0x21, 0xee, - 0x52, 0x17, 0x63, 0xd0, 0x5d, 0xbf, 0x26, 0x17, 0x3c, 0xcb, 0x91, 0x7d, 0x13, 0x38, 0xad, 0x8b, - 0x1e, 0x51, 0x19, 0x90, 0xcd, 0xda, 0xf9, 0x00, 0x47, 0x26, 0xc5, 0xa1, 0x9a, 0x0a, 0xd4, 0x46, - 0xb6, 0xfb, 0x17, 0x8d, 0x46, 0xfe, 0x86, 0xee, 0x56, 0x6a, 0xf7, 0xd0, 0xf4, 0xbd, 0x4c, 0x05, - 0xb2, 0x00, 0xce, 0xd6, 0x50, 0x97, 0xdf, 0xb5, 0x11, 0xff, 0x66, 0xa4, 0x29, 0xa7, 0x7b, 0x4c, - 0x70, 0x91, 
0xd3, 0xff, 0xb4, 0xa7, 0x31, 0x79, 0x03, 0x5e, 0x3a, 0x34, 0xb8, 0x7d, 0xbf, 0x89, - 0x62, 0x35, 0x2e, 0x7c, 0x3b, 0xe0, 0xa9, 0xa3, 0x17, 0x70, 0x19, 0x95, 0x3f, 0x9c, 0xf9, 0x0d, - 0x46, 0x98, 0x39, 0xc2, 0xbf, 0x8a, 0xb8, 0xb3, 0x7a, 0x96, 0x7e, 0x4b, 0x1f, 0xe1, 0xf5, 0x4f, - 0x00, 0x00, 0x00, 0xff, 0xff, 0x90, 0x9e, 0xe0, 0xf5, 0xe9, 0x03, 0x00, 0x00, + // 362 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x53, 0x4d, 0x4b, 0xc3, 0x40, + 0x10, 0x65, 0xad, 0x56, 0x1d, 0xb5, 0xc8, 0x0a, 0x12, 0x8a, 0x62, 0xcd, 0xc5, 0x5e, 0x4c, 0xa0, + 0x1e, 0xc5, 0x83, 0x45, 0xf4, 0xe4, 0x25, 0x2a, 0x82, 0x97, 0x92, 0x8f, 0x69, 0x1a, 0x4d, 0xb2, + 0xeb, 0x66, 0x43, 0xe9, 0x1f, 0x14, 0xfa, 0xaf, 0xa4, 0x9b, 0x6c, 0x3f, 0x4c, 0x7a, 0xeb, 0xc5, + 0xdb, 0xce, 0xee, 0x9b, 0x99, 0xf7, 0xde, 0xec, 0x40, 0x77, 0x18, 0x4f, 0x24, 0x46, 0x41, 0x6c, + 0xbb, 0x41, 0x12, 0xa5, 0xf6, 0x98, 0x89, 0xaf, 0x61, 0xcc, 0xc6, 0x03, 0x57, 0x4a, 0x11, 0x79, + 0xb9, 0xc4, 0xcc, 0xe2, 0x82, 0x49, 0x46, 0x5b, 0x1a, 0x69, 0x29, 0x64, 0xfb, 0xea, 0x4f, 0x66, + 0xe2, 0x4a, 0x7f, 0xe4, 0x7a, 0x31, 0x0e, 0x04, 0x66, 0x2c, 0x17, 0x3e, 0x16, 0x89, 0xe6, 0x94, + 0x00, 0x7d, 0x2f, 0xcb, 0xde, 0xcf, 0xab, 0x52, 0x03, 0x76, 0xb9, 0x60, 0x9f, 0xe8, 0x4b, 0x83, + 0x74, 0x48, 0x77, 0xdf, 0xd1, 0x21, 0x3d, 0x85, 0x66, 0xc0, 0x12, 0x37, 0x4a, 0x8d, 0x2d, 0xf5, + 0x50, 0x46, 0xb4, 0x0d, 0x7b, 0x9a, 0x9e, 0xd1, 0x50, 0x2f, 0xf3, 0x98, 0xbe, 0xc0, 0x89, 0x22, + 0x10, 0xa5, 0xe1, 0x12, 0x75, 0x63, 0xbb, 0x43, 0xba, 0x07, 0x3d, 0xd3, 0x5a, 0xe5, 0x6e, 0x3d, + 0x97, 0xd0, 0x05, 0x1d, 0x87, 0x26, 0x95, 0x3b, 0x7a, 0x0c, 0x0d, 0x26, 0x42, 0x63, 0x47, 0xf5, + 0x9a, 0x1d, 0x4d, 0x84, 0x8b, 0xaa, 0x94, 0x37, 0x1e, 0xb8, 0x12, 0x1d, 0xfc, 0xce, 0x31, 0x93, + 0xb4, 0x0f, 0xb0, 0x44, 0x80, 0xd4, 0x13, 0xa8, 0x16, 0x71, 0x96, 0xb2, 0x4c, 0x13, 0x3a, 0xeb, + 0xdb, 0x64, 0x9c, 0xa5, 0x19, 0x9a, 0x3f, 0x04, 0xce, 0xaa, 0xa0, 0x27, 0x94, 0x9a, 0xc8, 0x66, + 0x0d, 0x7e, 0x84, 
0x23, 0x3d, 0xd7, 0x81, 0x9c, 0x70, 0x54, 0xd6, 0xb6, 0x7a, 0x97, 0xb5, 0xd6, + 0xce, 0xbe, 0x81, 0x53, 0xa2, 0x9d, 0x43, 0x9d, 0xf7, 0x3a, 0xe1, 0x58, 0xe3, 0xa9, 0x0f, 0xe7, + 0x6b, 0x74, 0x14, 0x4a, 0x37, 0xe2, 0xe8, 0x94, 0xd4, 0x4d, 0xee, 0x01, 0x63, 0x5c, 0x4c, 0xee, + 0xbf, 0x18, 0x56, 0xfb, 0x3b, 0xb4, 0x94, 0xc2, 0xb3, 0xfe, 0xdd, 0xc7, 0x6d, 0x18, 0xc9, 0x51, + 0xee, 0x59, 0x3e, 0x4b, 0x6c, 0xd5, 0x92, 0x89, 0xb0, 0x38, 0xd8, 0xf3, 0xcd, 0x0d, 0x31, 0xb5, + 0xb9, 0x77, 0x1d, 0x32, 0x7b, 0x75, 0x99, 0xbd, 0xa6, 0x5a, 0xdd, 0x9b, 0xdf, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xf4, 0x79, 0x1c, 0x80, 0x1f, 0x04, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/artifact/artifacts.pb.go b/flyteidl/gen/pb-go/flyteidl/artifact/artifacts.pb.go deleted file mode 100644 index 1145a30d04..0000000000 --- a/flyteidl/gen/pb-go/flyteidl/artifact/artifacts.pb.go +++ /dev/null @@ -1,1762 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: flyteidl/artifact/artifacts.proto - -package artifact - -import ( - context "context" - fmt "fmt" - admin "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin" - core "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" - _ "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event" - proto "github.com/golang/protobuf/proto" - any "github.com/golang/protobuf/ptypes/any" - _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" - math "math" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package - -type FindByWorkflowExecRequest_Direction int32 - -const ( - FindByWorkflowExecRequest_INPUTS FindByWorkflowExecRequest_Direction = 0 - FindByWorkflowExecRequest_OUTPUTS FindByWorkflowExecRequest_Direction = 1 -) - -var FindByWorkflowExecRequest_Direction_name = map[int32]string{ - 0: "INPUTS", - 1: "OUTPUTS", -} - -var FindByWorkflowExecRequest_Direction_value = map[string]int32{ - "INPUTS": 0, - "OUTPUTS": 1, -} - -func (x FindByWorkflowExecRequest_Direction) String() string { - return proto.EnumName(FindByWorkflowExecRequest_Direction_name, int32(x)) -} - -func (FindByWorkflowExecRequest_Direction) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{10, 0} -} - -type Artifact struct { - ArtifactId *core.ArtifactID `protobuf:"bytes,1,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"` - Spec *ArtifactSpec `protobuf:"bytes,2,opt,name=spec,proto3" json:"spec,omitempty"` - // references the tag field in ArtifactTag - Tags []string `protobuf:"bytes,3,rep,name=tags,proto3" json:"tags,omitempty"` - Source *ArtifactSource `protobuf:"bytes,4,opt,name=source,proto3" json:"source,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Artifact) Reset() { *m = Artifact{} } -func (m *Artifact) String() string { return proto.CompactTextString(m) } -func (*Artifact) ProtoMessage() {} -func (*Artifact) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{0} -} - -func (m *Artifact) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Artifact.Unmarshal(m, b) -} -func (m *Artifact) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Artifact.Marshal(b, m, deterministic) -} -func (m *Artifact) XXX_Merge(src proto.Message) { - xxx_messageInfo_Artifact.Merge(m, src) -} -func (m *Artifact) XXX_Size() int { - 
return xxx_messageInfo_Artifact.Size(m) -} -func (m *Artifact) XXX_DiscardUnknown() { - xxx_messageInfo_Artifact.DiscardUnknown(m) -} - -var xxx_messageInfo_Artifact proto.InternalMessageInfo - -func (m *Artifact) GetArtifactId() *core.ArtifactID { - if m != nil { - return m.ArtifactId - } - return nil -} - -func (m *Artifact) GetSpec() *ArtifactSpec { - if m != nil { - return m.Spec - } - return nil -} - -func (m *Artifact) GetTags() []string { - if m != nil { - return m.Tags - } - return nil -} - -func (m *Artifact) GetSource() *ArtifactSource { - if m != nil { - return m.Source - } - return nil -} - -type CreateArtifactRequest struct { - // Specify just project/domain on creation - ArtifactKey *core.ArtifactKey `protobuf:"bytes,1,opt,name=artifact_key,json=artifactKey,proto3" json:"artifact_key,omitempty"` - Version string `protobuf:"bytes,3,opt,name=version,proto3" json:"version,omitempty"` - Spec *ArtifactSpec `protobuf:"bytes,2,opt,name=spec,proto3" json:"spec,omitempty"` - Partitions map[string]string `protobuf:"bytes,4,rep,name=partitions,proto3" json:"partitions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - Tag string `protobuf:"bytes,5,opt,name=tag,proto3" json:"tag,omitempty"` - Source *ArtifactSource `protobuf:"bytes,6,opt,name=source,proto3" json:"source,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CreateArtifactRequest) Reset() { *m = CreateArtifactRequest{} } -func (m *CreateArtifactRequest) String() string { return proto.CompactTextString(m) } -func (*CreateArtifactRequest) ProtoMessage() {} -func (*CreateArtifactRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{1} -} - -func (m *CreateArtifactRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CreateArtifactRequest.Unmarshal(m, b) -} -func (m *CreateArtifactRequest) XXX_Marshal(b []byte, 
deterministic bool) ([]byte, error) { - return xxx_messageInfo_CreateArtifactRequest.Marshal(b, m, deterministic) -} -func (m *CreateArtifactRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_CreateArtifactRequest.Merge(m, src) -} -func (m *CreateArtifactRequest) XXX_Size() int { - return xxx_messageInfo_CreateArtifactRequest.Size(m) -} -func (m *CreateArtifactRequest) XXX_DiscardUnknown() { - xxx_messageInfo_CreateArtifactRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_CreateArtifactRequest proto.InternalMessageInfo - -func (m *CreateArtifactRequest) GetArtifactKey() *core.ArtifactKey { - if m != nil { - return m.ArtifactKey - } - return nil -} - -func (m *CreateArtifactRequest) GetVersion() string { - if m != nil { - return m.Version - } - return "" -} - -func (m *CreateArtifactRequest) GetSpec() *ArtifactSpec { - if m != nil { - return m.Spec - } - return nil -} - -func (m *CreateArtifactRequest) GetPartitions() map[string]string { - if m != nil { - return m.Partitions - } - return nil -} - -func (m *CreateArtifactRequest) GetTag() string { - if m != nil { - return m.Tag - } - return "" -} - -func (m *CreateArtifactRequest) GetSource() *ArtifactSource { - if m != nil { - return m.Source - } - return nil -} - -type ArtifactSource struct { - WorkflowExecution *core.WorkflowExecutionIdentifier `protobuf:"bytes,1,opt,name=workflow_execution,json=workflowExecution,proto3" json:"workflow_execution,omitempty"` - NodeId string `protobuf:"bytes,2,opt,name=node_id,json=nodeId,proto3" json:"node_id,omitempty"` - TaskId *core.Identifier `protobuf:"bytes,3,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` - RetryAttempt uint32 `protobuf:"varint,4,opt,name=retry_attempt,json=retryAttempt,proto3" json:"retry_attempt,omitempty"` - // Uploads, either from the UI or from the CLI, or FlyteRemote, will have this. 
- Principal string `protobuf:"bytes,5,opt,name=principal,proto3" json:"principal,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ArtifactSource) Reset() { *m = ArtifactSource{} } -func (m *ArtifactSource) String() string { return proto.CompactTextString(m) } -func (*ArtifactSource) ProtoMessage() {} -func (*ArtifactSource) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{2} -} - -func (m *ArtifactSource) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ArtifactSource.Unmarshal(m, b) -} -func (m *ArtifactSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ArtifactSource.Marshal(b, m, deterministic) -} -func (m *ArtifactSource) XXX_Merge(src proto.Message) { - xxx_messageInfo_ArtifactSource.Merge(m, src) -} -func (m *ArtifactSource) XXX_Size() int { - return xxx_messageInfo_ArtifactSource.Size(m) -} -func (m *ArtifactSource) XXX_DiscardUnknown() { - xxx_messageInfo_ArtifactSource.DiscardUnknown(m) -} - -var xxx_messageInfo_ArtifactSource proto.InternalMessageInfo - -func (m *ArtifactSource) GetWorkflowExecution() *core.WorkflowExecutionIdentifier { - if m != nil { - return m.WorkflowExecution - } - return nil -} - -func (m *ArtifactSource) GetNodeId() string { - if m != nil { - return m.NodeId - } - return "" -} - -func (m *ArtifactSource) GetTaskId() *core.Identifier { - if m != nil { - return m.TaskId - } - return nil -} - -func (m *ArtifactSource) GetRetryAttempt() uint32 { - if m != nil { - return m.RetryAttempt - } - return 0 -} - -func (m *ArtifactSource) GetPrincipal() string { - if m != nil { - return m.Principal - } - return "" -} - -type ArtifactSpec struct { - Value *core.Literal `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` - // This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but - // forgets to change the 
name, that is okay. And the reason why this is a separate field is because adding the - // type to all Literals is a lot of work. - Type *core.LiteralType `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"` - ShortDescription string `protobuf:"bytes,3,opt,name=short_description,json=shortDescription,proto3" json:"short_description,omitempty"` - // Additional user metadata - UserMetadata *any.Any `protobuf:"bytes,4,opt,name=user_metadata,json=userMetadata,proto3" json:"user_metadata,omitempty"` - MetadataType string `protobuf:"bytes,5,opt,name=metadata_type,json=metadataType,proto3" json:"metadata_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ArtifactSpec) Reset() { *m = ArtifactSpec{} } -func (m *ArtifactSpec) String() string { return proto.CompactTextString(m) } -func (*ArtifactSpec) ProtoMessage() {} -func (*ArtifactSpec) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{3} -} - -func (m *ArtifactSpec) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ArtifactSpec.Unmarshal(m, b) -} -func (m *ArtifactSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ArtifactSpec.Marshal(b, m, deterministic) -} -func (m *ArtifactSpec) XXX_Merge(src proto.Message) { - xxx_messageInfo_ArtifactSpec.Merge(m, src) -} -func (m *ArtifactSpec) XXX_Size() int { - return xxx_messageInfo_ArtifactSpec.Size(m) -} -func (m *ArtifactSpec) XXX_DiscardUnknown() { - xxx_messageInfo_ArtifactSpec.DiscardUnknown(m) -} - -var xxx_messageInfo_ArtifactSpec proto.InternalMessageInfo - -func (m *ArtifactSpec) GetValue() *core.Literal { - if m != nil { - return m.Value - } - return nil -} - -func (m *ArtifactSpec) GetType() *core.LiteralType { - if m != nil { - return m.Type - } - return nil -} - -func (m *ArtifactSpec) GetShortDescription() string { - if m != nil { - return m.ShortDescription - } - return "" -} - -func (m 
*ArtifactSpec) GetUserMetadata() *any.Any { - if m != nil { - return m.UserMetadata - } - return nil -} - -func (m *ArtifactSpec) GetMetadataType() string { - if m != nil { - return m.MetadataType - } - return "" -} - -type CreateArtifactResponse struct { - Artifact *Artifact `protobuf:"bytes,1,opt,name=artifact,proto3" json:"artifact,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CreateArtifactResponse) Reset() { *m = CreateArtifactResponse{} } -func (m *CreateArtifactResponse) String() string { return proto.CompactTextString(m) } -func (*CreateArtifactResponse) ProtoMessage() {} -func (*CreateArtifactResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{4} -} - -func (m *CreateArtifactResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CreateArtifactResponse.Unmarshal(m, b) -} -func (m *CreateArtifactResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CreateArtifactResponse.Marshal(b, m, deterministic) -} -func (m *CreateArtifactResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CreateArtifactResponse.Merge(m, src) -} -func (m *CreateArtifactResponse) XXX_Size() int { - return xxx_messageInfo_CreateArtifactResponse.Size(m) -} -func (m *CreateArtifactResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CreateArtifactResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CreateArtifactResponse proto.InternalMessageInfo - -func (m *CreateArtifactResponse) GetArtifact() *Artifact { - if m != nil { - return m.Artifact - } - return nil -} - -type GetArtifactRequest struct { - Query *core.ArtifactQuery `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` - // If false, then long_description is not returned. 
- Details bool `protobuf:"varint,2,opt,name=details,proto3" json:"details,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetArtifactRequest) Reset() { *m = GetArtifactRequest{} } -func (m *GetArtifactRequest) String() string { return proto.CompactTextString(m) } -func (*GetArtifactRequest) ProtoMessage() {} -func (*GetArtifactRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{5} -} - -func (m *GetArtifactRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetArtifactRequest.Unmarshal(m, b) -} -func (m *GetArtifactRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetArtifactRequest.Marshal(b, m, deterministic) -} -func (m *GetArtifactRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetArtifactRequest.Merge(m, src) -} -func (m *GetArtifactRequest) XXX_Size() int { - return xxx_messageInfo_GetArtifactRequest.Size(m) -} -func (m *GetArtifactRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetArtifactRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetArtifactRequest proto.InternalMessageInfo - -func (m *GetArtifactRequest) GetQuery() *core.ArtifactQuery { - if m != nil { - return m.Query - } - return nil -} - -func (m *GetArtifactRequest) GetDetails() bool { - if m != nil { - return m.Details - } - return false -} - -type GetArtifactResponse struct { - Artifact *Artifact `protobuf:"bytes,1,opt,name=artifact,proto3" json:"artifact,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetArtifactResponse) Reset() { *m = GetArtifactResponse{} } -func (m *GetArtifactResponse) String() string { return proto.CompactTextString(m) } -func (*GetArtifactResponse) ProtoMessage() {} -func (*GetArtifactResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{6} -} - 
-func (m *GetArtifactResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetArtifactResponse.Unmarshal(m, b) -} -func (m *GetArtifactResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetArtifactResponse.Marshal(b, m, deterministic) -} -func (m *GetArtifactResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetArtifactResponse.Merge(m, src) -} -func (m *GetArtifactResponse) XXX_Size() int { - return xxx_messageInfo_GetArtifactResponse.Size(m) -} -func (m *GetArtifactResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetArtifactResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_GetArtifactResponse proto.InternalMessageInfo - -func (m *GetArtifactResponse) GetArtifact() *Artifact { - if m != nil { - return m.Artifact - } - return nil -} - -type SearchOptions struct { - // If true, this means a strict partition search. meaning if you don't specify the partition - // field, that will mean, non-partitioned, rather than any partition. - StrictPartitions bool `protobuf:"varint,1,opt,name=strict_partitions,json=strictPartitions,proto3" json:"strict_partitions,omitempty"` - // If true, only one artifact per key will be returned. It will be the latest one by creation time. 
- LatestByKey bool `protobuf:"varint,2,opt,name=latest_by_key,json=latestByKey,proto3" json:"latest_by_key,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *SearchOptions) Reset() { *m = SearchOptions{} } -func (m *SearchOptions) String() string { return proto.CompactTextString(m) } -func (*SearchOptions) ProtoMessage() {} -func (*SearchOptions) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{7} -} - -func (m *SearchOptions) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_SearchOptions.Unmarshal(m, b) -} -func (m *SearchOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_SearchOptions.Marshal(b, m, deterministic) -} -func (m *SearchOptions) XXX_Merge(src proto.Message) { - xxx_messageInfo_SearchOptions.Merge(m, src) -} -func (m *SearchOptions) XXX_Size() int { - return xxx_messageInfo_SearchOptions.Size(m) -} -func (m *SearchOptions) XXX_DiscardUnknown() { - xxx_messageInfo_SearchOptions.DiscardUnknown(m) -} - -var xxx_messageInfo_SearchOptions proto.InternalMessageInfo - -func (m *SearchOptions) GetStrictPartitions() bool { - if m != nil { - return m.StrictPartitions - } - return false -} - -func (m *SearchOptions) GetLatestByKey() bool { - if m != nil { - return m.LatestByKey - } - return false -} - -type SearchArtifactsRequest struct { - ArtifactKey *core.ArtifactKey `protobuf:"bytes,1,opt,name=artifact_key,json=artifactKey,proto3" json:"artifact_key,omitempty"` - Partitions *core.Partitions `protobuf:"bytes,2,opt,name=partitions,proto3" json:"partitions,omitempty"` - Principal string `protobuf:"bytes,3,opt,name=principal,proto3" json:"principal,omitempty"` - Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` - Options *SearchOptions `protobuf:"bytes,5,opt,name=options,proto3" json:"options,omitempty"` - Token string `protobuf:"bytes,6,opt,name=token,proto3" 
json:"token,omitempty"` - Limit int32 `protobuf:"varint,7,opt,name=limit,proto3" json:"limit,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *SearchArtifactsRequest) Reset() { *m = SearchArtifactsRequest{} } -func (m *SearchArtifactsRequest) String() string { return proto.CompactTextString(m) } -func (*SearchArtifactsRequest) ProtoMessage() {} -func (*SearchArtifactsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{8} -} - -func (m *SearchArtifactsRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_SearchArtifactsRequest.Unmarshal(m, b) -} -func (m *SearchArtifactsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_SearchArtifactsRequest.Marshal(b, m, deterministic) -} -func (m *SearchArtifactsRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_SearchArtifactsRequest.Merge(m, src) -} -func (m *SearchArtifactsRequest) XXX_Size() int { - return xxx_messageInfo_SearchArtifactsRequest.Size(m) -} -func (m *SearchArtifactsRequest) XXX_DiscardUnknown() { - xxx_messageInfo_SearchArtifactsRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_SearchArtifactsRequest proto.InternalMessageInfo - -func (m *SearchArtifactsRequest) GetArtifactKey() *core.ArtifactKey { - if m != nil { - return m.ArtifactKey - } - return nil -} - -func (m *SearchArtifactsRequest) GetPartitions() *core.Partitions { - if m != nil { - return m.Partitions - } - return nil -} - -func (m *SearchArtifactsRequest) GetPrincipal() string { - if m != nil { - return m.Principal - } - return "" -} - -func (m *SearchArtifactsRequest) GetVersion() string { - if m != nil { - return m.Version - } - return "" -} - -func (m *SearchArtifactsRequest) GetOptions() *SearchOptions { - if m != nil { - return m.Options - } - return nil -} - -func (m *SearchArtifactsRequest) GetToken() string { - if m != nil { - return m.Token - } - return 
"" -} - -func (m *SearchArtifactsRequest) GetLimit() int32 { - if m != nil { - return m.Limit - } - return 0 -} - -type SearchArtifactsResponse struct { - // If artifact specs are not requested, the resultant artifacts may be empty. - Artifacts []*Artifact `protobuf:"bytes,1,rep,name=artifacts,proto3" json:"artifacts,omitempty"` - // continuation token if relevant. - Token string `protobuf:"bytes,2,opt,name=token,proto3" json:"token,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *SearchArtifactsResponse) Reset() { *m = SearchArtifactsResponse{} } -func (m *SearchArtifactsResponse) String() string { return proto.CompactTextString(m) } -func (*SearchArtifactsResponse) ProtoMessage() {} -func (*SearchArtifactsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{9} -} - -func (m *SearchArtifactsResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_SearchArtifactsResponse.Unmarshal(m, b) -} -func (m *SearchArtifactsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_SearchArtifactsResponse.Marshal(b, m, deterministic) -} -func (m *SearchArtifactsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_SearchArtifactsResponse.Merge(m, src) -} -func (m *SearchArtifactsResponse) XXX_Size() int { - return xxx_messageInfo_SearchArtifactsResponse.Size(m) -} -func (m *SearchArtifactsResponse) XXX_DiscardUnknown() { - xxx_messageInfo_SearchArtifactsResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_SearchArtifactsResponse proto.InternalMessageInfo - -func (m *SearchArtifactsResponse) GetArtifacts() []*Artifact { - if m != nil { - return m.Artifacts - } - return nil -} - -func (m *SearchArtifactsResponse) GetToken() string { - if m != nil { - return m.Token - } - return "" -} - -type FindByWorkflowExecRequest struct { - ExecId *core.WorkflowExecutionIdentifier 
`protobuf:"bytes,1,opt,name=exec_id,json=execId,proto3" json:"exec_id,omitempty"` - Direction FindByWorkflowExecRequest_Direction `protobuf:"varint,2,opt,name=direction,proto3,enum=flyteidl.artifact.FindByWorkflowExecRequest_Direction" json:"direction,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *FindByWorkflowExecRequest) Reset() { *m = FindByWorkflowExecRequest{} } -func (m *FindByWorkflowExecRequest) String() string { return proto.CompactTextString(m) } -func (*FindByWorkflowExecRequest) ProtoMessage() {} -func (*FindByWorkflowExecRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{10} -} - -func (m *FindByWorkflowExecRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_FindByWorkflowExecRequest.Unmarshal(m, b) -} -func (m *FindByWorkflowExecRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_FindByWorkflowExecRequest.Marshal(b, m, deterministic) -} -func (m *FindByWorkflowExecRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_FindByWorkflowExecRequest.Merge(m, src) -} -func (m *FindByWorkflowExecRequest) XXX_Size() int { - return xxx_messageInfo_FindByWorkflowExecRequest.Size(m) -} -func (m *FindByWorkflowExecRequest) XXX_DiscardUnknown() { - xxx_messageInfo_FindByWorkflowExecRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_FindByWorkflowExecRequest proto.InternalMessageInfo - -func (m *FindByWorkflowExecRequest) GetExecId() *core.WorkflowExecutionIdentifier { - if m != nil { - return m.ExecId - } - return nil -} - -func (m *FindByWorkflowExecRequest) GetDirection() FindByWorkflowExecRequest_Direction { - if m != nil { - return m.Direction - } - return FindByWorkflowExecRequest_INPUTS -} - -// Aliases identify a particular version of an artifact. They are different than tags in that they -// have to be unique for a given artifact project/domain/name. 
That is, for a given project/domain/name/kind, -// at most one version can have any given value at any point. -type AddTagRequest struct { - ArtifactId *core.ArtifactID `protobuf:"bytes,1,opt,name=artifact_id,json=artifactId,proto3" json:"artifact_id,omitempty"` - Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` - // If true, and another version already has the specified kind/value, set this version instead - Overwrite bool `protobuf:"varint,3,opt,name=overwrite,proto3" json:"overwrite,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *AddTagRequest) Reset() { *m = AddTagRequest{} } -func (m *AddTagRequest) String() string { return proto.CompactTextString(m) } -func (*AddTagRequest) ProtoMessage() {} -func (*AddTagRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{11} -} - -func (m *AddTagRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AddTagRequest.Unmarshal(m, b) -} -func (m *AddTagRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AddTagRequest.Marshal(b, m, deterministic) -} -func (m *AddTagRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_AddTagRequest.Merge(m, src) -} -func (m *AddTagRequest) XXX_Size() int { - return xxx_messageInfo_AddTagRequest.Size(m) -} -func (m *AddTagRequest) XXX_DiscardUnknown() { - xxx_messageInfo_AddTagRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_AddTagRequest proto.InternalMessageInfo - -func (m *AddTagRequest) GetArtifactId() *core.ArtifactID { - if m != nil { - return m.ArtifactId - } - return nil -} - -func (m *AddTagRequest) GetValue() string { - if m != nil { - return m.Value - } - return "" -} - -func (m *AddTagRequest) GetOverwrite() bool { - if m != nil { - return m.Overwrite - } - return false -} - -type AddTagResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte 
`json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *AddTagResponse) Reset() { *m = AddTagResponse{} } -func (m *AddTagResponse) String() string { return proto.CompactTextString(m) } -func (*AddTagResponse) ProtoMessage() {} -func (*AddTagResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{12} -} - -func (m *AddTagResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AddTagResponse.Unmarshal(m, b) -} -func (m *AddTagResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AddTagResponse.Marshal(b, m, deterministic) -} -func (m *AddTagResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_AddTagResponse.Merge(m, src) -} -func (m *AddTagResponse) XXX_Size() int { - return xxx_messageInfo_AddTagResponse.Size(m) -} -func (m *AddTagResponse) XXX_DiscardUnknown() { - xxx_messageInfo_AddTagResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_AddTagResponse proto.InternalMessageInfo - -type CreateTriggerRequest struct { - TriggerLaunchPlan *admin.LaunchPlan `protobuf:"bytes,1,opt,name=trigger_launch_plan,json=triggerLaunchPlan,proto3" json:"trigger_launch_plan,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CreateTriggerRequest) Reset() { *m = CreateTriggerRequest{} } -func (m *CreateTriggerRequest) String() string { return proto.CompactTextString(m) } -func (*CreateTriggerRequest) ProtoMessage() {} -func (*CreateTriggerRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{13} -} - -func (m *CreateTriggerRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CreateTriggerRequest.Unmarshal(m, b) -} -func (m *CreateTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CreateTriggerRequest.Marshal(b, m, deterministic) -} -func (m *CreateTriggerRequest) XXX_Merge(src proto.Message) { - 
xxx_messageInfo_CreateTriggerRequest.Merge(m, src) -} -func (m *CreateTriggerRequest) XXX_Size() int { - return xxx_messageInfo_CreateTriggerRequest.Size(m) -} -func (m *CreateTriggerRequest) XXX_DiscardUnknown() { - xxx_messageInfo_CreateTriggerRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_CreateTriggerRequest proto.InternalMessageInfo - -func (m *CreateTriggerRequest) GetTriggerLaunchPlan() *admin.LaunchPlan { - if m != nil { - return m.TriggerLaunchPlan - } - return nil -} - -type CreateTriggerResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CreateTriggerResponse) Reset() { *m = CreateTriggerResponse{} } -func (m *CreateTriggerResponse) String() string { return proto.CompactTextString(m) } -func (*CreateTriggerResponse) ProtoMessage() {} -func (*CreateTriggerResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{14} -} - -func (m *CreateTriggerResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CreateTriggerResponse.Unmarshal(m, b) -} -func (m *CreateTriggerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CreateTriggerResponse.Marshal(b, m, deterministic) -} -func (m *CreateTriggerResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_CreateTriggerResponse.Merge(m, src) -} -func (m *CreateTriggerResponse) XXX_Size() int { - return xxx_messageInfo_CreateTriggerResponse.Size(m) -} -func (m *CreateTriggerResponse) XXX_DiscardUnknown() { - xxx_messageInfo_CreateTriggerResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_CreateTriggerResponse proto.InternalMessageInfo - -type DeleteTriggerRequest struct { - TriggerId *core.Identifier `protobuf:"bytes,1,opt,name=trigger_id,json=triggerId,proto3" json:"trigger_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m 
*DeleteTriggerRequest) Reset() { *m = DeleteTriggerRequest{} } -func (m *DeleteTriggerRequest) String() string { return proto.CompactTextString(m) } -func (*DeleteTriggerRequest) ProtoMessage() {} -func (*DeleteTriggerRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{15} -} - -func (m *DeleteTriggerRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DeleteTriggerRequest.Unmarshal(m, b) -} -func (m *DeleteTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DeleteTriggerRequest.Marshal(b, m, deterministic) -} -func (m *DeleteTriggerRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_DeleteTriggerRequest.Merge(m, src) -} -func (m *DeleteTriggerRequest) XXX_Size() int { - return xxx_messageInfo_DeleteTriggerRequest.Size(m) -} -func (m *DeleteTriggerRequest) XXX_DiscardUnknown() { - xxx_messageInfo_DeleteTriggerRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_DeleteTriggerRequest proto.InternalMessageInfo - -func (m *DeleteTriggerRequest) GetTriggerId() *core.Identifier { - if m != nil { - return m.TriggerId - } - return nil -} - -type DeleteTriggerResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *DeleteTriggerResponse) Reset() { *m = DeleteTriggerResponse{} } -func (m *DeleteTriggerResponse) String() string { return proto.CompactTextString(m) } -func (*DeleteTriggerResponse) ProtoMessage() {} -func (*DeleteTriggerResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{16} -} - -func (m *DeleteTriggerResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_DeleteTriggerResponse.Unmarshal(m, b) -} -func (m *DeleteTriggerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_DeleteTriggerResponse.Marshal(b, m, deterministic) -} -func (m *DeleteTriggerResponse) XXX_Merge(src proto.Message) 
{ - xxx_messageInfo_DeleteTriggerResponse.Merge(m, src) -} -func (m *DeleteTriggerResponse) XXX_Size() int { - return xxx_messageInfo_DeleteTriggerResponse.Size(m) -} -func (m *DeleteTriggerResponse) XXX_DiscardUnknown() { - xxx_messageInfo_DeleteTriggerResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_DeleteTriggerResponse proto.InternalMessageInfo - -type ArtifactProducer struct { - // These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in - // Admin's domain. - EntityId *core.Identifier `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` - Outputs *core.VariableMap `protobuf:"bytes,2,opt,name=outputs,proto3" json:"outputs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ArtifactProducer) Reset() { *m = ArtifactProducer{} } -func (m *ArtifactProducer) String() string { return proto.CompactTextString(m) } -func (*ArtifactProducer) ProtoMessage() {} -func (*ArtifactProducer) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{17} -} - -func (m *ArtifactProducer) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ArtifactProducer.Unmarshal(m, b) -} -func (m *ArtifactProducer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ArtifactProducer.Marshal(b, m, deterministic) -} -func (m *ArtifactProducer) XXX_Merge(src proto.Message) { - xxx_messageInfo_ArtifactProducer.Merge(m, src) -} -func (m *ArtifactProducer) XXX_Size() int { - return xxx_messageInfo_ArtifactProducer.Size(m) -} -func (m *ArtifactProducer) XXX_DiscardUnknown() { - xxx_messageInfo_ArtifactProducer.DiscardUnknown(m) -} - -var xxx_messageInfo_ArtifactProducer proto.InternalMessageInfo - -func (m *ArtifactProducer) GetEntityId() *core.Identifier { - if m != nil { - return m.EntityId - } - return nil -} - -func (m *ArtifactProducer) GetOutputs() 
*core.VariableMap { - if m != nil { - return m.Outputs - } - return nil -} - -type RegisterProducerRequest struct { - Producers []*ArtifactProducer `protobuf:"bytes,1,rep,name=producers,proto3" json:"producers,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RegisterProducerRequest) Reset() { *m = RegisterProducerRequest{} } -func (m *RegisterProducerRequest) String() string { return proto.CompactTextString(m) } -func (*RegisterProducerRequest) ProtoMessage() {} -func (*RegisterProducerRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{18} -} - -func (m *RegisterProducerRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RegisterProducerRequest.Unmarshal(m, b) -} -func (m *RegisterProducerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RegisterProducerRequest.Marshal(b, m, deterministic) -} -func (m *RegisterProducerRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_RegisterProducerRequest.Merge(m, src) -} -func (m *RegisterProducerRequest) XXX_Size() int { - return xxx_messageInfo_RegisterProducerRequest.Size(m) -} -func (m *RegisterProducerRequest) XXX_DiscardUnknown() { - xxx_messageInfo_RegisterProducerRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_RegisterProducerRequest proto.InternalMessageInfo - -func (m *RegisterProducerRequest) GetProducers() []*ArtifactProducer { - if m != nil { - return m.Producers - } - return nil -} - -type ArtifactConsumer struct { - // These should all be launch plan IDs - EntityId *core.Identifier `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` - Inputs *core.ParameterMap `protobuf:"bytes,2,opt,name=inputs,proto3" json:"inputs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ArtifactConsumer) Reset() { *m = 
ArtifactConsumer{} } -func (m *ArtifactConsumer) String() string { return proto.CompactTextString(m) } -func (*ArtifactConsumer) ProtoMessage() {} -func (*ArtifactConsumer) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{19} -} - -func (m *ArtifactConsumer) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ArtifactConsumer.Unmarshal(m, b) -} -func (m *ArtifactConsumer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ArtifactConsumer.Marshal(b, m, deterministic) -} -func (m *ArtifactConsumer) XXX_Merge(src proto.Message) { - xxx_messageInfo_ArtifactConsumer.Merge(m, src) -} -func (m *ArtifactConsumer) XXX_Size() int { - return xxx_messageInfo_ArtifactConsumer.Size(m) -} -func (m *ArtifactConsumer) XXX_DiscardUnknown() { - xxx_messageInfo_ArtifactConsumer.DiscardUnknown(m) -} - -var xxx_messageInfo_ArtifactConsumer proto.InternalMessageInfo - -func (m *ArtifactConsumer) GetEntityId() *core.Identifier { - if m != nil { - return m.EntityId - } - return nil -} - -func (m *ArtifactConsumer) GetInputs() *core.ParameterMap { - if m != nil { - return m.Inputs - } - return nil -} - -type RegisterConsumerRequest struct { - Consumers []*ArtifactConsumer `protobuf:"bytes,1,rep,name=consumers,proto3" json:"consumers,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RegisterConsumerRequest) Reset() { *m = RegisterConsumerRequest{} } -func (m *RegisterConsumerRequest) String() string { return proto.CompactTextString(m) } -func (*RegisterConsumerRequest) ProtoMessage() {} -func (*RegisterConsumerRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{20} -} - -func (m *RegisterConsumerRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RegisterConsumerRequest.Unmarshal(m, b) -} -func (m *RegisterConsumerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - 
return xxx_messageInfo_RegisterConsumerRequest.Marshal(b, m, deterministic) -} -func (m *RegisterConsumerRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_RegisterConsumerRequest.Merge(m, src) -} -func (m *RegisterConsumerRequest) XXX_Size() int { - return xxx_messageInfo_RegisterConsumerRequest.Size(m) -} -func (m *RegisterConsumerRequest) XXX_DiscardUnknown() { - xxx_messageInfo_RegisterConsumerRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_RegisterConsumerRequest proto.InternalMessageInfo - -func (m *RegisterConsumerRequest) GetConsumers() []*ArtifactConsumer { - if m != nil { - return m.Consumers - } - return nil -} - -type RegisterResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RegisterResponse) Reset() { *m = RegisterResponse{} } -func (m *RegisterResponse) String() string { return proto.CompactTextString(m) } -func (*RegisterResponse) ProtoMessage() {} -func (*RegisterResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{21} -} - -func (m *RegisterResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RegisterResponse.Unmarshal(m, b) -} -func (m *RegisterResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RegisterResponse.Marshal(b, m, deterministic) -} -func (m *RegisterResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_RegisterResponse.Merge(m, src) -} -func (m *RegisterResponse) XXX_Size() int { - return xxx_messageInfo_RegisterResponse.Size(m) -} -func (m *RegisterResponse) XXX_DiscardUnknown() { - xxx_messageInfo_RegisterResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_RegisterResponse proto.InternalMessageInfo - -type ExecutionInputsRequest struct { - ExecutionId *core.WorkflowExecutionIdentifier `protobuf:"bytes,1,opt,name=execution_id,json=executionId,proto3" json:"execution_id,omitempty"` - // can make this a map in the future, currently no 
need. - Inputs []*core.ArtifactID `protobuf:"bytes,2,rep,name=inputs,proto3" json:"inputs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ExecutionInputsRequest) Reset() { *m = ExecutionInputsRequest{} } -func (m *ExecutionInputsRequest) String() string { return proto.CompactTextString(m) } -func (*ExecutionInputsRequest) ProtoMessage() {} -func (*ExecutionInputsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_804518da5936dedb, []int{22} -} - -func (m *ExecutionInputsRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ExecutionInputsRequest.Unmarshal(m, b) -} -func (m *ExecutionInputsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ExecutionInputsRequest.Marshal(b, m, deterministic) -} -func (m *ExecutionInputsRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_ExecutionInputsRequest.Merge(m, src) -} -func (m *ExecutionInputsRequest) XXX_Size() int { - return xxx_messageInfo_ExecutionInputsRequest.Size(m) -} -func (m *ExecutionInputsRequest) XXX_DiscardUnknown() { - xxx_messageInfo_ExecutionInputsRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_ExecutionInputsRequest proto.InternalMessageInfo - -func (m *ExecutionInputsRequest) GetExecutionId() *core.WorkflowExecutionIdentifier { - if m != nil { - return m.ExecutionId - } - return nil -} - -func (m *ExecutionInputsRequest) GetInputs() []*core.ArtifactID { - if m != nil { - return m.Inputs - } - return nil -} - -type ExecutionInputsResponse struct { - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *ExecutionInputsResponse) Reset() { *m = ExecutionInputsResponse{} } -func (m *ExecutionInputsResponse) String() string { return proto.CompactTextString(m) } -func (*ExecutionInputsResponse) ProtoMessage() {} -func (*ExecutionInputsResponse) Descriptor() ([]byte, []int) { - 
return fileDescriptor_804518da5936dedb, []int{23} -} - -func (m *ExecutionInputsResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_ExecutionInputsResponse.Unmarshal(m, b) -} -func (m *ExecutionInputsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_ExecutionInputsResponse.Marshal(b, m, deterministic) -} -func (m *ExecutionInputsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_ExecutionInputsResponse.Merge(m, src) -} -func (m *ExecutionInputsResponse) XXX_Size() int { - return xxx_messageInfo_ExecutionInputsResponse.Size(m) -} -func (m *ExecutionInputsResponse) XXX_DiscardUnknown() { - xxx_messageInfo_ExecutionInputsResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_ExecutionInputsResponse proto.InternalMessageInfo - -func init() { - proto.RegisterEnum("flyteidl.artifact.FindByWorkflowExecRequest_Direction", FindByWorkflowExecRequest_Direction_name, FindByWorkflowExecRequest_Direction_value) - proto.RegisterType((*Artifact)(nil), "flyteidl.artifact.Artifact") - proto.RegisterType((*CreateArtifactRequest)(nil), "flyteidl.artifact.CreateArtifactRequest") - proto.RegisterMapType((map[string]string)(nil), "flyteidl.artifact.CreateArtifactRequest.PartitionsEntry") - proto.RegisterType((*ArtifactSource)(nil), "flyteidl.artifact.ArtifactSource") - proto.RegisterType((*ArtifactSpec)(nil), "flyteidl.artifact.ArtifactSpec") - proto.RegisterType((*CreateArtifactResponse)(nil), "flyteidl.artifact.CreateArtifactResponse") - proto.RegisterType((*GetArtifactRequest)(nil), "flyteidl.artifact.GetArtifactRequest") - proto.RegisterType((*GetArtifactResponse)(nil), "flyteidl.artifact.GetArtifactResponse") - proto.RegisterType((*SearchOptions)(nil), "flyteidl.artifact.SearchOptions") - proto.RegisterType((*SearchArtifactsRequest)(nil), "flyteidl.artifact.SearchArtifactsRequest") - proto.RegisterType((*SearchArtifactsResponse)(nil), "flyteidl.artifact.SearchArtifactsResponse") - 
proto.RegisterType((*FindByWorkflowExecRequest)(nil), "flyteidl.artifact.FindByWorkflowExecRequest") - proto.RegisterType((*AddTagRequest)(nil), "flyteidl.artifact.AddTagRequest") - proto.RegisterType((*AddTagResponse)(nil), "flyteidl.artifact.AddTagResponse") - proto.RegisterType((*CreateTriggerRequest)(nil), "flyteidl.artifact.CreateTriggerRequest") - proto.RegisterType((*CreateTriggerResponse)(nil), "flyteidl.artifact.CreateTriggerResponse") - proto.RegisterType((*DeleteTriggerRequest)(nil), "flyteidl.artifact.DeleteTriggerRequest") - proto.RegisterType((*DeleteTriggerResponse)(nil), "flyteidl.artifact.DeleteTriggerResponse") - proto.RegisterType((*ArtifactProducer)(nil), "flyteidl.artifact.ArtifactProducer") - proto.RegisterType((*RegisterProducerRequest)(nil), "flyteidl.artifact.RegisterProducerRequest") - proto.RegisterType((*ArtifactConsumer)(nil), "flyteidl.artifact.ArtifactConsumer") - proto.RegisterType((*RegisterConsumerRequest)(nil), "flyteidl.artifact.RegisterConsumerRequest") - proto.RegisterType((*RegisterResponse)(nil), "flyteidl.artifact.RegisterResponse") - proto.RegisterType((*ExecutionInputsRequest)(nil), "flyteidl.artifact.ExecutionInputsRequest") - proto.RegisterType((*ExecutionInputsResponse)(nil), "flyteidl.artifact.ExecutionInputsResponse") -} - -func init() { proto.RegisterFile("flyteidl/artifact/artifacts.proto", fileDescriptor_804518da5936dedb) } - -var fileDescriptor_804518da5936dedb = []byte{ - // 1635 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x58, 0xcb, 0x6f, 0xdb, 0x46, - 0x1a, 0x37, 0x2d, 0x59, 0xb2, 0x3e, 0x59, 0x8e, 0x3d, 0xf1, 0xda, 0xb2, 0x92, 0xdd, 0x28, 0xcc, - 0x3e, 0x9c, 0x6c, 0x56, 0x44, 0x9c, 0x45, 0x36, 0x0e, 0x90, 0x45, 0x9d, 0x38, 0x2d, 0x54, 0xe7, - 0xe1, 0xd0, 0x4e, 0x1f, 0x46, 0x50, 0x75, 0x44, 0x8e, 0x65, 0xc6, 0x14, 0xc9, 0x0c, 0x47, 0x76, - 0x89, 0x20, 0x48, 0x51, 0xa0, 0xa7, 0x1e, 0x7b, 0x6a, 0xd1, 0xa2, 0x39, 0xf5, 0xd8, 0x43, 0x81, - 0x00, 0xfd, 
0x1b, 0x7a, 0x6d, 0x81, 0x5e, 0x7a, 0xec, 0x3f, 0x51, 0xf4, 0x52, 0x70, 0xc8, 0xe1, - 0x43, 0xa2, 0x64, 0x3b, 0xe9, 0xa1, 0x37, 0xce, 0x37, 0xbf, 0xef, 0x39, 0xdf, 0x4b, 0x82, 0xb3, - 0x3b, 0xa6, 0xc7, 0x88, 0xa1, 0x9b, 0x0a, 0xa6, 0xcc, 0xd8, 0xc1, 0x1a, 0x8b, 0x3e, 0xdc, 0x86, - 0x43, 0x6d, 0x66, 0xa3, 0x59, 0x01, 0x69, 0x88, 0x9b, 0xda, 0x62, 0xc7, 0xb6, 0x3b, 0x26, 0x51, - 0x38, 0xa0, 0xdd, 0xdb, 0x51, 0xb0, 0xe5, 0x05, 0xe8, 0xda, 0xe9, 0xf0, 0x0a, 0x3b, 0x86, 0x82, - 0x2d, 0xcb, 0x66, 0x98, 0x19, 0xb6, 0x15, 0xca, 0xaa, 0xd5, 0x63, 0x75, 0x7a, 0xd7, 0xb0, 0x14, - 0x13, 0xf7, 0x2c, 0x6d, 0xb7, 0xe5, 0x98, 0xd8, 0x12, 0xfc, 0x11, 0x42, 0xb3, 0x29, 0x51, 0x4c, - 0x83, 0x11, 0x8a, 0x4d, 0xc1, 0xbf, 0x98, 0xbe, 0x65, 0x9e, 0x43, 0xc4, 0xd5, 0xdf, 0xd2, 0x57, - 0x86, 0x4e, 0x2c, 0x66, 0xec, 0x18, 0x84, 0x86, 0xf7, 0x67, 0xd2, 0xf7, 0xc2, 0x97, 0x96, 0xa1, - 0x87, 0x80, 0xbf, 0xf6, 0x09, 0xb0, 0x18, 0xa1, 0x3b, 0x58, 0x23, 0x03, 0xa6, 0x93, 0x7d, 0x62, - 0x31, 0x45, 0x33, 0xed, 0x9e, 0xce, 0x3f, 0x43, 0x0b, 0xe4, 0xef, 0x25, 0x98, 0x5c, 0x0d, 0xc5, - 0xa2, 0x6b, 0x50, 0x4e, 0xa8, 0xa8, 0x4a, 0x75, 0x69, 0xa9, 0xbc, 0xbc, 0xd8, 0x88, 0x62, 0xe9, - 0xeb, 0x68, 0x08, 0x74, 0x73, 0x4d, 0x05, 0x81, 0x6e, 0xea, 0xe8, 0x32, 0xe4, 0x5d, 0x87, 0x68, - 0xd5, 0x71, 0xce, 0x74, 0xa6, 0x31, 0xf0, 0x00, 0x11, 0xe3, 0xa6, 0x43, 0x34, 0x95, 0x83, 0x11, - 0x82, 0x3c, 0xc3, 0x1d, 0xb7, 0x9a, 0xab, 0xe7, 0x96, 0x4a, 0x2a, 0xff, 0x46, 0x2b, 0x50, 0x70, - 0xed, 0x1e, 0xd5, 0x48, 0x35, 0xcf, 0x45, 0x9d, 0x1d, 0x25, 0x8a, 0x03, 0xd5, 0x90, 0x41, 0xfe, - 0x24, 0x07, 0x7f, 0xb9, 0x49, 0x09, 0x66, 0x44, 0x00, 0x54, 0xf2, 0xb8, 0x47, 0x5c, 0x86, 0xae, - 0xc3, 0x54, 0xe4, 0xd9, 0x1e, 0xf1, 0x42, 0xd7, 0x6a, 0x43, 0x5c, 0x5b, 0x27, 0x9e, 0x1a, 0x45, - 0x62, 0x9d, 0x78, 0xa8, 0x0a, 0xc5, 0x7d, 0x42, 0x5d, 0xc3, 0xb6, 0xaa, 0xb9, 0xba, 0xb4, 0x54, - 0x52, 0xc5, 0xf1, 0xe5, 0xdc, 0x7e, 0x07, 0xc0, 0xf1, 0xef, 0x79, 0x96, 0x55, 0xf3, 0xf5, 0xdc, - 0x52, 0x79, 0xf9, 0x6a, 0x06, 0x6b, 0xa6, 0x2f, 
0x8d, 0x8d, 0x88, 0xf5, 0x96, 0xc5, 0xa8, 0xa7, - 0x26, 0x64, 0xa1, 0x19, 0xc8, 0x31, 0xdc, 0xa9, 0x4e, 0x70, 0x23, 0xfd, 0xcf, 0x44, 0x38, 0x0b, - 0xc7, 0x0c, 0x67, 0xed, 0x3a, 0x9c, 0xe8, 0xd3, 0xe5, 0xcb, 0x17, 0xe1, 0x2b, 0xa9, 0xfe, 0x27, - 0x9a, 0x83, 0x89, 0x7d, 0x6c, 0xf6, 0x08, 0x8f, 0x40, 0x49, 0x0d, 0x0e, 0xd7, 0xc6, 0xaf, 0x4a, - 0xf2, 0x6f, 0x12, 0x4c, 0xa7, 0x25, 0xa3, 0x77, 0x01, 0x1d, 0xd8, 0x74, 0x6f, 0xc7, 0xb4, 0x0f, - 0x5a, 0xe4, 0x03, 0xa2, 0xf5, 0x7c, 0xd1, 0xe1, 0x63, 0x5c, 0xe8, 0x7b, 0x8c, 0xb7, 0x43, 0xe0, - 0x2d, 0x81, 0x6b, 0x46, 0xd5, 0xa1, 0xce, 0x1e, 0xf4, 0x5f, 0xa2, 0x05, 0x28, 0x5a, 0xb6, 0x4e, - 0xfc, 0xbc, 0x0d, 0x2c, 0x29, 0xf8, 0xc7, 0xa6, 0x8e, 0x96, 0xa1, 0xc8, 0xb0, 0xbb, 0xe7, 0x5f, - 0xe4, 0x32, 0x13, 0x3a, 0x21, 0xb7, 0xe0, 0x23, 0x9b, 0x3a, 0x3a, 0x07, 0x15, 0x4a, 0x18, 0xf5, - 0x5a, 0x98, 0x31, 0xd2, 0x75, 0x18, 0x4f, 0xc5, 0x8a, 0x3a, 0xc5, 0x89, 0xab, 0x01, 0x0d, 0x9d, - 0x86, 0x92, 0x43, 0x0d, 0x4b, 0x33, 0x1c, 0x6c, 0x86, 0x11, 0x8f, 0x09, 0xf2, 0xaf, 0x12, 0x4c, - 0x25, 0x9f, 0x1e, 0x5d, 0x14, 0x81, 0x0a, 0xdc, 0x9d, 0xef, 0xb3, 0xe2, 0x76, 0xd0, 0x34, 0xc2, - 0x00, 0xa2, 0x06, 0xe4, 0xfd, 0x46, 0x11, 0xe6, 0x55, 0x2d, 0x1b, 0xbc, 0xe5, 0x39, 0x44, 0xe5, - 0x38, 0xf4, 0x6f, 0x98, 0x75, 0x77, 0x6d, 0xca, 0x5a, 0x3a, 0x71, 0x35, 0x6a, 0x38, 0x2c, 0xce, - 0xd5, 0x19, 0x7e, 0xb1, 0x16, 0xd3, 0xd1, 0x0a, 0x54, 0x7a, 0x2e, 0xa1, 0xad, 0x2e, 0x61, 0x58, - 0xc7, 0x0c, 0x87, 0x95, 0x36, 0xd7, 0x08, 0xfa, 0x60, 0x43, 0xb4, 0xc8, 0xc6, 0xaa, 0xe5, 0xa9, - 0x53, 0x3e, 0xf4, 0x4e, 0x88, 0xf4, 0x23, 0x23, 0xb8, 0x5a, 0xdc, 0xc0, 0xc0, 0xf1, 0x29, 0x41, - 0xf4, 0x4d, 0x92, 0xef, 0xc3, 0x7c, 0x7f, 0xea, 0xba, 0x8e, 0x6d, 0xb9, 0x04, 0xfd, 0x0f, 0x26, - 0x45, 0xd6, 0x85, 0x71, 0x38, 0x35, 0x22, 0x1f, 0xd5, 0x08, 0x2c, 0xb7, 0x01, 0xbd, 0x41, 0x58, - 0x7f, 0x59, 0x2f, 0xc3, 0xc4, 0xe3, 0x1e, 0xa1, 0xa2, 0x9e, 0x4f, 0x0f, 0xa9, 0xe7, 0xfb, 0x3e, - 0x46, 0x0d, 0xa0, 0x7e, 0x2d, 0xeb, 0x84, 0x61, 0xc3, 0x74, 0x79, 0x70, 0x27, 0x55, 
0x71, 0x94, - 0xef, 0xc2, 0xc9, 0x94, 0x8e, 0x57, 0xb5, 0xf9, 0x7d, 0xa8, 0x6c, 0x12, 0x4c, 0xb5, 0xdd, 0x7b, - 0x4e, 0x50, 0x9d, 0xfe, 0x23, 0x31, 0x6a, 0x68, 0xac, 0x95, 0x28, 0x7f, 0x89, 0x1b, 0x31, 0x13, - 0x5c, 0xc4, 0xf5, 0x86, 0x64, 0xa8, 0x98, 0x98, 0x11, 0x97, 0xb5, 0xda, 0x1e, 0xef, 0x59, 0x81, - 0xb5, 0xe5, 0x80, 0x78, 0xc3, 0x5b, 0x27, 0x9e, 0xfc, 0xed, 0x38, 0xcc, 0x07, 0x2a, 0x84, 0x7a, - 0xf7, 0x0f, 0xea, 0x78, 0x2b, 0xa9, 0x16, 0x35, 0x9e, 0x59, 0x38, 0xb1, 0xb1, 0xa9, 0x1e, 0x94, - 0xaa, 0x8b, 0x5c, 0x5f, 0x5d, 0x24, 0x5b, 0x69, 0x3e, 0xdd, 0x4a, 0xaf, 0x41, 0xd1, 0x0e, 0x02, - 0xc5, 0x93, 0xaa, 0xbc, 0x5c, 0xcf, 0x08, 0x73, 0x2a, 0xa0, 0xaa, 0x60, 0xf0, 0xbb, 0x10, 0xb3, - 0xf7, 0x88, 0xc5, 0x9b, 0x5c, 0x49, 0x0d, 0x0e, 0x3e, 0xd5, 0x34, 0xba, 0x06, 0xab, 0x16, 0xeb, - 0xd2, 0xd2, 0x84, 0x1a, 0x1c, 0xe4, 0x47, 0xb0, 0x30, 0x10, 0xb3, 0xf0, 0xa9, 0x57, 0xa0, 0x14, - 0x6d, 0x12, 0x55, 0x89, 0xf7, 0xe5, 0x91, 0x6f, 0x1d, 0xa3, 0x63, 0x0b, 0xc6, 0x13, 0x16, 0xc8, - 0x3f, 0x4b, 0xb0, 0xf8, 0xba, 0x61, 0xe9, 0x37, 0xbc, 0x64, 0x3b, 0x13, 0x6f, 0x74, 0x13, 0x8a, - 0x7e, 0x17, 0x8c, 0x67, 0xed, 0x71, 0x7a, 0x60, 0xc1, 0x67, 0x6d, 0xea, 0x68, 0x0b, 0x4a, 0xba, - 0x41, 0x89, 0xc6, 0x2b, 0xde, 0x57, 0x3e, 0xbd, 0x7c, 0x25, 0xc3, 0xe6, 0xa1, 0x56, 0x34, 0xd6, - 0x04, 0xb7, 0x1a, 0x0b, 0x92, 0xff, 0x0e, 0xa5, 0x88, 0x8e, 0x00, 0x0a, 0xcd, 0xbb, 0x1b, 0x0f, - 0xb6, 0x36, 0x67, 0xc6, 0x50, 0x19, 0x8a, 0xf7, 0x1e, 0x6c, 0xf1, 0x83, 0x24, 0x3f, 0x83, 0xca, - 0xaa, 0xae, 0x6f, 0xe1, 0x8e, 0xf0, 0xe8, 0x55, 0x36, 0x88, 0xcc, 0x49, 0xe2, 0x67, 0x93, 0xbd, - 0x4f, 0xe8, 0x01, 0x35, 0x18, 0xe1, 0xd9, 0x34, 0xa9, 0xc6, 0x04, 0x79, 0x06, 0xa6, 0x85, 0x01, - 0xc1, 0x13, 0xca, 0x6d, 0x98, 0x0b, 0x7a, 0xcf, 0x16, 0x35, 0x3a, 0x1d, 0x42, 0x85, 0x65, 0x6f, - 0xc2, 0x49, 0x16, 0x50, 0x5a, 0x89, 0x05, 0x6e, 0xb0, 0x2c, 0xf8, 0x8e, 0xd7, 0xb8, 0xcd, 0x21, - 0x1b, 0x26, 0xb6, 0xd4, 0xd9, 0x90, 0x2d, 0x26, 0xc9, 0x0b, 0x62, 0xcd, 0x88, 0x74, 0x84, 0xca, - 0x37, 0x60, 0x6e, 0x8d, 
0x98, 0x64, 0x40, 0xf9, 0x55, 0x00, 0xa1, 0x7c, 0x68, 0x54, 0x12, 0x4f, - 0x5b, 0x0a, 0xc1, 0x4d, 0xdd, 0x57, 0xd5, 0x27, 0x31, 0x54, 0xf5, 0xa1, 0x04, 0x33, 0x22, 0x90, - 0x1b, 0xd4, 0xd6, 0x7b, 0x1a, 0xa1, 0xe8, 0x0a, 0x94, 0x7c, 0x21, 0xcc, 0x3b, 0x92, 0x9a, 0xc9, - 0x00, 0xdb, 0xd4, 0xd1, 0x7f, 0xa1, 0x68, 0xf7, 0x98, 0xd3, 0x63, 0xee, 0x90, 0x81, 0xf3, 0x16, - 0xa6, 0x06, 0x6e, 0x9b, 0xe4, 0x0e, 0x76, 0x54, 0x01, 0x95, 0x1f, 0xc2, 0x82, 0x4a, 0x3a, 0x86, - 0xcb, 0x08, 0x15, 0x16, 0x08, 0x87, 0x57, 0xfd, 0x1e, 0x10, 0x90, 0x44, 0x21, 0x9d, 0x1b, 0x51, - 0x48, 0x11, 0x7b, 0xcc, 0x25, 0x3f, 0x8b, 0xfd, 0xbb, 0x69, 0x5b, 0x6e, 0xaf, 0xfb, 0x0a, 0xfe, - 0x5d, 0x86, 0x82, 0x61, 0x25, 0xdc, 0x3b, 0x35, 0xd8, 0xc9, 0x70, 0x97, 0x30, 0x42, 0x7d, 0xff, - 0x42, 0x68, 0xd2, 0x3d, 0x61, 0x40, 0xc2, 0x3d, 0x2d, 0x24, 0x1d, 0xc5, 0xbd, 0x88, 0x3d, 0xe6, - 0x92, 0x11, 0xcc, 0x08, 0xe9, 0xd1, 0x9b, 0x7e, 0x2e, 0xc1, 0x7c, 0x5c, 0xea, 0xdc, 0x0a, 0xa1, - 0xf1, 0x0e, 0x4c, 0x45, 0x0b, 0xd3, 0xcb, 0xf5, 0x8b, 0x32, 0x89, 0x89, 0xe8, 0x52, 0x22, 0x20, - 0xb9, 0xd1, 0x25, 0x2a, 0xc2, 0xb1, 0x08, 0x0b, 0x03, 0xb6, 0x05, 0x76, 0x2f, 0xff, 0x34, 0x1d, - 0xbf, 0x55, 0xe0, 0x14, 0xf5, 0x50, 0x07, 0xa6, 0xd3, 0x4b, 0x00, 0x5a, 0x3a, 0xea, 0x8a, 0x5b, - 0x3b, 0x7f, 0x04, 0x64, 0x18, 0xb3, 0x31, 0xf4, 0xf1, 0x04, 0x94, 0x13, 0x73, 0x1b, 0xfd, 0x23, - 0x83, 0x79, 0x70, 0x77, 0xa8, 0xfd, 0xf3, 0x30, 0x58, 0xa8, 0xe0, 0xeb, 0xfc, 0x47, 0x3f, 0xfc, - 0xf2, 0xe9, 0xf8, 0x57, 0x79, 0x54, 0x8f, 0x7f, 0x66, 0xf2, 0x9f, 0x8a, 0xfb, 0x97, 0x14, 0x7f, - 0xe5, 0x89, 0xa9, 0xdb, 0xdf, 0x49, 0xe8, 0x85, 0x74, 0x08, 0x4a, 0x31, 0x74, 0xe5, 0x09, 0x5f, - 0x45, 0x1a, 0xc9, 0xdf, 0x73, 0xc9, 0x61, 0xed, 0x2f, 0x60, 0x8f, 0x88, 0xc6, 0x9e, 0x1e, 0x0a, - 0xd4, 0xed, 0x2e, 0x36, 0xac, 0xc3, 0x71, 0x16, 0xee, 0x92, 0x4c, 0x54, 0x38, 0x7c, 0x9f, 0x6e, - 0x7f, 0x21, 0xa1, 0xcf, 0xfe, 0xbc, 0xa6, 0x6f, 0x3f, 0x97, 0xd0, 0x97, 0x87, 0x9a, 0xc7, 0x70, - 0x67, 0x40, 0x1c, 0xc3, 0x9d, 0x23, 0x1a, 0x38, 0x80, 0x1c, 
0x66, 0xe1, 0x00, 0x90, 0x9b, 0x88, - 0x9e, 0x8f, 0xc3, 0x89, 0xbe, 0xc5, 0x02, 0x9d, 0x1f, 0xba, 0xc2, 0xf4, 0x2f, 0x6c, 0xb5, 0x0b, - 0x47, 0x81, 0x86, 0x39, 0xf9, 0x42, 0xe2, 0x39, 0xf9, 0x8d, 0x84, 0x5a, 0x43, 0x62, 0xc2, 0x4d, - 0x56, 0x5c, 0xe5, 0xc9, 0x10, 0xdf, 0xb3, 0x1d, 0xcd, 0x08, 0xfc, 0x3a, 0x6a, 0x8e, 0x54, 0x71, - 0x1c, 0x05, 0x48, 0x87, 0x4a, 0x6a, 0x70, 0xa2, 0x7f, 0x0d, 0x2d, 0xf4, 0xf4, 0x04, 0xad, 0x2d, - 0x1d, 0x0e, 0x8c, 0x1a, 0x82, 0x0e, 0x95, 0xd4, 0xcc, 0xcc, 0xd4, 0x92, 0x35, 0xa7, 0x33, 0xb5, - 0x64, 0x8f, 0xdf, 0x31, 0x74, 0x0f, 0x0a, 0xc1, 0xea, 0x81, 0xb2, 0xf6, 0xd4, 0xd4, 0x5a, 0x54, - 0x3b, 0x3b, 0x02, 0x11, 0x09, 0x24, 0xf1, 0x44, 0x88, 0x06, 0x7a, 0x56, 0x52, 0x0c, 0x99, 0xb9, - 0xb5, 0x73, 0x23, 0xb0, 0xd9, 0x6a, 0xa2, 0xb9, 0x3a, 0x4a, 0x4d, 0xdf, 0xec, 0x3b, 0xaa, 0x9a, - 0x2e, 0xa0, 0x4d, 0xc2, 0xfa, 0x26, 0x46, 0x66, 0x3d, 0x64, 0x4f, 0xbc, 0xcc, 0x7a, 0x18, 0x32, - 0x80, 0xe4, 0x31, 0xf4, 0xa3, 0x04, 0x68, 0x70, 0xc5, 0x45, 0x17, 0x8f, 0xb3, 0x09, 0x1f, 0xab, - 0x04, 0x75, 0x5e, 0x81, 0xef, 0xa1, 0x87, 0x23, 0xab, 0x83, 0x28, 0x4f, 0xc2, 0x0d, 0x3f, 0x51, - 0x1a, 0x82, 0x12, 0x95, 0x9d, 0x20, 0x84, 0x5d, 0x3a, 0xda, 0xc2, 0x9f, 0xde, 0x78, 0x6d, 0xfb, - 0xff, 0x1d, 0x83, 0xed, 0xf6, 0xda, 0x0d, 0xcd, 0xee, 0x2a, 0xdc, 0x3a, 0x9b, 0x76, 0x82, 0x0f, - 0x25, 0xfa, 0x73, 0xaf, 0x43, 0x2c, 0xc5, 0x69, 0xff, 0xa7, 0x63, 0x2b, 0x03, 0xff, 0x8c, 0xb6, - 0x0b, 0xfc, 0xc7, 0xfc, 0xe5, 0xdf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xe2, 0x8a, 0xd7, 0x3b, 0x35, - 0x15, 0x00, 0x00, -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// ArtifactRegistryClient is the client API for ArtifactRegistry service. 
-// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type ArtifactRegistryClient interface { - CreateArtifact(ctx context.Context, in *CreateArtifactRequest, opts ...grpc.CallOption) (*CreateArtifactResponse, error) - GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*GetArtifactResponse, error) - SearchArtifacts(ctx context.Context, in *SearchArtifactsRequest, opts ...grpc.CallOption) (*SearchArtifactsResponse, error) - CreateTrigger(ctx context.Context, in *CreateTriggerRequest, opts ...grpc.CallOption) (*CreateTriggerResponse, error) - DeleteTrigger(ctx context.Context, in *DeleteTriggerRequest, opts ...grpc.CallOption) (*DeleteTriggerResponse, error) - AddTag(ctx context.Context, in *AddTagRequest, opts ...grpc.CallOption) (*AddTagResponse, error) - RegisterProducer(ctx context.Context, in *RegisterProducerRequest, opts ...grpc.CallOption) (*RegisterResponse, error) - RegisterConsumer(ctx context.Context, in *RegisterConsumerRequest, opts ...grpc.CallOption) (*RegisterResponse, error) - SetExecutionInputs(ctx context.Context, in *ExecutionInputsRequest, opts ...grpc.CallOption) (*ExecutionInputsResponse, error) - FindByWorkflowExec(ctx context.Context, in *FindByWorkflowExecRequest, opts ...grpc.CallOption) (*SearchArtifactsResponse, error) -} - -type artifactRegistryClient struct { - cc *grpc.ClientConn -} - -func NewArtifactRegistryClient(cc *grpc.ClientConn) ArtifactRegistryClient { - return &artifactRegistryClient{cc} -} - -func (c *artifactRegistryClient) CreateArtifact(ctx context.Context, in *CreateArtifactRequest, opts ...grpc.CallOption) (*CreateArtifactResponse, error) { - out := new(CreateArtifactResponse) - err := c.cc.Invoke(ctx, "/flyteidl.artifact.ArtifactRegistry/CreateArtifact", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *artifactRegistryClient) GetArtifact(ctx context.Context, in *GetArtifactRequest, opts ...grpc.CallOption) (*GetArtifactResponse, error) { - out := new(GetArtifactResponse) - err := c.cc.Invoke(ctx, "/flyteidl.artifact.ArtifactRegistry/GetArtifact", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *artifactRegistryClient) SearchArtifacts(ctx context.Context, in *SearchArtifactsRequest, opts ...grpc.CallOption) (*SearchArtifactsResponse, error) { - out := new(SearchArtifactsResponse) - err := c.cc.Invoke(ctx, "/flyteidl.artifact.ArtifactRegistry/SearchArtifacts", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *artifactRegistryClient) CreateTrigger(ctx context.Context, in *CreateTriggerRequest, opts ...grpc.CallOption) (*CreateTriggerResponse, error) { - out := new(CreateTriggerResponse) - err := c.cc.Invoke(ctx, "/flyteidl.artifact.ArtifactRegistry/CreateTrigger", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *artifactRegistryClient) DeleteTrigger(ctx context.Context, in *DeleteTriggerRequest, opts ...grpc.CallOption) (*DeleteTriggerResponse, error) { - out := new(DeleteTriggerResponse) - err := c.cc.Invoke(ctx, "/flyteidl.artifact.ArtifactRegistry/DeleteTrigger", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *artifactRegistryClient) AddTag(ctx context.Context, in *AddTagRequest, opts ...grpc.CallOption) (*AddTagResponse, error) { - out := new(AddTagResponse) - err := c.cc.Invoke(ctx, "/flyteidl.artifact.ArtifactRegistry/AddTag", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *artifactRegistryClient) RegisterProducer(ctx context.Context, in *RegisterProducerRequest, opts ...grpc.CallOption) (*RegisterResponse, error) { - out := new(RegisterResponse) - err := c.cc.Invoke(ctx, "/flyteidl.artifact.ArtifactRegistry/RegisterProducer", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *artifactRegistryClient) RegisterConsumer(ctx context.Context, in *RegisterConsumerRequest, opts ...grpc.CallOption) (*RegisterResponse, error) { - out := new(RegisterResponse) - err := c.cc.Invoke(ctx, "/flyteidl.artifact.ArtifactRegistry/RegisterConsumer", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *artifactRegistryClient) SetExecutionInputs(ctx context.Context, in *ExecutionInputsRequest, opts ...grpc.CallOption) (*ExecutionInputsResponse, error) { - out := new(ExecutionInputsResponse) - err := c.cc.Invoke(ctx, "/flyteidl.artifact.ArtifactRegistry/SetExecutionInputs", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *artifactRegistryClient) FindByWorkflowExec(ctx context.Context, in *FindByWorkflowExecRequest, opts ...grpc.CallOption) (*SearchArtifactsResponse, error) { - out := new(SearchArtifactsResponse) - err := c.cc.Invoke(ctx, "/flyteidl.artifact.ArtifactRegistry/FindByWorkflowExec", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ArtifactRegistryServer is the server API for ArtifactRegistry service. 
-type ArtifactRegistryServer interface { - CreateArtifact(context.Context, *CreateArtifactRequest) (*CreateArtifactResponse, error) - GetArtifact(context.Context, *GetArtifactRequest) (*GetArtifactResponse, error) - SearchArtifacts(context.Context, *SearchArtifactsRequest) (*SearchArtifactsResponse, error) - CreateTrigger(context.Context, *CreateTriggerRequest) (*CreateTriggerResponse, error) - DeleteTrigger(context.Context, *DeleteTriggerRequest) (*DeleteTriggerResponse, error) - AddTag(context.Context, *AddTagRequest) (*AddTagResponse, error) - RegisterProducer(context.Context, *RegisterProducerRequest) (*RegisterResponse, error) - RegisterConsumer(context.Context, *RegisterConsumerRequest) (*RegisterResponse, error) - SetExecutionInputs(context.Context, *ExecutionInputsRequest) (*ExecutionInputsResponse, error) - FindByWorkflowExec(context.Context, *FindByWorkflowExecRequest) (*SearchArtifactsResponse, error) -} - -// UnimplementedArtifactRegistryServer can be embedded to have forward compatible implementations. 
-type UnimplementedArtifactRegistryServer struct { -} - -func (*UnimplementedArtifactRegistryServer) CreateArtifact(ctx context.Context, req *CreateArtifactRequest) (*CreateArtifactResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreateArtifact not implemented") -} -func (*UnimplementedArtifactRegistryServer) GetArtifact(ctx context.Context, req *GetArtifactRequest) (*GetArtifactResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetArtifact not implemented") -} -func (*UnimplementedArtifactRegistryServer) SearchArtifacts(ctx context.Context, req *SearchArtifactsRequest) (*SearchArtifactsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method SearchArtifacts not implemented") -} -func (*UnimplementedArtifactRegistryServer) CreateTrigger(ctx context.Context, req *CreateTriggerRequest) (*CreateTriggerResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method CreateTrigger not implemented") -} -func (*UnimplementedArtifactRegistryServer) DeleteTrigger(ctx context.Context, req *DeleteTriggerRequest) (*DeleteTriggerResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeleteTrigger not implemented") -} -func (*UnimplementedArtifactRegistryServer) AddTag(ctx context.Context, req *AddTagRequest) (*AddTagResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method AddTag not implemented") -} -func (*UnimplementedArtifactRegistryServer) RegisterProducer(ctx context.Context, req *RegisterProducerRequest) (*RegisterResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method RegisterProducer not implemented") -} -func (*UnimplementedArtifactRegistryServer) RegisterConsumer(ctx context.Context, req *RegisterConsumerRequest) (*RegisterResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method RegisterConsumer not implemented") -} -func (*UnimplementedArtifactRegistryServer) SetExecutionInputs(ctx context.Context, req 
*ExecutionInputsRequest) (*ExecutionInputsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method SetExecutionInputs not implemented") -} -func (*UnimplementedArtifactRegistryServer) FindByWorkflowExec(ctx context.Context, req *FindByWorkflowExecRequest) (*SearchArtifactsResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method FindByWorkflowExec not implemented") -} - -func RegisterArtifactRegistryServer(s *grpc.Server, srv ArtifactRegistryServer) { - s.RegisterService(&_ArtifactRegistry_serviceDesc, srv) -} - -func _ArtifactRegistry_CreateArtifact_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateArtifactRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactRegistryServer).CreateArtifact(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/flyteidl.artifact.ArtifactRegistry/CreateArtifact", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ArtifactRegistryServer).CreateArtifact(ctx, req.(*CreateArtifactRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ArtifactRegistry_GetArtifact_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetArtifactRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactRegistryServer).GetArtifact(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/flyteidl.artifact.ArtifactRegistry/GetArtifact", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ArtifactRegistryServer).GetArtifact(ctx, req.(*GetArtifactRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ArtifactRegistry_SearchArtifacts_Handler(srv 
interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(SearchArtifactsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactRegistryServer).SearchArtifacts(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/flyteidl.artifact.ArtifactRegistry/SearchArtifacts", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ArtifactRegistryServer).SearchArtifacts(ctx, req.(*SearchArtifactsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ArtifactRegistry_CreateTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateTriggerRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactRegistryServer).CreateTrigger(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/flyteidl.artifact.ArtifactRegistry/CreateTrigger", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ArtifactRegistryServer).CreateTrigger(ctx, req.(*CreateTriggerRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ArtifactRegistry_DeleteTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeleteTriggerRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactRegistryServer).DeleteTrigger(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/flyteidl.artifact.ArtifactRegistry/DeleteTrigger", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ArtifactRegistryServer).DeleteTrigger(ctx, req.(*DeleteTriggerRequest)) - } - 
return interceptor(ctx, in, info, handler) -} - -func _ArtifactRegistry_AddTag_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(AddTagRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactRegistryServer).AddTag(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/flyteidl.artifact.ArtifactRegistry/AddTag", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ArtifactRegistryServer).AddTag(ctx, req.(*AddTagRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ArtifactRegistry_RegisterProducer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RegisterProducerRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactRegistryServer).RegisterProducer(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/flyteidl.artifact.ArtifactRegistry/RegisterProducer", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ArtifactRegistryServer).RegisterProducer(ctx, req.(*RegisterProducerRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ArtifactRegistry_RegisterConsumer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RegisterConsumerRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactRegistryServer).RegisterConsumer(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/flyteidl.artifact.ArtifactRegistry/RegisterConsumer", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return 
srv.(ArtifactRegistryServer).RegisterConsumer(ctx, req.(*RegisterConsumerRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ArtifactRegistry_SetExecutionInputs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ExecutionInputsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactRegistryServer).SetExecutionInputs(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/flyteidl.artifact.ArtifactRegistry/SetExecutionInputs", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ArtifactRegistryServer).SetExecutionInputs(ctx, req.(*ExecutionInputsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _ArtifactRegistry_FindByWorkflowExec_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(FindByWorkflowExecRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ArtifactRegistryServer).FindByWorkflowExec(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/flyteidl.artifact.ArtifactRegistry/FindByWorkflowExec", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ArtifactRegistryServer).FindByWorkflowExec(ctx, req.(*FindByWorkflowExecRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _ArtifactRegistry_serviceDesc = grpc.ServiceDesc{ - ServiceName: "flyteidl.artifact.ArtifactRegistry", - HandlerType: (*ArtifactRegistryServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateArtifact", - Handler: _ArtifactRegistry_CreateArtifact_Handler, - }, - { - MethodName: "GetArtifact", - Handler: _ArtifactRegistry_GetArtifact_Handler, - }, - { - MethodName: "SearchArtifacts", - Handler: 
_ArtifactRegistry_SearchArtifacts_Handler, - }, - { - MethodName: "CreateTrigger", - Handler: _ArtifactRegistry_CreateTrigger_Handler, - }, - { - MethodName: "DeleteTrigger", - Handler: _ArtifactRegistry_DeleteTrigger_Handler, - }, - { - MethodName: "AddTag", - Handler: _ArtifactRegistry_AddTag_Handler, - }, - { - MethodName: "RegisterProducer", - Handler: _ArtifactRegistry_RegisterProducer_Handler, - }, - { - MethodName: "RegisterConsumer", - Handler: _ArtifactRegistry_RegisterConsumer_Handler, - }, - { - MethodName: "SetExecutionInputs", - Handler: _ArtifactRegistry_SetExecutionInputs_Handler, - }, - { - MethodName: "FindByWorkflowExec", - Handler: _ArtifactRegistry_FindByWorkflowExec_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "flyteidl/artifact/artifacts.proto", -} diff --git a/flyteidl/gen/pb-go/flyteidl/artifact/artifacts.pb.gw.go b/flyteidl/gen/pb-go/flyteidl/artifact/artifacts.pb.gw.go deleted file mode 100644 index b6e603f774..0000000000 --- a/flyteidl/gen/pb-go/flyteidl/artifact/artifacts.pb.gw.go +++ /dev/null @@ -1,636 +0,0 @@ -// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: flyteidl/artifact/artifacts.proto - -/* -Package artifact is a reverse proxy. - -It translates gRPC into RESTful JSON APIs. 
-*/ -package artifact - -import ( - "context" - "io" - "net/http" - - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/status" -) - -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray - -var ( - filter_ArtifactRegistry_GetArtifact_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) - -func request_ArtifactRegistry_GetArtifact_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactRegistryClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetArtifactRequest - var metadata runtime.ServerMetadata - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactRegistry_GetArtifact_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.GetArtifact(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -var ( - filter_ArtifactRegistry_GetArtifact_1 = &utilities.DoubleArray{Encoding: map[string]int{"query": 0, "artifact_id": 1, "artifact_key": 2, "project": 3, "domain": 4, "name": 5, "version": 6}, Base: []int{1, 13, 1, 1, 1, 4, 3, 2, 0, 0, 9, 7, 6, 0, 9, 9, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 7, 5, 8, 2, 11, 12, 13, 2, 15, 16}} -) - -func request_ArtifactRegistry_GetArtifact_1(ctx context.Context, marshaler runtime.Marshaler, client ArtifactRegistryClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetArtifactRequest - var metadata runtime.ServerMetadata 
- - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["query.artifact_id.artifact_key.project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "query.artifact_id.artifact_key.project") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "query.artifact_id.artifact_key.project", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "query.artifact_id.artifact_key.project", err) - } - - val, ok = pathParams["query.artifact_id.artifact_key.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "query.artifact_id.artifact_key.domain") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "query.artifact_id.artifact_key.domain", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "query.artifact_id.artifact_key.domain", err) - } - - val, ok = pathParams["query.artifact_id.artifact_key.name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "query.artifact_id.artifact_key.name") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "query.artifact_id.artifact_key.name", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "query.artifact_id.artifact_key.name", err) - } - - val, ok = pathParams["query.artifact_id.version"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "query.artifact_id.version") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "query.artifact_id.version", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "query.artifact_id.version", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, 
status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactRegistry_GetArtifact_1); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.GetArtifact(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -var ( - filter_ArtifactRegistry_GetArtifact_2 = &utilities.DoubleArray{Encoding: map[string]int{"query": 0, "artifact_id": 1, "artifact_key": 2, "project": 3, "domain": 4, "name": 5}, Base: []int{1, 9, 1, 1, 1, 4, 4, 0, 3, 0, 9, 7, 7, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 5, 7, 9, 2, 11, 12, 13}} -) - -func request_ArtifactRegistry_GetArtifact_2(ctx context.Context, marshaler runtime.Marshaler, client ArtifactRegistryClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetArtifactRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["query.artifact_id.artifact_key.project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "query.artifact_id.artifact_key.project") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "query.artifact_id.artifact_key.project", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "query.artifact_id.artifact_key.project", err) - } - - val, ok = pathParams["query.artifact_id.artifact_key.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "query.artifact_id.artifact_key.domain") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "query.artifact_id.artifact_key.domain", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", 
"query.artifact_id.artifact_key.domain", err) - } - - val, ok = pathParams["query.artifact_id.artifact_key.name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "query.artifact_id.artifact_key.name") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "query.artifact_id.artifact_key.name", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "query.artifact_id.artifact_key.name", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactRegistry_GetArtifact_2); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.GetArtifact(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -var ( - filter_ArtifactRegistry_GetArtifact_3 = &utilities.DoubleArray{Encoding: map[string]int{"query": 0, "artifact_tag": 1, "artifact_key": 2, "project": 3, "domain": 4, "name": 5}, Base: []int{1, 9, 1, 1, 1, 4, 4, 0, 3, 0, 9, 7, 7, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 5, 7, 9, 2, 11, 12, 13}} -) - -func request_ArtifactRegistry_GetArtifact_3(ctx context.Context, marshaler runtime.Marshaler, client ArtifactRegistryClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq GetArtifactRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["query.artifact_tag.artifact_key.project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "query.artifact_tag.artifact_key.project") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "query.artifact_tag.artifact_key.project", val) - - if err != nil { - return nil, 
metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "query.artifact_tag.artifact_key.project", err) - } - - val, ok = pathParams["query.artifact_tag.artifact_key.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "query.artifact_tag.artifact_key.domain") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "query.artifact_tag.artifact_key.domain", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "query.artifact_tag.artifact_key.domain", err) - } - - val, ok = pathParams["query.artifact_tag.artifact_key.name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "query.artifact_tag.artifact_key.name") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "query.artifact_tag.artifact_key.name", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "query.artifact_tag.artifact_key.name", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactRegistry_GetArtifact_3); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.GetArtifact(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -var ( - filter_ArtifactRegistry_SearchArtifacts_0 = &utilities.DoubleArray{Encoding: map[string]int{"artifact_key": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} -) - -func request_ArtifactRegistry_SearchArtifacts_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactRegistryClient, req *http.Request, pathParams map[string]string) (proto.Message, 
runtime.ServerMetadata, error) { - var protoReq SearchArtifactsRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["artifact_key.project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_key.project") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "artifact_key.project", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_key.project", err) - } - - val, ok = pathParams["artifact_key.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_key.domain") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "artifact_key.domain", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_key.domain", err) - } - - val, ok = pathParams["artifact_key.name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_key.name") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "artifact_key.name", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_key.name", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactRegistry_SearchArtifacts_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.SearchArtifacts(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -var ( - filter_ArtifactRegistry_SearchArtifacts_1 = &utilities.DoubleArray{Encoding: map[string]int{"artifact_key": 0, "project": 1, 
"domain": 2}, Base: []int{1, 1, 1, 2, 0, 0}, Check: []int{0, 1, 2, 2, 3, 4}} -) - -func request_ArtifactRegistry_SearchArtifacts_1(ctx context.Context, marshaler runtime.Marshaler, client ArtifactRegistryClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq SearchArtifactsRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["artifact_key.project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_key.project") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "artifact_key.project", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_key.project", err) - } - - val, ok = pathParams["artifact_key.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "artifact_key.domain") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "artifact_key.domain", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "artifact_key.domain", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactRegistry_SearchArtifacts_1); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.SearchArtifacts(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -var ( - filter_ArtifactRegistry_FindByWorkflowExec_0 = &utilities.DoubleArray{Encoding: map[string]int{"exec_id": 0, "project": 1, "domain": 2, "name": 3, "direction": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 1, 3, 4, 5, 6}} -) 
- -func request_ArtifactRegistry_FindByWorkflowExec_0(ctx context.Context, marshaler runtime.Marshaler, client ArtifactRegistryClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq FindByWorkflowExecRequest - var metadata runtime.ServerMetadata - - var ( - val string - e int32 - ok bool - err error - _ = err - ) - - val, ok = pathParams["exec_id.project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "exec_id.project") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "exec_id.project", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "exec_id.project", err) - } - - val, ok = pathParams["exec_id.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "exec_id.domain") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "exec_id.domain", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "exec_id.domain", err) - } - - val, ok = pathParams["exec_id.name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "exec_id.name") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "exec_id.name", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "exec_id.name", err) - } - - val, ok = pathParams["direction"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "direction") - } - - e, err = runtime.Enum(val, FindByWorkflowExecRequest_Direction_value) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "direction", err) - } - - protoReq.Direction = FindByWorkflowExecRequest_Direction(e) - - if err := 
req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ArtifactRegistry_FindByWorkflowExec_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.FindByWorkflowExec(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -// RegisterArtifactRegistryHandlerFromEndpoint is same as RegisterArtifactRegistryHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterArtifactRegistryHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) - if err != nil { - return err - } - defer func() { - if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - return - } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - - return RegisterArtifactRegistryHandler(ctx, mux, conn) -} - -// RegisterArtifactRegistryHandler registers the http handlers for service ArtifactRegistry to "mux". -// The handlers forward requests to the grpc endpoint over "conn". -func RegisterArtifactRegistryHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterArtifactRegistryHandlerClient(ctx, mux, NewArtifactRegistryClient(conn)) -} - -// RegisterArtifactRegistryHandlerClient registers the http handlers for service ArtifactRegistry -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "ArtifactRegistryClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. 
If the passed in "ArtifactRegistryClient" -// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "ArtifactRegistryClient" to call the correct interceptors. -func RegisterArtifactRegistryHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ArtifactRegistryClient) error { - - mux.Handle("GET", pattern_ArtifactRegistry_GetArtifact_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_ArtifactRegistry_GetArtifact_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_ArtifactRegistry_GetArtifact_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - mux.Handle("GET", pattern_ArtifactRegistry_GetArtifact_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_ArtifactRegistry_GetArtifact_1(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_ArtifactRegistry_GetArtifact_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- - }) - - mux.Handle("GET", pattern_ArtifactRegistry_GetArtifact_2, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_ArtifactRegistry_GetArtifact_2(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_ArtifactRegistry_GetArtifact_2(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - mux.Handle("GET", pattern_ArtifactRegistry_GetArtifact_3, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_ArtifactRegistry_GetArtifact_3(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_ArtifactRegistry_GetArtifact_3(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- - }) - - mux.Handle("GET", pattern_ArtifactRegistry_SearchArtifacts_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_ArtifactRegistry_SearchArtifacts_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_ArtifactRegistry_SearchArtifacts_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - mux.Handle("GET", pattern_ArtifactRegistry_SearchArtifacts_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_ArtifactRegistry_SearchArtifacts_1(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_ArtifactRegistry_SearchArtifacts_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- - }) - - mux.Handle("GET", pattern_ArtifactRegistry_FindByWorkflowExec_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_ArtifactRegistry_FindByWorkflowExec_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_ArtifactRegistry_FindByWorkflowExec_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -var ( - pattern_ArtifactRegistry_GetArtifact_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 0}, []string{"artifacts", "api", "v1", "data"}, "")) - - pattern_ArtifactRegistry_GetArtifact_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8, 1, 0, 4, 1, 5, 9}, []string{"artifacts", "api", "v1", "data", "artifact", "id", "query.artifact_id.artifact_key.project", "query.artifact_id.artifact_key.domain", "query.artifact_id.artifact_key.name", "query.artifact_id.version"}, "")) - - pattern_ArtifactRegistry_GetArtifact_2 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"artifacts", "api", "v1", "data", "artifact", "id", "query.artifact_id.artifact_key.project", "query.artifact_id.artifact_key.domain", "query.artifact_id.artifact_key.name"}, "")) - - pattern_ArtifactRegistry_GetArtifact_3 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 
1, 0, 4, 1, 5, 8}, []string{"artifacts", "api", "v1", "data", "artifact", "tag", "query.artifact_tag.artifact_key.project", "query.artifact_tag.artifact_key.domain", "query.artifact_tag.artifact_key.name"}, "")) - - pattern_ArtifactRegistry_SearchArtifacts_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"artifacts", "api", "v1", "data", "query", "s", "artifact_key.project", "artifact_key.domain", "artifact_key.name"}, "")) - - pattern_ArtifactRegistry_SearchArtifacts_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"artifacts", "api", "v1", "data", "query", "artifact_key.project", "artifact_key.domain"}, "")) - - pattern_ArtifactRegistry_FindByWorkflowExec_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 2, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8, 1, 0, 4, 1, 5, 9}, []string{"artifacts", "api", "v1", "data", "query", "e", "exec_id.project", "exec_id.domain", "exec_id.name", "direction"}, "")) -) - -var ( - forward_ArtifactRegistry_GetArtifact_0 = runtime.ForwardResponseMessage - - forward_ArtifactRegistry_GetArtifact_1 = runtime.ForwardResponseMessage - - forward_ArtifactRegistry_GetArtifact_2 = runtime.ForwardResponseMessage - - forward_ArtifactRegistry_GetArtifact_3 = runtime.ForwardResponseMessage - - forward_ArtifactRegistry_SearchArtifacts_0 = runtime.ForwardResponseMessage - - forward_ArtifactRegistry_SearchArtifacts_1 = runtime.ForwardResponseMessage - - forward_ArtifactRegistry_FindByWorkflowExec_0 = runtime.ForwardResponseMessage -) diff --git a/flyteidl/gen/pb-go/flyteidl/artifact/artifacts.swagger.json b/flyteidl/gen/pb-go/flyteidl/artifact/artifacts.swagger.json deleted file mode 100644 index 8617ff5fee..0000000000 --- a/flyteidl/gen/pb-go/flyteidl/artifact/artifacts.swagger.json +++ /dev/null @@ -1,2117 +0,0 @@ -{ - 
"swagger": "2.0", - "info": { - "title": "flyteidl/artifact/artifacts.proto", - "version": "version not set" - }, - "schemes": [ - "http", - "https" - ], - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "paths": { - "/artifacts/api/v1/data/artifact/id/{query.artifact_id.artifact_key.project}/{query.artifact_id.artifact_key.domain}/{query.artifact_id.artifact_key.name}": { - "get": { - "operationId": "GetArtifact3", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/artifactGetArtifactResponse" - } - } - }, - "parameters": [ - { - "name": "query.artifact_id.artifact_key.project", - "description": "Project and domain and suffix needs to be unique across a given artifact store.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "query.artifact_id.artifact_key.domain", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "query.artifact_id.artifact_key.name", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "query.artifact_id.version", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.static_value", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.triggered_binding.index", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "query.artifact_tag.value.triggered_binding.partition_key", - "description": "These two fields are only relevant in the partition value case.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.triggered_binding.transform", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.input_binding.var", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.uri", - "in": "query", - "required": false, - "type": "string" 
- }, - { - "name": "query.binding.index", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "query.binding.partition_key", - "description": "These two fields are only relevant in the partition value case.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.binding.transform", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "details", - "description": "If false, then long_description is not returned.", - "in": "query", - "required": false, - "type": "boolean", - "format": "boolean" - } - ], - "tags": [ - "ArtifactRegistry" - ] - } - }, - "/artifacts/api/v1/data/artifact/id/{query.artifact_id.artifact_key.project}/{query.artifact_id.artifact_key.domain}/{query.artifact_id.artifact_key.name}/{query.artifact_id.version}": { - "get": { - "operationId": "GetArtifact2", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/artifactGetArtifactResponse" - } - } - }, - "parameters": [ - { - "name": "query.artifact_id.artifact_key.project", - "description": "Project and domain and suffix needs to be unique across a given artifact store.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "query.artifact_id.artifact_key.domain", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "query.artifact_id.artifact_key.name", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "query.artifact_id.version", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "query.artifact_tag.value.static_value", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.triggered_binding.index", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "query.artifact_tag.value.triggered_binding.partition_key", - "description": "These two fields are only relevant in 
the partition value case.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.triggered_binding.transform", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.input_binding.var", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.uri", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.binding.index", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "query.binding.partition_key", - "description": "These two fields are only relevant in the partition value case.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.binding.transform", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "details", - "description": "If false, then long_description is not returned.", - "in": "query", - "required": false, - "type": "boolean", - "format": "boolean" - } - ], - "tags": [ - "ArtifactRegistry" - ] - } - }, - "/artifacts/api/v1/data/artifact/tag/{query.artifact_tag.artifact_key.project}/{query.artifact_tag.artifact_key.domain}/{query.artifact_tag.artifact_key.name}": { - "get": { - "operationId": "GetArtifact4", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/artifactGetArtifactResponse" - } - } - }, - "parameters": [ - { - "name": "query.artifact_tag.artifact_key.project", - "description": "Project and domain and suffix needs to be unique across a given artifact store.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "query.artifact_tag.artifact_key.domain", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "query.artifact_tag.artifact_key.name", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "query.artifact_id.version", - "in": "query", - "required": false, - "type": 
"string" - }, - { - "name": "query.artifact_tag.value.static_value", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.triggered_binding.index", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "query.artifact_tag.value.triggered_binding.partition_key", - "description": "These two fields are only relevant in the partition value case.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.triggered_binding.transform", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.input_binding.var", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.uri", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.binding.index", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "query.binding.partition_key", - "description": "These two fields are only relevant in the partition value case.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.binding.transform", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "details", - "description": "If false, then long_description is not returned.", - "in": "query", - "required": false, - "type": "boolean", - "format": "boolean" - } - ], - "tags": [ - "ArtifactRegistry" - ] - } - }, - "/artifacts/api/v1/data/artifacts": { - "get": { - "operationId": "GetArtifact", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/artifactGetArtifactResponse" - } - } - }, - "parameters": [ - { - "name": "query.artifact_id.artifact_key.project", - "description": "Project and domain and suffix needs to be unique across a given artifact store.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": 
"query.artifact_id.artifact_key.domain", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_id.artifact_key.name", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_id.version", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.artifact_key.project", - "description": "Project and domain and suffix needs to be unique across a given artifact store.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.artifact_key.domain", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.artifact_key.name", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.static_value", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.triggered_binding.index", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "query.artifact_tag.value.triggered_binding.partition_key", - "description": "These two fields are only relevant in the partition value case.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.triggered_binding.transform", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.artifact_tag.value.input_binding.var", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.uri", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.binding.index", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "query.binding.partition_key", - "description": "These two fields are only relevant in the partition value case.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "query.binding.transform", - "in": "query", - "required": false, - 
"type": "string" - }, - { - "name": "details", - "description": "If false, then long_description is not returned.", - "in": "query", - "required": false, - "type": "boolean", - "format": "boolean" - } - ], - "tags": [ - "ArtifactRegistry" - ] - } - }, - "/artifacts/api/v1/data/query/e/{exec_id.project}/{exec_id.domain}/{exec_id.name}/{direction}": { - "get": { - "operationId": "FindByWorkflowExec", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/artifactSearchArtifactsResponse" - } - } - }, - "parameters": [ - { - "name": "exec_id.project", - "description": "Name of the project the resource belongs to.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "exec_id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "exec_id.name", - "description": "User or system provided value for the resource.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "direction", - "in": "path", - "required": true, - "type": "string", - "enum": [ - "INPUTS", - "OUTPUTS" - ] - } - ], - "tags": [ - "ArtifactRegistry" - ] - } - }, - "/artifacts/api/v1/data/query/s/{artifact_key.project}/{artifact_key.domain}/{artifact_key.name}": { - "get": { - "operationId": "SearchArtifacts", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/artifactSearchArtifactsResponse" - } - } - }, - "parameters": [ - { - "name": "artifact_key.project", - "description": "Project and domain and suffix needs to be unique across a given artifact store.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "artifact_key.domain", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "artifact_key.name", - "in": "path", - "required": true, - "type": 
"string" - }, - { - "name": "principal", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "version", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "options.strict_partitions", - "description": "If true, this means a strict partition search. meaning if you don't specify the partition\nfield, that will mean, non-partitioned, rather than any partition.", - "in": "query", - "required": false, - "type": "boolean", - "format": "boolean" - }, - { - "name": "options.latest_by_key", - "description": "If true, only one artifact per key will be returned. It will be the latest one by creation time.", - "in": "query", - "required": false, - "type": "boolean", - "format": "boolean" - }, - { - "name": "token", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "limit", - "in": "query", - "required": false, - "type": "integer", - "format": "int32" - } - ], - "tags": [ - "ArtifactRegistry" - ] - } - }, - "/artifacts/api/v1/data/query/{artifact_key.project}/{artifact_key.domain}": { - "get": { - "operationId": "SearchArtifacts2", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/artifactSearchArtifactsResponse" - } - } - }, - "parameters": [ - { - "name": "artifact_key.project", - "description": "Project and domain and suffix needs to be unique across a given artifact store.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "artifact_key.domain", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "artifact_key.name", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "principal", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "version", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "options.strict_partitions", - "description": "If true, this means a strict partition search. 
meaning if you don't specify the partition\nfield, that will mean, non-partitioned, rather than any partition.", - "in": "query", - "required": false, - "type": "boolean", - "format": "boolean" - }, - { - "name": "options.latest_by_key", - "description": "If true, only one artifact per key will be returned. It will be the latest one by creation time.", - "in": "query", - "required": false, - "type": "boolean", - "format": "boolean" - }, - { - "name": "token", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "limit", - "in": "query", - "required": false, - "type": "integer", - "format": "int32" - } - ], - "tags": [ - "ArtifactRegistry" - ] - } - } - }, - "definitions": { - "BlobTypeBlobDimensionality": { - "type": "string", - "enum": [ - "SINGLE", - "MULTIPART" - ], - "default": "SINGLE" - }, - "QualityOfServiceTier": { - "type": "string", - "enum": [ - "UNDEFINED", - "HIGH", - "MEDIUM", - "LOW" - ], - "default": "UNDEFINED", - "description": " - UNDEFINED: Default: no quality of service specified." - }, - "SchemaColumnSchemaColumnType": { - "type": "string", - "enum": [ - "INTEGER", - "FLOAT", - "STRING", - "BOOLEAN", - "DATETIME", - "DURATION" - ], - "default": "INTEGER" - }, - "SchemaTypeSchemaColumn": { - "type": "object", - "properties": { - "name": { - "type": "string", - "title": "A unique name -within the schema type- for the column" - }, - "type": { - "$ref": "#/definitions/SchemaColumnSchemaColumnType", - "description": "The column type. This allows a limited set of types currently." - } - } - }, - "SecretMountType": { - "type": "string", - "enum": [ - "ANY", - "ENV_VAR", - "FILE" - ], - "default": "ANY", - "description": " - ANY: Default case, indicates the client can tolerate either mounting options.\n - ENV_VAR: ENV_VAR indicates the secret needs to be mounted as an environment variable.\n - FILE: FILE indicates the secret needs to be mounted as a file." 
- }, - "StructuredDatasetTypeDatasetColumn": { - "type": "object", - "properties": { - "name": { - "type": "string", - "description": "A unique name within the schema type for the column." - }, - "literal_type": { - "$ref": "#/definitions/coreLiteralType", - "description": "The column type." - } - } - }, - "adminAnnotations": { - "type": "object", - "properties": { - "values": { - "type": "object", - "additionalProperties": { - "type": "string" - }, - "description": "Map of custom annotations to be applied to the execution resource." - } - }, - "description": "Annotation values to be applied to an execution resource.\nIn the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined\nto specify how to merge annotations defined at registration and execution time." - }, - "adminAuth": { - "type": "object", - "properties": { - "assumable_iam_role": { - "type": "string", - "description": "Defines an optional iam role which will be used for tasks run in executions created with this launch plan." - }, - "kubernetes_service_account": { - "type": "string", - "description": "Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan." - } - }, - "description": "Defines permissions associated with executions created by this launch plan spec.\nUse either of these roles when they have permissions required by your workflow execution.\nDeprecated." - }, - "adminAuthRole": { - "type": "object", - "properties": { - "assumable_iam_role": { - "type": "string", - "description": "Defines an optional iam role which will be used for tasks run in executions created with this launch plan." - }, - "kubernetes_service_account": { - "type": "string", - "description": "Defines an optional kubernetes service account which will be used for tasks run in executions created with this launch plan." 
- } - }, - "description": "Defines permissions associated with executions created by this launch plan spec.\nUse either of these roles when they have permissions required by your workflow execution.\nDeprecated." - }, - "adminCronSchedule": { - "type": "object", - "properties": { - "schedule": { - "type": "string", - "title": "Standard/default cron implementation as described by https://en.wikipedia.org/wiki/Cron#CRON_expression;\nAlso supports nonstandard predefined scheduling definitions\nas described by https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#CronExpressions\nexcept @reboot" - }, - "offset": { - "type": "string", - "title": "ISO 8601 duration as described by https://en.wikipedia.org/wiki/ISO_8601#Durations" - } - }, - "description": "Options for schedules to run according to a cron expression." - }, - "adminEmailNotification": { - "type": "object", - "properties": { - "recipients_email": { - "type": "array", - "items": { - "type": "string" - }, - "title": "The list of email addresses recipients for this notification.\n+required" - } - }, - "description": "Defines an email notification specification." - }, - "adminEnvs": { - "type": "object", - "properties": { - "values": { - "type": "array", - "items": { - "$ref": "#/definitions/coreKeyValuePair" - }, - "description": "Map of custom environment variables to be applied to the execution resource." - } - }, - "description": "Environment variable values to be applied to an execution resource.\nIn the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined\nto specify how to merge environment variables defined at registration and execution time." - }, - "adminFixedRate": { - "type": "object", - "properties": { - "value": { - "type": "integer", - "format": "int64" - }, - "unit": { - "$ref": "#/definitions/adminFixedRateUnit" - } - }, - "description": "Option for schedules run at a certain frequency e.g. every 2 minutes." 
- }, - "adminFixedRateUnit": { - "type": "string", - "enum": [ - "MINUTE", - "HOUR", - "DAY" - ], - "default": "MINUTE", - "description": "Represents a frequency at which to run a schedule." - }, - "adminLabels": { - "type": "object", - "properties": { - "values": { - "type": "object", - "additionalProperties": { - "type": "string" - }, - "description": "Map of custom labels to be applied to the execution resource." - } - }, - "description": "Label values to be applied to an execution resource.\nIn the future a mode (e.g. OVERRIDE, APPEND, etc) can be defined\nto specify how to merge labels defined at registration and execution time." - }, - "adminLaunchPlan": { - "type": "object", - "properties": { - "id": { - "$ref": "#/definitions/coreIdentifier", - "description": "Uniquely identifies a launch plan entity." - }, - "spec": { - "$ref": "#/definitions/adminLaunchPlanSpec", - "description": "User-provided launch plan details, including reference workflow, inputs and other metadata." - }, - "closure": { - "$ref": "#/definitions/adminLaunchPlanClosure", - "description": "Values computed by the flyte platform after launch plan registration." - } - }, - "description": "A LaunchPlan provides the capability to templatize workflow executions.\nLaunch plans simplify associating one or more schedules, inputs and notifications with your workflows.\nLaunch plans can be shared and used to trigger executions with predefined inputs even when a workflow\ndefinition doesn't necessarily have a default value for said input." - }, - "adminLaunchPlanClosure": { - "type": "object", - "properties": { - "state": { - "$ref": "#/definitions/adminLaunchPlanState", - "description": "Indicate the Launch plan state." 
- }, - "expected_inputs": { - "$ref": "#/definitions/coreParameterMap", - "title": "Indicates the set of inputs expected when creating an execution with the Launch plan" - }, - "expected_outputs": { - "$ref": "#/definitions/coreVariableMap", - "title": "Indicates the set of outputs expected to be produced by creating an execution with the Launch plan" - }, - "created_at": { - "type": "string", - "format": "date-time", - "description": "Time at which the launch plan was created." - }, - "updated_at": { - "type": "string", - "format": "date-time", - "description": "Time at which the launch plan was last updated." - } - }, - "description": "Values computed by the flyte platform after launch plan registration.\nThese include expected_inputs required to be present in a CreateExecutionRequest\nto launch the reference workflow as well timestamp values associated with the launch plan." - }, - "adminLaunchPlanMetadata": { - "type": "object", - "properties": { - "schedule": { - "$ref": "#/definitions/adminSchedule", - "title": "Schedule to execute the Launch Plan" - }, - "notifications": { - "type": "array", - "items": { - "$ref": "#/definitions/adminNotification" - }, - "title": "List of notifications based on Execution status transitions" - }, - "launch_conditions": { - "$ref": "#/definitions/protobufAny", - "title": "Additional metadata for how to launch the launch plan" - } - }, - "description": "Additional launch plan attributes included in the LaunchPlanSpec not strictly required to launch\nthe reference workflow." 
- }, - "adminLaunchPlanSpec": { - "type": "object", - "properties": { - "workflow_id": { - "$ref": "#/definitions/coreIdentifier", - "title": "Reference to the Workflow template that the launch plan references" - }, - "entity_metadata": { - "$ref": "#/definitions/adminLaunchPlanMetadata", - "title": "Metadata for the Launch Plan" - }, - "default_inputs": { - "$ref": "#/definitions/coreParameterMap", - "description": "Input values to be passed for the execution.\nThese can be overridden when an execution is created with this launch plan." - }, - "fixed_inputs": { - "$ref": "#/definitions/coreLiteralMap", - "description": "Fixed, non-overridable inputs for the Launch Plan.\nThese can not be overridden when an execution is created with this launch plan." - }, - "role": { - "type": "string", - "title": "String to indicate the role to use to execute the workflow underneath" - }, - "labels": { - "$ref": "#/definitions/adminLabels", - "description": "Custom labels to be applied to the execution resource." - }, - "annotations": { - "$ref": "#/definitions/adminAnnotations", - "description": "Custom annotations to be applied to the execution resource." - }, - "auth": { - "$ref": "#/definitions/adminAuth", - "description": "Indicates the permission associated with workflow executions triggered with this launch plan." - }, - "auth_role": { - "$ref": "#/definitions/adminAuthRole" - }, - "security_context": { - "$ref": "#/definitions/coreSecurityContext", - "title": "Indicates security context for permissions triggered with this launch plan" - }, - "quality_of_service": { - "$ref": "#/definitions/coreQualityOfService", - "description": "Indicates the runtime priority of the execution." - }, - "raw_output_data_config": { - "$ref": "#/definitions/adminRawOutputDataConfig", - "description": "Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.)." 
- }, - "max_parallelism": { - "type": "integer", - "format": "int32", - "description": "Controls the maximum number of tasknodes that can be run in parallel for the entire workflow.\nThis is useful to achieve fairness. Note: MapTasks are regarded as one unit,\nand parallelism/concurrency of MapTasks is independent from this." - }, - "interruptible": { - "type": "boolean", - "format": "boolean", - "description": "Allows for the interruptible flag of a workflow to be overwritten for a single execution.\nOmitting this field uses the workflow's value as a default.\nAs we need to distinguish between the field not being provided and its default value false, we have to use a wrapper\naround the bool field." - }, - "overwrite_cache": { - "type": "boolean", - "format": "boolean", - "description": "Allows for all cached values of a workflow and its tasks to be overwritten for a single execution.\nIf enabled, all calculations are performed even if cached results would be available, overwriting the stored\ndata once execution finishes successfully." - }, - "envs": { - "$ref": "#/definitions/adminEnvs", - "description": "Environment variables to be set for the execution." - } - }, - "description": "User-provided launch plan definition and configuration values." - }, - "adminLaunchPlanState": { - "type": "string", - "enum": [ - "INACTIVE", - "ACTIVE" - ], - "default": "INACTIVE", - "description": "By default any launch plan regardless of state can be used to launch a workflow execution.\nHowever, at most one version of a launch plan\n(e.g. a NamedEntityIdentifier set of shared project, domain and name values) can be\nactive at a time in regards to *schedules*. That is, at most one schedule in a NamedEntityIdentifier\ngroup will be observed and trigger executions at a defined cadence." 
- }, - "adminNotification": { - "type": "object", - "properties": { - "phases": { - "type": "array", - "items": { - "$ref": "#/definitions/coreWorkflowExecutionPhase" - }, - "title": "A list of phases to which users can associate the notifications to.\n+required" - }, - "email": { - "$ref": "#/definitions/adminEmailNotification" - }, - "pager_duty": { - "$ref": "#/definitions/adminPagerDutyNotification" - }, - "slack": { - "$ref": "#/definitions/adminSlackNotification" - } - }, - "description": "Represents a structure for notifications based on execution status.\nThe notification content is configured within flyte admin but can be templatized.\nFuture iterations could expose configuring notifications with custom content." - }, - "adminPagerDutyNotification": { - "type": "object", - "properties": { - "recipients_email": { - "type": "array", - "items": { - "type": "string" - }, - "title": "Currently, PagerDuty notifications leverage email to trigger a notification.\n+required" - } - }, - "description": "Defines a pager duty notification specification." - }, - "adminRawOutputDataConfig": { - "type": "object", - "properties": { - "output_location_prefix": { - "type": "string", - "title": "Prefix for where offloaded data from user workflows will be written\ne.g. s3://bucket/key or s3://bucket/" - } - }, - "description": "Encapsulates user settings pertaining to offloaded data (i.e. Blobs, Schema, query data, etc.).\nSee https://github.com/flyteorg/flyte/issues/211 for more background information." - }, - "adminSchedule": { - "type": "object", - "properties": { - "cron_expression": { - "type": "string", - "title": "Uses AWS syntax: Minutes Hours Day-of-month Month Day-of-week Year\ne.g. for a schedule that runs every 15 minutes: 0/15 * * * ? 
*" - }, - "rate": { - "$ref": "#/definitions/adminFixedRate" - }, - "cron_schedule": { - "$ref": "#/definitions/adminCronSchedule" - }, - "kickoff_time_input_arg": { - "type": "string", - "description": "Name of the input variable that the kickoff time will be supplied to when the workflow is kicked off." - } - }, - "description": "Defines complete set of information required to trigger an execution on a schedule." - }, - "adminSlackNotification": { - "type": "object", - "properties": { - "recipients_email": { - "type": "array", - "items": { - "type": "string" - }, - "title": "Currently, Slack notifications leverage email to trigger a notification.\n+required" - } - }, - "description": "Defines a slack notification specification." - }, - "artifactAddTagResponse": { - "type": "object" - }, - "artifactArtifact": { - "type": "object", - "properties": { - "artifact_id": { - "$ref": "#/definitions/coreArtifactID" - }, - "spec": { - "$ref": "#/definitions/artifactArtifactSpec" - }, - "tags": { - "type": "array", - "items": { - "type": "string" - }, - "title": "references the tag field in ArtifactTag" - }, - "source": { - "$ref": "#/definitions/artifactArtifactSource" - } - } - }, - "artifactArtifactConsumer": { - "type": "object", - "properties": { - "entity_id": { - "$ref": "#/definitions/coreIdentifier", - "title": "These should all be launch plan IDs" - }, - "inputs": { - "$ref": "#/definitions/coreParameterMap" - } - } - }, - "artifactArtifactProducer": { - "type": "object", - "properties": { - "entity_id": { - "$ref": "#/definitions/coreIdentifier", - "description": "These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in\nAdmin's domain." 
- }, - "outputs": { - "$ref": "#/definitions/coreVariableMap" - } - } - }, - "artifactArtifactSource": { - "type": "object", - "properties": { - "workflow_execution": { - "$ref": "#/definitions/coreWorkflowExecutionIdentifier" - }, - "node_id": { - "type": "string" - }, - "task_id": { - "$ref": "#/definitions/coreIdentifier" - }, - "retry_attempt": { - "type": "integer", - "format": "int64" - }, - "principal": { - "type": "string", - "description": "Uploads, either from the UI or from the CLI, or FlyteRemote, will have this." - } - } - }, - "artifactArtifactSpec": { - "type": "object", - "properties": { - "value": { - "$ref": "#/definitions/coreLiteral" - }, - "type": { - "$ref": "#/definitions/coreLiteralType", - "description": "This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but\nforgets to change the name, that is okay. And the reason why this is a separate field is because adding the\ntype to all Literals is a lot of work." 
- }, - "short_description": { - "type": "string" - }, - "user_metadata": { - "$ref": "#/definitions/protobufAny", - "title": "Additional user metadata" - }, - "metadata_type": { - "type": "string" - } - } - }, - "artifactCreateArtifactResponse": { - "type": "object", - "properties": { - "artifact": { - "$ref": "#/definitions/artifactArtifact" - } - } - }, - "artifactCreateTriggerResponse": { - "type": "object" - }, - "artifactDeleteTriggerResponse": { - "type": "object" - }, - "artifactExecutionInputsResponse": { - "type": "object" - }, - "artifactFindByWorkflowExecRequestDirection": { - "type": "string", - "enum": [ - "INPUTS", - "OUTPUTS" - ], - "default": "INPUTS" - }, - "artifactGetArtifactResponse": { - "type": "object", - "properties": { - "artifact": { - "$ref": "#/definitions/artifactArtifact" - } - } - }, - "artifactRegisterResponse": { - "type": "object" - }, - "artifactSearchArtifactsResponse": { - "type": "object", - "properties": { - "artifacts": { - "type": "array", - "items": { - "$ref": "#/definitions/artifactArtifact" - }, - "description": "If artifact specs are not requested, the resultant artifacts may be empty." - }, - "token": { - "type": "string", - "description": "continuation token if relevant." - } - } - }, - "artifactSearchOptions": { - "type": "object", - "properties": { - "strict_partitions": { - "type": "boolean", - "format": "boolean", - "description": "If true, this means a strict partition search. meaning if you don't specify the partition\nfield, that will mean, non-partitioned, rather than any partition." - }, - "latest_by_key": { - "type": "boolean", - "format": "boolean", - "description": "If true, only one artifact per key will be returned. It will be the latest one by creation time." 
- } - } - }, - "coreArtifactBindingData": { - "type": "object", - "properties": { - "index": { - "type": "integer", - "format": "int64" - }, - "partition_key": { - "type": "string", - "title": "These two fields are only relevant in the partition value case" - }, - "transform": { - "type": "string" - } - }, - "title": "Only valid for triggers" - }, - "coreArtifactID": { - "type": "object", - "properties": { - "artifact_key": { - "$ref": "#/definitions/coreArtifactKey" - }, - "version": { - "type": "string" - }, - "partitions": { - "$ref": "#/definitions/corePartitions" - } - } - }, - "coreArtifactKey": { - "type": "object", - "properties": { - "project": { - "type": "string", - "description": "Project and domain and suffix needs to be unique across a given artifact store." - }, - "domain": { - "type": "string" - }, - "name": { - "type": "string" - } - } - }, - "coreArtifactQuery": { - "type": "object", - "properties": { - "artifact_id": { - "$ref": "#/definitions/coreArtifactID" - }, - "artifact_tag": { - "$ref": "#/definitions/coreArtifactTag" - }, - "uri": { - "type": "string" - }, - "binding": { - "$ref": "#/definitions/coreArtifactBindingData", - "description": "This is used in the trigger case, where a user specifies a value for an input that is one of the triggering\nartifacts, or a partition value derived from a triggering artifact." 
- } - }, - "title": "Uniqueness constraints for Artifacts\n - project, domain, name, version, partitions\nOption 2 (tags are standalone, point to an individual artifact id):\n - project, domain, name, alias (points to one partition if partitioned)\n - project, domain, name, partition key, partition value" - }, - "coreArtifactTag": { - "type": "object", - "properties": { - "artifact_key": { - "$ref": "#/definitions/coreArtifactKey" - }, - "value": { - "$ref": "#/definitions/coreLabelValue" - } - } - }, - "coreBinary": { - "type": "object", - "properties": { - "value": { - "type": "string", - "format": "byte" - }, - "tag": { - "type": "string" - } - }, - "description": "A simple byte array with a tag to help different parts of the system communicate about what is in the byte array.\nIt's strongly advisable that consumers of this type define a unique tag and validate the tag before parsing the data." - }, - "coreBlob": { - "type": "object", - "properties": { - "metadata": { - "$ref": "#/definitions/coreBlobMetadata" - }, - "uri": { - "type": "string" - } - }, - "description": "Refers to an offloaded set of files. It encapsulates the type of the store and a unique uri for where the data is.\nThere are no restrictions on how the uri is formatted since it will depend on how to interact with the store." - }, - "coreBlobMetadata": { - "type": "object", - "properties": { - "type": { - "$ref": "#/definitions/coreBlobType" - } - } - }, - "coreBlobType": { - "type": "object", - "properties": { - "format": { - "type": "string", - "title": "Format can be a free form string understood by SDK/UI etc like\ncsv, parquet etc" - }, - "dimensionality": { - "$ref": "#/definitions/BlobTypeBlobDimensionality" - } - }, - "title": "Defines type behavior for blob objects" - }, - "coreEnumType": { - "type": "object", - "properties": { - "values": { - "type": "array", - "items": { - "type": "string" - }, - "description": "Predefined set of enum values." 
- } - }, - "description": "Enables declaring enum types, with predefined string values\nFor len(values) \u003e 0, the first value in the ordered list is regarded as the default value. If you wish\nTo provide no defaults, make the first value as undefined." - }, - "coreError": { - "type": "object", - "properties": { - "failed_node_id": { - "type": "string", - "description": "The node id that threw the error." - }, - "message": { - "type": "string", - "description": "Error message thrown." - } - }, - "description": "Represents an error thrown from a node." - }, - "coreIdentifier": { - "type": "object", - "properties": { - "resource_type": { - "$ref": "#/definitions/coreResourceType", - "description": "Identifies the specific type of resource that this identifier corresponds to." - }, - "project": { - "type": "string", - "description": "Name of the project the resource belongs to." - }, - "domain": { - "type": "string", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project." - }, - "name": { - "type": "string", - "description": "User provided value for the resource." - }, - "version": { - "type": "string", - "description": "Specific version of the resource." - } - }, - "description": "Encapsulation of fields that uniquely identifies a Flyte resource." - }, - "coreIdentity": { - "type": "object", - "properties": { - "iam_role": { - "type": "string", - "description": "iam_role references the fully qualified name of Identity \u0026 Access Management role to impersonate." - }, - "k8s_service_account": { - "type": "string", - "description": "k8s_service_account references a kubernetes service account to impersonate." - }, - "oauth2_client": { - "$ref": "#/definitions/coreOAuth2Client", - "description": "oauth2_client references an oauth2 client. Backend plugins can use this information to impersonate the client when\nmaking external calls." 
- }, - "execution_identity": { - "type": "string", - "title": "execution_identity references the subject who makes the execution" - } - }, - "description": "Identity encapsulates the various security identities a task can run as. It's up to the underlying plugin to pick the\nright identity for the execution environment." - }, - "coreInputBindingData": { - "type": "object", - "properties": { - "var": { - "type": "string" - } - } - }, - "coreKeyValuePair": { - "type": "object", - "properties": { - "key": { - "type": "string", - "description": "required." - }, - "value": { - "type": "string", - "description": "+optional." - } - }, - "description": "A generic key value pair." - }, - "coreLabelValue": { - "type": "object", - "properties": { - "static_value": { - "type": "string" - }, - "triggered_binding": { - "$ref": "#/definitions/coreArtifactBindingData" - }, - "input_binding": { - "$ref": "#/definitions/coreInputBindingData" - } - } - }, - "coreLiteral": { - "type": "object", - "properties": { - "scalar": { - "$ref": "#/definitions/coreScalar", - "description": "A simple value." - }, - "collection": { - "$ref": "#/definitions/coreLiteralCollection", - "description": "A collection of literals to allow nesting." - }, - "map": { - "$ref": "#/definitions/coreLiteralMap", - "description": "A map of strings to literals." - }, - "hash": { - "type": "string", - "title": "A hash representing this literal.\nThis is used for caching purposes. For more details refer to RFC 1893\n(https://github.com/flyteorg/flyte/blob/master/rfc/system/1893-caching-of-offloaded-objects.md)" - }, - "metadata": { - "type": "object", - "additionalProperties": { - "type": "string" - }, - "description": "Additional metadata for literals." - } - }, - "description": "A simple value. This supports any level of nesting (e.g. array of array of array of Blobs) as well as simple primitives." 
- }, - "coreLiteralCollection": { - "type": "object", - "properties": { - "literals": { - "type": "array", - "items": { - "$ref": "#/definitions/coreLiteral" - } - } - }, - "description": "A collection of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field." - }, - "coreLiteralMap": { - "type": "object", - "properties": { - "literals": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/coreLiteral" - } - } - }, - "description": "A map of literals. This is a workaround since oneofs in proto messages cannot contain a repeated field." - }, - "coreLiteralType": { - "type": "object", - "properties": { - "simple": { - "$ref": "#/definitions/coreSimpleType", - "description": "A simple type that can be compared one-to-one with another." - }, - "schema": { - "$ref": "#/definitions/coreSchemaType", - "description": "A complex type that requires matching of inner fields." - }, - "collection_type": { - "$ref": "#/definitions/coreLiteralType", - "description": "Defines the type of the value of a collection. Only homogeneous collections are allowed." - }, - "map_value_type": { - "$ref": "#/definitions/coreLiteralType", - "description": "Defines the type of the value of a map type. The type of the key is always a string." - }, - "blob": { - "$ref": "#/definitions/coreBlobType", - "description": "A blob might have specialized implementation details depending on associated metadata." - }, - "enum_type": { - "$ref": "#/definitions/coreEnumType", - "description": "Defines an enum with pre-defined string values." - }, - "structured_dataset_type": { - "$ref": "#/definitions/coreStructuredDatasetType", - "title": "Generalized schema support" - }, - "union_type": { - "$ref": "#/definitions/coreUnionType", - "description": "Defines an union type with pre-defined LiteralTypes." 
- }, - "metadata": { - "$ref": "#/definitions/protobufStruct", - "description": "This field contains type metadata that is descriptive of the type, but is NOT considered in type-checking. This might be used by\nconsumers to identify special behavior or display extended information for the type." - }, - "annotation": { - "$ref": "#/definitions/coreTypeAnnotation", - "description": "This field contains arbitrary data that might have special semantic\nmeaning for the client but does not effect internal flyte behavior." - }, - "structure": { - "$ref": "#/definitions/coreTypeStructure", - "description": "Hints to improve type matching." - } - }, - "description": "Defines a strong type to allow type checking between interfaces." - }, - "coreOAuth2Client": { - "type": "object", - "properties": { - "client_id": { - "type": "string", - "title": "client_id is the public id for the client to use. The system will not perform any pre-auth validation that the\nsecret requested matches the client_id indicated here.\n+required" - }, - "client_secret": { - "$ref": "#/definitions/coreSecret", - "title": "client_secret is a reference to the secret used to authenticate the OAuth2 client.\n+required" - } - }, - "description": "OAuth2Client encapsulates OAuth2 Client Credentials to be used when making calls on behalf of that task." - }, - "coreOAuth2TokenRequest": { - "type": "object", - "properties": { - "name": { - "type": "string", - "title": "name indicates a unique id for the token request within this task token requests. It'll be used as a suffix for\nenvironment variables and as a filename for mounting tokens as files.\n+required" - }, - "type": { - "$ref": "#/definitions/coreOAuth2TokenRequestType", - "title": "type indicates the type of the request to make. 
Defaults to CLIENT_CREDENTIALS.\n+required" - }, - "client": { - "$ref": "#/definitions/coreOAuth2Client", - "title": "client references the client_id/secret to use to request the OAuth2 token.\n+required" - }, - "idp_discovery_endpoint": { - "type": "string", - "title": "idp_discovery_endpoint references the discovery endpoint used to retrieve token endpoint and other related\ninformation.\n+optional" - }, - "token_endpoint": { - "type": "string", - "title": "token_endpoint references the token issuance endpoint. If idp_discovery_endpoint is not provided, this parameter is\nmandatory.\n+optional" - } - }, - "description": "OAuth2TokenRequest encapsulates information needed to request an OAuth2 token.\nFLYTE_TOKENS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if\ntokens are passed through environment variables.\nFLYTE_TOKENS_PATH_PREFIX will be passed to indicate the prefix of the path where secrets will be mounted if tokens\nare passed through file mounts." - }, - "coreOAuth2TokenRequestType": { - "type": "string", - "enum": [ - "CLIENT_CREDENTIALS" - ], - "default": "CLIENT_CREDENTIALS", - "description": "Type of the token requested.\n\n - CLIENT_CREDENTIALS: CLIENT_CREDENTIALS indicates a 2-legged OAuth token requested using client credentials." - }, - "coreParameter": { - "type": "object", - "properties": { - "var": { - "$ref": "#/definitions/coreVariable", - "description": "+required Variable. Defines the type of the variable backing this parameter." - }, - "default": { - "$ref": "#/definitions/coreLiteral", - "description": "Defines a default value that has to match the variable type defined." - }, - "required": { - "type": "boolean", - "format": "boolean", - "description": "+optional, is this value required to be filled." 
- }, - "artifact_query": { - "$ref": "#/definitions/coreArtifactQuery", - "description": "This is an execution time search basically that should result in exactly one Artifact with a Type that\nmatches the type of the variable." - }, - "artifact_id": { - "$ref": "#/definitions/coreArtifactID" - } - }, - "description": "A parameter is used as input to a launch plan and has\nthe special ability to have a default value or mark itself as required." - }, - "coreParameterMap": { - "type": "object", - "properties": { - "parameters": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/coreParameter" - }, - "description": "Defines a map of parameter names to parameters." - } - }, - "description": "A map of Parameters." - }, - "corePartitions": { - "type": "object", - "properties": { - "value": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/coreLabelValue" - } - } - } - }, - "corePrimitive": { - "type": "object", - "properties": { - "integer": { - "type": "string", - "format": "int64" - }, - "float_value": { - "type": "number", - "format": "double" - }, - "string_value": { - "type": "string" - }, - "boolean": { - "type": "boolean", - "format": "boolean" - }, - "datetime": { - "type": "string", - "format": "date-time" - }, - "duration": { - "type": "string" - } - }, - "title": "Primitive Types" - }, - "coreQualityOfService": { - "type": "object", - "properties": { - "tier": { - "$ref": "#/definitions/QualityOfServiceTier" - }, - "spec": { - "$ref": "#/definitions/coreQualityOfServiceSpec" - } - }, - "description": "Indicates the priority of an execution." - }, - "coreQualityOfServiceSpec": { - "type": "object", - "properties": { - "queueing_budget": { - "type": "string", - "description": "Indicates how much queueing delay an execution can tolerate." - } - }, - "description": "Represents customized execution run-time attributes." 
- }, - "coreResourceType": { - "type": "string", - "enum": [ - "UNSPECIFIED", - "TASK", - "WORKFLOW", - "LAUNCH_PLAN", - "DATASET" - ], - "default": "UNSPECIFIED", - "description": "Indicates a resource type within Flyte.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects" - }, - "coreScalar": { - "type": "object", - "properties": { - "primitive": { - "$ref": "#/definitions/corePrimitive" - }, - "blob": { - "$ref": "#/definitions/coreBlob" - }, - "binary": { - "$ref": "#/definitions/coreBinary" - }, - "schema": { - "$ref": "#/definitions/coreSchema" - }, - "none_type": { - "$ref": "#/definitions/coreVoid" - }, - "error": { - "$ref": "#/definitions/coreError" - }, - "generic": { - "$ref": "#/definitions/protobufStruct" - }, - "structured_dataset": { - "$ref": "#/definitions/coreStructuredDataset" - }, - "union": { - "$ref": "#/definitions/coreUnion" - } - } - }, - "coreSchema": { - "type": "object", - "properties": { - "uri": { - "type": "string" - }, - "type": { - "$ref": "#/definitions/coreSchemaType" - } - }, - "description": "A strongly typed schema that defines the interface of data retrieved from the underlying storage medium." - }, - "coreSchemaType": { - "type": "object", - "properties": { - "columns": { - "type": "array", - "items": { - "$ref": "#/definitions/SchemaTypeSchemaColumn" - }, - "description": "A list of ordered columns this schema comprises of." - } - }, - "description": "Defines schema columns and types to strongly type-validate schemas interoperability." - }, - "coreSecret": { - "type": "object", - "properties": { - "group": { - "type": "string", - "title": "The name of the secret group where to find the key referenced below. 
For K8s secrets, this should be the name of\nthe v1/secret object. For Confidant, this should be the Credential name. For Vault, this should be the secret name.\nFor AWS Secret Manager, this should be the name of the secret.\n+required" - }, - "group_version": { - "type": "string", - "title": "The group version to fetch. This is not supported in all secret management systems. It'll be ignored for the ones\nthat do not support it.\n+optional" - }, - "key": { - "type": "string", - "title": "The name of the secret to mount. This has to match an existing secret in the system. It's up to the implementation\nof the secret management system to require case sensitivity. For K8s secrets, Confidant and Vault, this should\nmatch one of the keys inside the secret. For AWS Secret Manager, it's ignored.\n+optional" - }, - "mount_requirement": { - "$ref": "#/definitions/SecretMountType", - "title": "mount_requirement is optional. Indicates where the secret has to be mounted. If provided, the execution will fail\nif the underlying key management system cannot satisfy that requirement. If not provided, the default location\nwill depend on the key management system.\n+optional" - } - }, - "description": "Secret encapsulates information about the secret a task needs to proceed. An environment variable\nFLYTE_SECRETS_ENV_PREFIX will be passed to indicate the prefix of the environment variables that will be present if\nsecrets are passed through environment variables.\nFLYTE_SECRETS_DEFAULT_DIR will be passed to indicate the prefix of the path where secrets will be mounted if secrets\nare passed through file mounts." - }, - "coreSecurityContext": { - "type": "object", - "properties": { - "run_as": { - "$ref": "#/definitions/coreIdentity", - "description": "run_as encapsulates the identity a pod should run as. If the task fills in multiple fields here, it'll be up to the\nbackend plugin to choose the appropriate identity for the execution engine the task will run on." 
- }, - "secrets": { - "type": "array", - "items": { - "$ref": "#/definitions/coreSecret" - }, - "description": "secrets indicate the list of secrets the task needs in order to proceed. Secrets will be mounted/passed to the\npod as it starts. If the plugin responsible for kicking of the task will not run it on a flyte cluster (e.g. AWS\nBatch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access\nto the secret) and to pass it to the remote execution engine." - }, - "tokens": { - "type": "array", - "items": { - "$ref": "#/definitions/coreOAuth2TokenRequest" - }, - "description": "tokens indicate the list of token requests the task needs in order to proceed. Tokens will be mounted/passed to the\npod as it starts. If the plugin responsible for kicking of the task will not run it on a flyte cluster (e.g. AWS\nBatch), it's the responsibility of the plugin to fetch the secret (which means propeller identity will need access\nto the secret) and to pass it to the remote execution engine." - } - }, - "description": "SecurityContext holds security attributes that apply to tasks." - }, - "coreSimpleType": { - "type": "string", - "enum": [ - "NONE", - "INTEGER", - "FLOAT", - "STRING", - "BOOLEAN", - "DATETIME", - "DURATION", - "BINARY", - "ERROR", - "STRUCT" - ], - "default": "NONE", - "description": "Define a set of simple types." - }, - "coreStructuredDataset": { - "type": "object", - "properties": { - "uri": { - "type": "string", - "title": "String location uniquely identifying where the data is.\nShould start with the storage location (e.g. 
s3://, gs://, bq://, etc.)" - }, - "metadata": { - "$ref": "#/definitions/coreStructuredDatasetMetadata" - } - } - }, - "coreStructuredDatasetMetadata": { - "type": "object", - "properties": { - "structured_dataset_type": { - "$ref": "#/definitions/coreStructuredDatasetType", - "description": "Bundle the type information along with the literal.\nThis is here because StructuredDatasets can often be more defined at run time than at compile time.\nThat is, at compile time you might only declare a task to return a pandas dataframe or a StructuredDataset,\nwithout any column information, but at run time, you might have that column information.\nflytekit python will copy this type information into the literal, from the type information, if not provided by\nthe various plugins (encoders).\nSince this field is run time generated, it's not used for any type checking." - } - } - }, - "coreStructuredDatasetType": { - "type": "object", - "properties": { - "columns": { - "type": "array", - "items": { - "$ref": "#/definitions/StructuredDatasetTypeDatasetColumn" - }, - "description": "A list of ordered columns this schema comprises of." - }, - "format": { - "type": "string", - "description": "This is the storage format, the format of the bits at rest\nparquet, feather, csv, etc.\nFor two types to be compatible, the format will need to be an exact match." - }, - "external_schema_type": { - "type": "string", - "description": "This is a string representing the type that the bytes in external_schema_bytes are formatted in.\nThis is an optional field that will not be used for type checking." - }, - "external_schema_bytes": { - "type": "string", - "format": "byte", - "description": "The serialized bytes of a third-party schema library like Arrow.\nThis is an optional field that will not be used for type checking." 
- } - } - }, - "coreTypeAnnotation": { - "type": "object", - "properties": { - "annotations": { - "$ref": "#/definitions/protobufStruct", - "description": "A arbitrary JSON payload to describe a type." - } - }, - "description": "TypeAnnotation encapsulates registration time information about a type. This can be used for various control-plane operations. TypeAnnotation will not be available at runtime when a task runs." - }, - "coreTypeStructure": { - "type": "object", - "properties": { - "tag": { - "type": "string", - "title": "Must exactly match for types to be castable" - }, - "dataclass_type": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/coreLiteralType" - }, - "title": "dataclass_type only exists for dataclasses.\nThis is used to resolve the type of the fields of dataclass\nThe key is the field name, and the value is the literal type of the field\ne.g. For dataclass Foo, with fields a, and a is a string\nFoo.a will be resolved as a literal type of string from dataclass_type" - } - }, - "description": "Hints to improve type matching\ne.g. allows distinguishing output from custom type transformers\neven if the underlying IDL serialization matches." - }, - "coreUnion": { - "type": "object", - "properties": { - "value": { - "$ref": "#/definitions/coreLiteral" - }, - "type": { - "$ref": "#/definitions/coreLiteralType" - } - }, - "description": "The runtime representation of a tagged union value. See `UnionType` for more details." - }, - "coreUnionType": { - "type": "object", - "properties": { - "variants": { - "type": "array", - "items": { - "$ref": "#/definitions/coreLiteralType" - }, - "description": "Predefined set of variants in union." - } - }, - "description": "Defines a tagged union type, also known as a variant (and formally as the sum type).\n\nA sum type S is defined by a sequence of types (A, B, C, ...), each tagged by a string tag\nA value of type S is constructed from a value of any of the variant types. 
The specific choice of type is recorded by\nstoring the varaint's tag with the literal value and can be examined in runtime.\n\nType S is typically written as\nS := Apple A | Banana B | Cantaloupe C | ...\n\nNotably, a nullable (optional) type is a sum type between some type X and the singleton type representing a null-value:\nOptional X := X | Null\n\nSee also: https://en.wikipedia.org/wiki/Tagged_union" - }, - "coreVariable": { - "type": "object", - "properties": { - "type": { - "$ref": "#/definitions/coreLiteralType", - "description": "Variable literal type." - }, - "description": { - "type": "string", - "title": "+optional string describing input variable" - }, - "artifact_partial_id": { - "$ref": "#/definitions/coreArtifactID", - "description": "+optional This object allows the user to specify how Artifacts are created.\nname, tag, partitions can be specified. The other fields (version and project/domain) are ignored." - }, - "artifact_tag": { - "$ref": "#/definitions/coreArtifactTag" - } - }, - "description": "Defines a strongly typed variable." - }, - "coreVariableMap": { - "type": "object", - "properties": { - "variables": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/coreVariable" - }, - "description": "Defines a map of variable names to variables." - } - }, - "title": "A map of Variables" - }, - "coreVoid": { - "type": "object", - "description": "Used to denote a nil/null/None assignment to a scalar value. The underlying LiteralType for Void is intentionally\nundefined since it can be assigned to a scalar of any LiteralType." - }, - "coreWorkflowExecutionIdentifier": { - "type": "object", - "properties": { - "project": { - "type": "string", - "description": "Name of the project the resource belongs to." - }, - "domain": { - "type": "string", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project." 
- }, - "name": { - "type": "string", - "description": "User or system provided value for the resource." - } - }, - "title": "Encapsulation of fields that uniquely identifies a Flyte workflow execution" - }, - "coreWorkflowExecutionPhase": { - "type": "string", - "enum": [ - "UNDEFINED", - "QUEUED", - "RUNNING", - "SUCCEEDING", - "SUCCEEDED", - "FAILING", - "FAILED", - "ABORTED", - "TIMED_OUT", - "ABORTING" - ], - "default": "UNDEFINED" - }, - "protobufAny": { - "type": "object", - "properties": { - "type_url": { - "type": "string", - "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. This string must contain at least\none \"/\" character. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." 
- }, - "value": { - "type": "string", - "format": "byte", - "description": "Must be a valid serialized protocol buffer of the above specified type." - } - }, - "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := \u0026pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. 
Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" - }, - "protobufListValue": { - "type": "object", - "properties": { - "values": { - "type": "array", - "items": { - "$ref": "#/definitions/protobufValue" - }, - "description": "Repeated field of dynamically typed values." - } - }, - "description": "`ListValue` is a wrapper around a repeated field of values.\n\nThe JSON representation for `ListValue` is JSON array." - }, - "protobufNullValue": { - "type": "string", - "enum": [ - "NULL_VALUE" - ], - "default": "NULL_VALUE", - "description": "`NullValue` is a singleton enumeration to represent the null value for the\n`Value` type union.\n\n The JSON representation for `NullValue` is JSON `null`.\n\n - NULL_VALUE: Null value." - }, - "protobufStruct": { - "type": "object", - "properties": { - "fields": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/protobufValue" - }, - "description": "Unordered map of dynamically typed values." - } - }, - "description": "`Struct` represents a structured data value, consisting of fields\nwhich map to dynamically typed values. In some languages, `Struct`\nmight be supported by a native representation. For example, in\nscripting languages like JS a struct is represented as an\nobject. The details of that representation are described together\nwith the proto support for the language.\n\nThe JSON representation for `Struct` is JSON object." 
- }, - "protobufValue": { - "type": "object", - "properties": { - "null_value": { - "$ref": "#/definitions/protobufNullValue", - "description": "Represents a null value." - }, - "number_value": { - "type": "number", - "format": "double", - "description": "Represents a double value." - }, - "string_value": { - "type": "string", - "description": "Represents a string value." - }, - "bool_value": { - "type": "boolean", - "format": "boolean", - "description": "Represents a boolean value." - }, - "struct_value": { - "$ref": "#/definitions/protobufStruct", - "description": "Represents a structured value." - }, - "list_value": { - "$ref": "#/definitions/protobufListValue", - "description": "Represents a repeated `Value`." - } - }, - "description": "`Value` represents a dynamically typed value which can be either\nnull, a number, a string, a boolean, a recursive struct value, or a\nlist of values. A producer of value is expected to set one of that\nvariants, absence of any variant indicates an error.\n\nThe JSON representation for `Value` is JSON value." 
- } - } -} diff --git a/flyteidl/gen/pb-go/flyteidl/core/artifact_id.pb.go b/flyteidl/gen/pb-go/flyteidl/core/artifact_id.pb.go index bf7ba4679f..ec28336db7 100644 --- a/flyteidl/gen/pb-go/flyteidl/core/artifact_id.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/core/artifact_id.pb.go @@ -6,6 +6,7 @@ package core import ( fmt "fmt" proto "github.com/golang/protobuf/proto" + timestamp "github.com/golang/protobuf/ptypes/timestamp" math "math" ) @@ -80,8 +81,13 @@ func (m *ArtifactKey) GetName() string { type ArtifactBindingData struct { Index uint32 `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty"` // These two fields are only relevant in the partition value case - PartitionKey string `protobuf:"bytes,2,opt,name=partition_key,json=partitionKey,proto3" json:"partition_key,omitempty"` - Transform string `protobuf:"bytes,3,opt,name=transform,proto3" json:"transform,omitempty"` + // + // Types that are valid to be assigned to PartitionData: + // *ArtifactBindingData_PartitionKey + // *ArtifactBindingData_BindToTimePartition + PartitionData isArtifactBindingData_PartitionData `protobuf_oneof:"partition_data"` + // This is only relevant in the time partition case + Transform string `protobuf:"bytes,4,opt,name=transform,proto3" json:"transform,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -119,13 +125,43 @@ func (m *ArtifactBindingData) GetIndex() uint32 { return 0 } -func (m *ArtifactBindingData) GetPartitionKey() string { +type isArtifactBindingData_PartitionData interface { + isArtifactBindingData_PartitionData() +} + +type ArtifactBindingData_PartitionKey struct { + PartitionKey string `protobuf:"bytes,2,opt,name=partition_key,json=partitionKey,proto3,oneof"` +} + +type ArtifactBindingData_BindToTimePartition struct { + BindToTimePartition bool `protobuf:"varint,3,opt,name=bind_to_time_partition,json=bindToTimePartition,proto3,oneof"` +} + +func (*ArtifactBindingData_PartitionKey) 
isArtifactBindingData_PartitionData() {} + +func (*ArtifactBindingData_BindToTimePartition) isArtifactBindingData_PartitionData() {} + +func (m *ArtifactBindingData) GetPartitionData() isArtifactBindingData_PartitionData { if m != nil { - return m.PartitionKey + return m.PartitionData + } + return nil +} + +func (m *ArtifactBindingData) GetPartitionKey() string { + if x, ok := m.GetPartitionData().(*ArtifactBindingData_PartitionKey); ok { + return x.PartitionKey } return "" } +func (m *ArtifactBindingData) GetBindToTimePartition() bool { + if x, ok := m.GetPartitionData().(*ArtifactBindingData_BindToTimePartition); ok { + return x.BindToTimePartition + } + return false +} + func (m *ArtifactBindingData) GetTransform() string { if m != nil { return m.Transform @@ -133,6 +169,14 @@ func (m *ArtifactBindingData) GetTransform() string { return "" } +// XXX_OneofWrappers is for the internal use of the proto package. +func (*ArtifactBindingData) XXX_OneofWrappers() []interface{} { + return []interface{}{ + (*ArtifactBindingData_PartitionKey)(nil), + (*ArtifactBindingData_BindToTimePartition)(nil), + } +} + type InputBindingData struct { Var string `protobuf:"bytes,1,opt,name=var,proto3" json:"var,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -175,6 +219,7 @@ func (m *InputBindingData) GetVar() string { type LabelValue struct { // Types that are valid to be assigned to Value: // *LabelValue_StaticValue + // *LabelValue_TimeValue // *LabelValue_TriggeredBinding // *LabelValue_InputBinding Value isLabelValue_Value `protobuf_oneof:"value"` @@ -216,16 +261,22 @@ type LabelValue_StaticValue struct { StaticValue string `protobuf:"bytes,1,opt,name=static_value,json=staticValue,proto3,oneof"` } +type LabelValue_TimeValue struct { + TimeValue *timestamp.Timestamp `protobuf:"bytes,2,opt,name=time_value,json=timeValue,proto3,oneof"` +} + type LabelValue_TriggeredBinding struct { - TriggeredBinding *ArtifactBindingData 
`protobuf:"bytes,2,opt,name=triggered_binding,json=triggeredBinding,proto3,oneof"` + TriggeredBinding *ArtifactBindingData `protobuf:"bytes,3,opt,name=triggered_binding,json=triggeredBinding,proto3,oneof"` } type LabelValue_InputBinding struct { - InputBinding *InputBindingData `protobuf:"bytes,3,opt,name=input_binding,json=inputBinding,proto3,oneof"` + InputBinding *InputBindingData `protobuf:"bytes,4,opt,name=input_binding,json=inputBinding,proto3,oneof"` } func (*LabelValue_StaticValue) isLabelValue_Value() {} +func (*LabelValue_TimeValue) isLabelValue_Value() {} + func (*LabelValue_TriggeredBinding) isLabelValue_Value() {} func (*LabelValue_InputBinding) isLabelValue_Value() {} @@ -244,6 +295,13 @@ func (m *LabelValue) GetStaticValue() string { return "" } +func (m *LabelValue) GetTimeValue() *timestamp.Timestamp { + if x, ok := m.GetValue().(*LabelValue_TimeValue); ok { + return x.TimeValue + } + return nil +} + func (m *LabelValue) GetTriggeredBinding() *ArtifactBindingData { if x, ok := m.GetValue().(*LabelValue_TriggeredBinding); ok { return x.TriggeredBinding @@ -262,6 +320,7 @@ func (m *LabelValue) GetInputBinding() *InputBindingData { func (*LabelValue) XXX_OneofWrappers() []interface{} { return []interface{}{ (*LabelValue_StaticValue)(nil), + (*LabelValue_TimeValue)(nil), (*LabelValue_TriggeredBinding)(nil), (*LabelValue_InputBinding)(nil), } @@ -306,29 +365,63 @@ func (m *Partitions) GetValue() map[string]*LabelValue { return nil } +type TimePartition struct { + Value *LabelValue `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimePartition) Reset() { *m = TimePartition{} } +func (m *TimePartition) String() string { return proto.CompactTextString(m) } +func (*TimePartition) ProtoMessage() {} +func (*TimePartition) Descriptor() ([]byte, []int) { + return fileDescriptor_1079b0707e23f978, []int{5} +} + +func (m 
*TimePartition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimePartition.Unmarshal(m, b) +} +func (m *TimePartition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimePartition.Marshal(b, m, deterministic) +} +func (m *TimePartition) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimePartition.Merge(m, src) +} +func (m *TimePartition) XXX_Size() int { + return xxx_messageInfo_TimePartition.Size(m) +} +func (m *TimePartition) XXX_DiscardUnknown() { + xxx_messageInfo_TimePartition.DiscardUnknown(m) +} + +var xxx_messageInfo_TimePartition proto.InternalMessageInfo + +func (m *TimePartition) GetValue() *LabelValue { + if m != nil { + return m.Value + } + return nil +} + type ArtifactID struct { ArtifactKey *ArtifactKey `protobuf:"bytes,1,opt,name=artifact_key,json=artifactKey,proto3" json:"artifact_key,omitempty"` Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` // Think of a partition as a tag on an Artifact, except it's a key-value pair. // Different partitions naturally have different versions (execution ids). - // This is a oneof because of partition querying... we need a way to distinguish between - // a user not-specifying partitions when searching, and specifically searching for an Artifact - // that is not partitioned (this can happen if an Artifact goes from partitioned to non- - // partitioned across executions). - // - // Types that are valid to be assigned to Dimensions: - // *ArtifactID_Partitions - Dimensions isArtifactID_Dimensions `protobuf_oneof:"dimensions"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Partitions *Partitions `protobuf:"bytes,3,opt,name=partitions,proto3" json:"partitions,omitempty"` + // There is no such thing as an empty time partition - if it's not set, then there is no time partition. 
+ TimePartition *TimePartition `protobuf:"bytes,4,opt,name=time_partition,json=timePartition,proto3" json:"time_partition,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ArtifactID) Reset() { *m = ArtifactID{} } func (m *ArtifactID) String() string { return proto.CompactTextString(m) } func (*ArtifactID) ProtoMessage() {} func (*ArtifactID) Descriptor() ([]byte, []int) { - return fileDescriptor_1079b0707e23f978, []int{5} + return fileDescriptor_1079b0707e23f978, []int{6} } func (m *ArtifactID) XXX_Unmarshal(b []byte) error { @@ -363,37 +456,20 @@ func (m *ArtifactID) GetVersion() string { return "" } -type isArtifactID_Dimensions interface { - isArtifactID_Dimensions() -} - -type ArtifactID_Partitions struct { - Partitions *Partitions `protobuf:"bytes,3,opt,name=partitions,proto3,oneof"` -} - -func (*ArtifactID_Partitions) isArtifactID_Dimensions() {} - -func (m *ArtifactID) GetDimensions() isArtifactID_Dimensions { +func (m *ArtifactID) GetPartitions() *Partitions { if m != nil { - return m.Dimensions + return m.Partitions } return nil } -func (m *ArtifactID) GetPartitions() *Partitions { - if x, ok := m.GetDimensions().(*ArtifactID_Partitions); ok { - return x.Partitions +func (m *ArtifactID) GetTimePartition() *TimePartition { + if m != nil { + return m.TimePartition } return nil } -// XXX_OneofWrappers is for the internal use of the proto package. 
-func (*ArtifactID) XXX_OneofWrappers() []interface{} { - return []interface{}{ - (*ArtifactID_Partitions)(nil), - } -} - type ArtifactTag struct { ArtifactKey *ArtifactKey `protobuf:"bytes,1,opt,name=artifact_key,json=artifactKey,proto3" json:"artifact_key,omitempty"` Value *LabelValue `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` @@ -406,7 +482,7 @@ func (m *ArtifactTag) Reset() { *m = ArtifactTag{} } func (m *ArtifactTag) String() string { return proto.CompactTextString(m) } func (*ArtifactTag) ProtoMessage() {} func (*ArtifactTag) Descriptor() ([]byte, []int) { - return fileDescriptor_1079b0707e23f978, []int{6} + return fileDescriptor_1079b0707e23f978, []int{7} } func (m *ArtifactTag) XXX_Unmarshal(b []byte) error { @@ -462,7 +538,7 @@ func (m *ArtifactQuery) Reset() { *m = ArtifactQuery{} } func (m *ArtifactQuery) String() string { return proto.CompactTextString(m) } func (*ArtifactQuery) ProtoMessage() {} func (*ArtifactQuery) Descriptor() ([]byte, []int) { - return fileDescriptor_1079b0707e23f978, []int{7} + return fileDescriptor_1079b0707e23f978, []int{8} } func (m *ArtifactQuery) XXX_Unmarshal(b []byte) error { @@ -556,56 +632,6 @@ func (*ArtifactQuery) XXX_OneofWrappers() []interface{} { } } -type Trigger struct { - // This will be set to a launch plan type, but note that this is different than the actual launch plan type. - TriggerId *Identifier `protobuf:"bytes,1,opt,name=trigger_id,json=triggerId,proto3" json:"trigger_id,omitempty"` - // These are partial artifact IDs that will be triggered on - // Consider making these ArtifactQuery instead. 
- Triggers []*ArtifactID `protobuf:"bytes,2,rep,name=triggers,proto3" json:"triggers,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *Trigger) Reset() { *m = Trigger{} } -func (m *Trigger) String() string { return proto.CompactTextString(m) } -func (*Trigger) ProtoMessage() {} -func (*Trigger) Descriptor() ([]byte, []int) { - return fileDescriptor_1079b0707e23f978, []int{8} -} - -func (m *Trigger) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Trigger.Unmarshal(m, b) -} -func (m *Trigger) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Trigger.Marshal(b, m, deterministic) -} -func (m *Trigger) XXX_Merge(src proto.Message) { - xxx_messageInfo_Trigger.Merge(m, src) -} -func (m *Trigger) XXX_Size() int { - return xxx_messageInfo_Trigger.Size(m) -} -func (m *Trigger) XXX_DiscardUnknown() { - xxx_messageInfo_Trigger.DiscardUnknown(m) -} - -var xxx_messageInfo_Trigger proto.InternalMessageInfo - -func (m *Trigger) GetTriggerId() *Identifier { - if m != nil { - return m.TriggerId - } - return nil -} - -func (m *Trigger) GetTriggers() []*ArtifactID { - if m != nil { - return m.Triggers - } - return nil -} - func init() { proto.RegisterType((*ArtifactKey)(nil), "flyteidl.core.ArtifactKey") proto.RegisterType((*ArtifactBindingData)(nil), "flyteidl.core.ArtifactBindingData") @@ -613,53 +639,57 @@ func init() { proto.RegisterType((*LabelValue)(nil), "flyteidl.core.LabelValue") proto.RegisterType((*Partitions)(nil), "flyteidl.core.Partitions") proto.RegisterMapType((map[string]*LabelValue)(nil), "flyteidl.core.Partitions.ValueEntry") + proto.RegisterType((*TimePartition)(nil), "flyteidl.core.TimePartition") proto.RegisterType((*ArtifactID)(nil), "flyteidl.core.ArtifactID") proto.RegisterType((*ArtifactTag)(nil), "flyteidl.core.ArtifactTag") proto.RegisterType((*ArtifactQuery)(nil), "flyteidl.core.ArtifactQuery") - 
proto.RegisterType((*Trigger)(nil), "flyteidl.core.Trigger") } func init() { proto.RegisterFile("flyteidl/core/artifact_id.proto", fileDescriptor_1079b0707e23f978) } var fileDescriptor_1079b0707e23f978 = []byte{ - // 612 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x54, 0xc1, 0x6e, 0xd3, 0x4c, - 0x10, 0x8e, 0x9b, 0xb6, 0xf9, 0x3b, 0x4e, 0xa4, 0xfe, 0x0b, 0x42, 0x4e, 0x84, 0x68, 0xe5, 0xf6, - 0xd0, 0x0b, 0xb6, 0x14, 0x84, 0x54, 0x85, 0x02, 0x22, 0x0a, 0x28, 0x56, 0x39, 0x50, 0x37, 0xe2, - 0xc0, 0x25, 0xda, 0xc4, 0x1b, 0xb3, 0x34, 0x59, 0x47, 0xeb, 0x4d, 0x84, 0x91, 0x78, 0x14, 0xde, - 0x81, 0x97, 0xe1, 0x3d, 0x78, 0x04, 0xb4, 0xeb, 0x5d, 0x3b, 0x89, 0xf0, 0xa1, 0xe2, 0xb6, 0x33, - 0xf9, 0xe6, 0xf3, 0x37, 0x5f, 0x66, 0x06, 0x4e, 0x66, 0xf3, 0x4c, 0x10, 0x1a, 0xcd, 0xfd, 0x69, - 0xc2, 0x89, 0x8f, 0xb9, 0xa0, 0x33, 0x3c, 0x15, 0x63, 0x1a, 0x79, 0x4b, 0x9e, 0x88, 0x04, 0xb5, - 0x0c, 0xc0, 0x93, 0x80, 0xce, 0x93, 0x6d, 0x3c, 0x8d, 0x08, 0x13, 0x74, 0x46, 0x09, 0xcf, 0xe1, - 0xee, 0x2d, 0xd8, 0x6f, 0x34, 0xc7, 0x35, 0xc9, 0x90, 0x03, 0x8d, 0x25, 0x4f, 0xbe, 0x90, 0xa9, - 0x70, 0xac, 0x53, 0xeb, 0xe2, 0x28, 0x34, 0x21, 0x7a, 0x04, 0x87, 0x51, 0xb2, 0xc0, 0x94, 0x39, - 0x7b, 0xea, 0x07, 0x1d, 0x21, 0x04, 0xfb, 0x0c, 0x2f, 0x88, 0x53, 0x57, 0x59, 0xf5, 0x76, 0x19, - 0x3c, 0x30, 0xa4, 0x7d, 0xca, 0x22, 0xca, 0xe2, 0x01, 0x16, 0x18, 0x3d, 0x84, 0x03, 0xca, 0x22, - 0xf2, 0x55, 0x51, 0xb7, 0xc2, 0x3c, 0x40, 0x67, 0xd0, 0x5a, 0xca, 0x36, 0x04, 0x4d, 0xd8, 0xf8, - 0x8e, 0x64, 0x9a, 0xbf, 0x59, 0x24, 0xa5, 0xae, 0xc7, 0x70, 0x24, 0x38, 0x66, 0xe9, 0x2c, 0xe1, - 0x0b, 0xfd, 0xa9, 0x32, 0xe1, 0x9e, 0xc3, 0x71, 0xc0, 0x96, 0xab, 0xad, 0x8f, 0x1d, 0x43, 0x7d, - 0x8d, 0xb9, 0xee, 0x42, 0x3e, 0xdd, 0x5f, 0x16, 0xc0, 0x7b, 0x3c, 0x21, 0xf3, 0x8f, 0x78, 0xbe, - 0x22, 0xe8, 0x0c, 0x9a, 0xa9, 0xc0, 0x82, 0x4e, 0xc7, 0x6b, 0x19, 0xe7, 0xc8, 0x61, 0x2d, 0xb4, - 0xf3, 0x6c, 0x0e, 0xba, 0x81, 0xff, 0x05, 0xa7, 0x71, 0x4c, 0x38, 0x89, 
0xc6, 0x93, 0x9c, 0x5e, - 0x09, 0xb4, 0xbb, 0xae, 0xb7, 0xe5, 0xb4, 0xf7, 0x97, 0x8e, 0x87, 0xb5, 0xf0, 0xb8, 0x28, 0xd7, - 0x79, 0xf4, 0x0e, 0x5a, 0x54, 0x8a, 0x2d, 0xe8, 0xea, 0x8a, 0xee, 0x64, 0x87, 0x6e, 0xb7, 0xa1, - 0x61, 0x2d, 0x6c, 0xd2, 0x8d, 0x5c, 0xbf, 0x01, 0x07, 0x4a, 0xb8, 0xfb, 0xc3, 0x02, 0xf8, 0x60, - 0xcc, 0x4a, 0x51, 0x4f, 0xe7, 0x1d, 0xeb, 0xb4, 0x7e, 0x61, 0x77, 0xcf, 0x77, 0x78, 0x4b, 0xa4, - 0xa7, 0x5a, 0x7c, 0xcb, 0x04, 0xcf, 0xc2, 0xbc, 0xa4, 0x73, 0x0b, 0x50, 0x26, 0xa5, 0x85, 0xf2, - 0xff, 0xd0, 0x16, 0xde, 0x91, 0x0c, 0xf9, 0x86, 0x3b, 0xb7, 0xa0, 0xbd, 0xc3, 0x5d, 0xba, 0xab, - 0x09, 0x7b, 0x7b, 0x97, 0x96, 0xfb, 0xd3, 0x02, 0x30, 0xe6, 0x04, 0x03, 0xf4, 0x12, 0x9a, 0xc5, - 0xd4, 0x1a, 0x7a, 0xbb, 0xdb, 0xa9, 0x70, 0xf3, 0x9a, 0x64, 0xa1, 0x8d, 0xb7, 0x27, 0x74, 0x4d, - 0x78, 0x4a, 0x13, 0x33, 0x88, 0x26, 0x44, 0x2f, 0x00, 0x8a, 0x99, 0x49, 0xb5, 0xab, 0xed, 0xca, - 0xee, 0x87, 0xb5, 0x70, 0x03, 0xde, 0x6f, 0x02, 0x44, 0x74, 0x41, 0x98, 0x64, 0x4a, 0xdd, 0xef, - 0xe5, 0x56, 0x8c, 0x70, 0xfc, 0xaf, 0x92, 0xef, 0xeb, 0x9a, 0xfb, 0xdb, 0x82, 0x96, 0x61, 0xbb, - 0x59, 0x11, 0x9e, 0xa1, 0x2b, 0xb0, 0x37, 0x56, 0x5d, 0x0b, 0x68, 0x57, 0x08, 0x08, 0x06, 0xb2, - 0x39, 0x83, 0x0f, 0x22, 0xf4, 0x7a, 0x43, 0xbf, 0xc0, 0x66, 0x80, 0xab, 0xf4, 0x8f, 0x70, 0x2c, - 0xd7, 0x00, 0x6f, 0x18, 0x80, 0xa0, 0xbe, 0xe2, 0x34, 0x5f, 0xbc, 0x61, 0x2d, 0x94, 0x01, 0x7a, - 0x05, 0x0d, 0x33, 0xc1, 0xfb, 0xf7, 0x58, 0x08, 0x53, 0x24, 0x1d, 0x2f, 0xaf, 0x91, 0xfb, 0x0d, - 0x1a, 0xa3, 0x7c, 0x53, 0xd0, 0x25, 0x80, 0x5e, 0x9a, 0xea, 0x56, 0x83, 0xa2, 0x52, 0xde, 0x01, - 0x05, 0x0e, 0x22, 0xf4, 0x1c, 0xfe, 0xd3, 0x41, 0xea, 0xec, 0xa9, 0xe9, 0xaf, 0xb6, 0x28, 0x2c, - 0xa0, 0xfd, 0xab, 0x4f, 0xbd, 0x98, 0x8a, 0xcf, 0xab, 0x89, 0x37, 0x4d, 0x16, 0xbe, 0x2a, 0x48, - 0x78, 0x9c, 0x3f, 0xfc, 0xe2, 0x7e, 0xc6, 0x84, 0xf9, 0xcb, 0xc9, 0xd3, 0x38, 0xf1, 0xb7, 0x4e, - 0xea, 0xe4, 0x50, 0x1d, 0xd2, 0x67, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x29, 0x9b, 0xf7, 0x47, - 0x9a, 0x05, 
0x00, 0x00, + // 683 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x54, 0xcd, 0x6e, 0xd3, 0x40, + 0x10, 0xb6, 0x9b, 0xfe, 0xd0, 0x71, 0x5c, 0x85, 0x2d, 0xaa, 0xd2, 0xa8, 0xa2, 0x95, 0x29, 0x52, + 0x2f, 0xd8, 0x52, 0x11, 0x12, 0x94, 0xf2, 0x17, 0x0a, 0x72, 0x55, 0x0e, 0xd4, 0x8d, 0x38, 0x70, + 0xb1, 0xd6, 0xf1, 0xc6, 0x2c, 0x8d, 0xbd, 0xd1, 0x66, 0x53, 0xe1, 0x03, 0x2f, 0xc1, 0x9d, 0x07, + 0xe1, 0x81, 0xb8, 0xf3, 0x08, 0x68, 0x77, 0xfd, 0x13, 0x47, 0xad, 0x54, 0xc4, 0x6d, 0x67, 0x76, + 0xe6, 0xdb, 0x99, 0xef, 0xdb, 0x19, 0xd8, 0x1d, 0x8d, 0x73, 0x41, 0x68, 0x3c, 0xf6, 0x86, 0x8c, + 0x13, 0x0f, 0x73, 0x41, 0x47, 0x78, 0x28, 0x42, 0x1a, 0xbb, 0x13, 0xce, 0x04, 0x43, 0x76, 0x19, + 0xe0, 0xca, 0x80, 0xde, 0x6e, 0xc2, 0x58, 0x32, 0x26, 0x9e, 0xba, 0x8c, 0x66, 0x23, 0x4f, 0xd0, + 0x94, 0x4c, 0x05, 0x4e, 0x27, 0x3a, 0xbe, 0x77, 0xbf, 0x09, 0x48, 0x63, 0x92, 0x09, 0x3a, 0xa2, + 0x84, 0xeb, 0x7b, 0xe7, 0x02, 0xac, 0x37, 0xc5, 0x23, 0x67, 0x24, 0x47, 0x5d, 0x58, 0x9b, 0x70, + 0xf6, 0x95, 0x0c, 0x45, 0xd7, 0xdc, 0x33, 0x0f, 0xd6, 0x83, 0xd2, 0x44, 0x5b, 0xb0, 0x1a, 0xb3, + 0x14, 0xd3, 0xac, 0xbb, 0xa4, 0x2e, 0x0a, 0x0b, 0x21, 0x58, 0xce, 0x70, 0x4a, 0xba, 0x2d, 0xe5, + 0x55, 0x67, 0xe7, 0x97, 0x09, 0x9b, 0x25, 0x6a, 0x9f, 0x66, 0x31, 0xcd, 0x92, 0x13, 0x2c, 0x30, + 0xba, 0x07, 0x2b, 0x34, 0x8b, 0xc9, 0x37, 0x85, 0x6d, 0x07, 0xda, 0x40, 0x0f, 0xc1, 0x9e, 0xc8, + 0x46, 0x05, 0x65, 0x59, 0x78, 0x49, 0x72, 0xfd, 0x80, 0x6f, 0x04, 0xed, 0xca, 0x2d, 0x4b, 0x7b, + 0x02, 0x5b, 0x11, 0xcd, 0xe2, 0x50, 0xb0, 0x50, 0x36, 0x19, 0x56, 0x97, 0xea, 0xe9, 0x3b, 0xbe, + 0x11, 0x6c, 0xca, 0xfb, 0x01, 0x1b, 0xd0, 0x94, 0x7c, 0x2c, 0x2f, 0xd1, 0x0e, 0xac, 0x0b, 0x8e, + 0xb3, 0xe9, 0x88, 0xf1, 0xb4, 0xbb, 0xac, 0x8a, 0xac, 0x1d, 0xfd, 0x0e, 0x6c, 0xd4, 0x6f, 0xc7, + 0x58, 0x60, 0x67, 0x1f, 0x3a, 0xa7, 0xd9, 0x64, 0xd6, 0xa8, 0xbb, 0x03, 0xad, 0x2b, 0xcc, 0x0b, + 0x46, 0xe4, 0xd1, 0xf9, 0xb1, 0x04, 0xf0, 0x01, 0x47, 0x64, 0xfc, 0x09, 
0x8f, 0x67, 0x04, 0x3d, + 0x80, 0xf6, 0x54, 0x60, 0x41, 0x87, 0xe1, 0x95, 0xb4, 0x75, 0xa4, 0x6f, 0x04, 0x96, 0xf6, 0xea, + 0xa0, 0xe7, 0x00, 0xaa, 0x70, 0x1d, 0x22, 0x9b, 0xb4, 0x0e, 0x7b, 0xae, 0x16, 0xd0, 0x2d, 0x05, + 0x74, 0x07, 0xa5, 0x80, 0xbe, 0x11, 0xac, 0xcb, 0x78, 0x9d, 0x7c, 0x0e, 0x77, 0x05, 0xa7, 0x49, + 0x42, 0x38, 0x89, 0xc3, 0x48, 0xd7, 0xa6, 0x1a, 0xb7, 0x0e, 0x1d, 0xb7, 0xf1, 0x27, 0xdc, 0x6b, + 0x98, 0xf7, 0x8d, 0xa0, 0x53, 0xa5, 0x17, 0x7e, 0xf4, 0x1e, 0x6c, 0x2a, 0x3b, 0xad, 0xe0, 0x96, + 0x15, 0xdc, 0xee, 0x02, 0xdc, 0x22, 0x1b, 0x52, 0x18, 0x3a, 0xe7, 0xeb, 0xaf, 0xc1, 0x8a, 0x6a, + 0xc9, 0xf9, 0x69, 0x02, 0x54, 0xc4, 0x4f, 0xd1, 0x51, 0xe1, 0xef, 0x9a, 0x7b, 0xad, 0x03, 0xeb, + 0x70, 0x7f, 0x01, 0xb7, 0x8e, 0x74, 0x55, 0x8b, 0xef, 0x32, 0xc1, 0xf3, 0x40, 0xa7, 0xf4, 0x2e, + 0x00, 0x6a, 0xa7, 0xe4, 0x5f, 0xfe, 0x8b, 0x82, 0xff, 0x4b, 0x92, 0x23, 0xaf, 0xc4, 0xd6, 0x34, + 0x6e, 0x2f, 0x60, 0xd7, 0xd2, 0x14, 0x80, 0x47, 0x4b, 0x4f, 0x4d, 0xe7, 0x35, 0xd8, 0xcd, 0xbf, + 0xe1, 0xd5, 0x15, 0xde, 0x0a, 0xc5, 0xf9, 0x6d, 0x02, 0x94, 0xf4, 0x9e, 0x9e, 0xa0, 0x17, 0xd0, + 0xae, 0x26, 0xb4, 0x2c, 0x50, 0x6a, 0x7a, 0xbd, 0x1e, 0x67, 0x24, 0x0f, 0x2c, 0xdc, 0x1c, 0xb6, + 0x2b, 0xc2, 0xa7, 0xf2, 0x0b, 0xeb, 0x99, 0x2a, 0x4d, 0xf4, 0x0c, 0xa0, 0xfa, 0x96, 0xd3, 0x42, + 0xe6, 0xed, 0x1b, 0xf9, 0x0b, 0xe6, 0x82, 0xd1, 0x5b, 0xd8, 0x58, 0x18, 0x0f, 0x2d, 0xeb, 0xce, + 0x42, 0x7a, 0x83, 0x89, 0xc0, 0x16, 0xf3, 0xa6, 0xf3, 0xbd, 0xde, 0x0a, 0x03, 0x9c, 0xfc, 0x6f, + 0x9f, 0xff, 0x2a, 0x96, 0xf3, 0xc7, 0x04, 0xbb, 0x44, 0x3b, 0x9f, 0x11, 0x9e, 0xa3, 0x63, 0xb0, + 0xe6, 0x76, 0xe1, 0x0d, 0x7a, 0xd5, 0xca, 0xf8, 0x46, 0x00, 0x65, 0xfc, 0x69, 0x8c, 0x5e, 0xcd, + 0xd5, 0x2f, 0x70, 0x52, 0xcd, 0xde, 0xf5, 0xe9, 0x03, 0x9c, 0xc8, 0xd1, 0xc5, 0x73, 0x04, 0x20, + 0x68, 0xcd, 0x38, 0xd5, 0x3b, 0xce, 0x37, 0x02, 0x69, 0xa0, 0x97, 0xb0, 0xd6, 0x1c, 0x9c, 0xdb, + 0xcd, 0x61, 0x99, 0xd4, 0x6f, 0x03, 0xd4, 0xdb, 0xb8, 0x7f, 0xfc, 0xf9, 0x28, 0xa1, 0xe2, 0xcb, + 0x2c, 0x72, 
0x87, 0x2c, 0xf5, 0x14, 0x10, 0xe3, 0x89, 0x3e, 0x78, 0xd5, 0x0e, 0x4f, 0x48, 0xe6, + 0x4d, 0xa2, 0x47, 0x09, 0xf3, 0x1a, 0x6b, 0x3d, 0x5a, 0x55, 0xeb, 0xe3, 0xf1, 0xdf, 0x00, 0x00, + 0x00, 0xff, 0xff, 0xdf, 0x09, 0x3c, 0xc7, 0x3f, 0x06, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/core/catalog.pb.go b/flyteidl/gen/pb-go/flyteidl/core/catalog.pb.go index 47675cbb7a..66f06900fa 100644 --- a/flyteidl/gen/pb-go/flyteidl/core/catalog.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/core/catalog.pb.go @@ -38,6 +38,8 @@ const ( CatalogCacheStatus_CACHE_PUT_FAILURE CatalogCacheStatus = 5 // Used to indicate the cache lookup was skipped CatalogCacheStatus_CACHE_SKIPPED CatalogCacheStatus = 6 + // Used to indicate that the cache was evicted + CatalogCacheStatus_CACHE_EVICTED CatalogCacheStatus = 7 ) var CatalogCacheStatus_name = map[int32]string{ @@ -48,6 +50,7 @@ var CatalogCacheStatus_name = map[int32]string{ 4: "CACHE_LOOKUP_FAILURE", 5: "CACHE_PUT_FAILURE", 6: "CACHE_SKIPPED", + 7: "CACHE_EVICTED", } var CatalogCacheStatus_value = map[string]int32{ @@ -58,6 +61,7 @@ var CatalogCacheStatus_value = map[string]int32{ "CACHE_LOOKUP_FAILURE": 4, "CACHE_PUT_FAILURE": 5, "CACHE_SKIPPED": 6, + "CACHE_EVICTED": 7, } func (x CatalogCacheStatus) String() string { @@ -285,35 +289,35 @@ func init() { func init() { proto.RegisterFile("flyteidl/core/catalog.proto", fileDescriptor_534f5d1443565574) } var fileDescriptor_534f5d1443565574 = []byte{ - // 468 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x52, 0x4f, 0x6f, 0xd3, 0x4e, - 0x10, 0xad, 0xd3, 0xfe, 0x22, 0x65, 0xf2, 0x4b, 0xeb, 0x6e, 0x5a, 0x30, 0x20, 0xf1, 0x27, 0x07, - 0x84, 0x90, 0xb0, 0xa5, 0x72, 0x41, 0x88, 0x8b, 0x63, 0x2f, 0xca, 0xaa, 0x2e, 0x31, 0xbb, 0x36, - 0x42, 0x08, 0x29, 0xda, 0xd8, 0x1b, 0xd7, 0x6a, 0x9a, 0xad, 0xec, 0x0d, 0x82, 0x33, 0x07, 0xbe, - 0x05, 0xe2, 0xa3, 0xa2, 0x78, 0x1d, 0xe7, 0x0f, 0x27, 0xcf, 0xbc, 0xf7, 0xe6, 0xcd, 0xb3, 0x3d, - 0xf0, 
0x68, 0x36, 0xff, 0xa1, 0x44, 0x9e, 0xce, 0x9d, 0x44, 0x16, 0xc2, 0x49, 0xb8, 0xe2, 0x73, - 0x99, 0xd9, 0x77, 0x85, 0x54, 0x12, 0xf5, 0xd6, 0xa4, 0xbd, 0x22, 0x1f, 0x3e, 0xde, 0xd5, 0xe6, - 0xa9, 0x58, 0xa8, 0x7c, 0x96, 0x8b, 0x42, 0xcb, 0x07, 0x04, 0x90, 0xa7, 0xe7, 0xdd, 0x42, 0xe5, - 0x33, 0x9e, 0xa8, 0x88, 0x67, 0xe8, 0x09, 0x74, 0x79, 0xdd, 0x4e, 0xf2, 0xd4, 0x32, 0x9e, 0x1a, - 0x2f, 0x3a, 0x14, 0xd6, 0x10, 0x49, 0x11, 0x82, 0xa3, 0x05, 0xbf, 0x15, 0x56, 0xab, 0x62, 0xaa, - 0x7a, 0xf0, 0xb3, 0x05, 0x27, 0xb5, 0xd7, 0x95, 0x50, 0x3c, 0xe5, 0x8a, 0xa3, 0x37, 0x00, 0xab, - 0x67, 0x29, 0x1a, 0x9f, 0xee, 0xc5, 0x03, 0x7b, 0x27, 0xa2, 0x4d, 0x9a, 0x4c, 0xb4, 0x53, 0x8b, - 0x49, 0x8a, 0x7c, 0xf8, 0xbf, 0x89, 0xa0, 0x78, 0x56, 0x6d, 0xea, 0x5e, 0x3c, 0xdb, 0x9b, 0xfd, - 0x37, 0x3b, 0x6d, 0x92, 0xaf, 0x5e, 0xe4, 0x2b, 0x9c, 0x97, 0x72, 0x59, 0x24, 0x62, 0xa2, 0x78, - 0x79, 0x33, 0x11, 0xdf, 0x45, 0xb2, 0x54, 0xb9, 0x5c, 0x58, 0x87, 0x95, 0xdd, 0xf3, 0x3d, 0xbb, - 0x88, 0x97, 0x37, 0x78, 0xad, 0xd9, 0xe4, 0x1a, 0x1d, 0xd0, 0xbe, 0xb6, 0xd9, 0x11, 0x0c, 0x11, - 0x98, 0xb5, 0x7b, 0x63, 0x3c, 0xf8, 0x6d, 0x34, 0x5f, 0x94, 0x8a, 0x52, 0x14, 0xdf, 0x78, 0x05, - 0xff, 0x32, 0xa0, 0xcd, 0x14, 0x57, 0xcb, 0x12, 0x59, 0x70, 0x46, 0x31, 0xc3, 0xf4, 0x93, 0x1b, - 0x91, 0xf1, 0x87, 0x89, 0x4f, 0x98, 0x3b, 0x0c, 0xb0, 0x6f, 0x1e, 0xec, 0x33, 0xae, 0xf7, 0x31, - 0x26, 0x14, 0xfb, 0xa6, 0x81, 0xee, 0x01, 0xda, 0x66, 0xf0, 0x67, 0xc2, 0x22, 0x66, 0xb6, 0xf6, - 0x27, 0x28, 0x0e, 0xb0, 0xcb, 0xb0, 0x6f, 0x1e, 0xa2, 0xfb, 0xd0, 0xdf, 0x66, 0xde, 0xbb, 0x24, - 0x88, 0x29, 0x36, 0x8f, 0x5e, 0xfe, 0xd9, 0x04, 0xf4, 0x78, 0x72, 0x2d, 0xea, 0x54, 0x08, 0x8e, - 0x3d, 0xd7, 0x1b, 0xe1, 0xed, 0x3c, 0xc7, 0x00, 0x1a, 0xbb, 0x22, 0x8c, 0x99, 0x06, 0xea, 0x41, - 0x47, 0xf7, 0x23, 0x12, 0x99, 0x2d, 0xd4, 0x87, 0x13, 0xdd, 0x86, 0xe3, 0x30, 0x0e, 0xdc, 0xa8, - 0xda, 0x6b, 0xc1, 0x99, 0x06, 0x83, 0xf1, 0xf8, 0x32, 0x0e, 0x37, 0x8b, 0xd1, 0x39, 0x9c, 0xd6, - 0xf2, 0x38, 0x6a, 0xe0, 0xff, 0xd0, 0x29, 
0xf4, 0x34, 0xcc, 0x2e, 0x49, 0x18, 0x62, 0xdf, 0x6c, - 0x0f, 0xdf, 0x7d, 0x79, 0x9b, 0xe5, 0xea, 0x7a, 0x39, 0xb5, 0x13, 0x79, 0xeb, 0x54, 0xbf, 0x48, - 0x16, 0x99, 0x2e, 0x9c, 0xe6, 0xa0, 0x33, 0xb1, 0x70, 0xee, 0xa6, 0xaf, 0x32, 0xe9, 0xec, 0xdc, - 0xf8, 0xb4, 0x5d, 0x5d, 0xf6, 0xeb, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x7c, 0xed, 0xa2, 0x14, - 0x27, 0x03, 0x00, 0x00, + // 475 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x52, 0x4d, 0x6f, 0xd3, 0x40, + 0x10, 0xad, 0xd3, 0x12, 0x94, 0x09, 0x69, 0xdd, 0x4d, 0x0b, 0x01, 0x24, 0x3e, 0x72, 0x40, 0x08, + 0x09, 0x5b, 0x2a, 0x17, 0x84, 0xb8, 0x38, 0xf6, 0xa2, 0xac, 0xea, 0x12, 0xb3, 0x6b, 0x57, 0x08, + 0x21, 0x45, 0x1b, 0x7b, 0xe3, 0x5a, 0x4d, 0xb3, 0x95, 0xbd, 0x41, 0x70, 0xe6, 0xc0, 0xbf, 0xe0, + 0x4f, 0xf0, 0x07, 0x51, 0xbc, 0x8e, 0xf3, 0xd1, 0x93, 0x67, 0xde, 0x7b, 0xf3, 0xe6, 0xd9, 0x1e, + 0x78, 0x3a, 0x9d, 0xfd, 0x52, 0x22, 0x4b, 0x66, 0x76, 0x2c, 0x73, 0x61, 0xc7, 0x5c, 0xf1, 0x99, + 0x4c, 0xad, 0xdb, 0x5c, 0x2a, 0x89, 0x3a, 0x2b, 0xd2, 0x5a, 0x92, 0x4f, 0x9e, 0x6d, 0x6b, 0xb3, + 0x44, 0xcc, 0x55, 0x36, 0xcd, 0x44, 0xae, 0xe5, 0x7d, 0x02, 0xc8, 0xd5, 0xf3, 0x4e, 0xae, 0xb2, + 0x29, 0x8f, 0x55, 0xc8, 0x53, 0xf4, 0x1c, 0xda, 0xbc, 0x6a, 0xc7, 0x59, 0xd2, 0x33, 0x5e, 0x18, + 0xaf, 0x5b, 0x14, 0x56, 0x10, 0x49, 0x10, 0x82, 0x83, 0x39, 0xbf, 0x11, 0xbd, 0x46, 0xc9, 0x94, + 0x75, 0xff, 0x77, 0x03, 0x8e, 0x2a, 0xaf, 0x0b, 0xa1, 0x78, 0xc2, 0x15, 0x47, 0xef, 0x01, 0x96, + 0xcf, 0x42, 0xd4, 0x3e, 0xed, 0xb3, 0xc7, 0xd6, 0x56, 0x44, 0x8b, 0xd4, 0x99, 0x68, 0xab, 0x12, + 0x93, 0x04, 0x79, 0xf0, 0xa0, 0x8e, 0xa0, 0x78, 0x5a, 0x6e, 0x6a, 0x9f, 0xbd, 0xdc, 0x99, 0xbd, + 0x9b, 0x9d, 0xd6, 0xc9, 0x97, 0x2f, 0xf2, 0x1d, 0x4e, 0x0b, 0xb9, 0xc8, 0x63, 0x31, 0x56, 0xbc, + 0xb8, 0x1e, 0x8b, 0x9f, 0x22, 0x5e, 0xa8, 0x4c, 0xce, 0x7b, 0xfb, 0xa5, 0xdd, 0xab, 0x1d, 0xbb, + 0x90, 0x17, 0xd7, 0x78, 0xa5, 0x59, 0xe7, 0x1a, 0xee, 0xd1, 0xae, 0xb6, 0xd9, 0x12, 0x0c, 0x10, + 0x98, 
0x95, 0x7b, 0x6d, 0xdc, 0xff, 0x6b, 0xd4, 0x5f, 0x94, 0x8a, 0x42, 0xe4, 0x3f, 0x78, 0x09, + 0xff, 0x31, 0xa0, 0xc9, 0x14, 0x57, 0x8b, 0x02, 0xf5, 0xe0, 0x84, 0x62, 0x86, 0xe9, 0xa5, 0x13, + 0x92, 0xd1, 0xe7, 0xb1, 0x47, 0x98, 0x33, 0xf0, 0xb1, 0x67, 0xee, 0xed, 0x32, 0x8e, 0xfb, 0x25, + 0x22, 0x14, 0x7b, 0xa6, 0x81, 0x1e, 0x02, 0xda, 0x64, 0xf0, 0x57, 0xc2, 0x42, 0x66, 0x36, 0x76, + 0x27, 0x28, 0xf6, 0xb1, 0xc3, 0xb0, 0x67, 0xee, 0xa3, 0x47, 0xd0, 0xdd, 0x64, 0x3e, 0x39, 0xc4, + 0x8f, 0x28, 0x36, 0x0f, 0xde, 0xfc, 0x5b, 0x07, 0x74, 0x79, 0x7c, 0x25, 0xaa, 0x54, 0x08, 0x0e, + 0x5d, 0xc7, 0x1d, 0xe2, 0xcd, 0x3c, 0x87, 0x00, 0x1a, 0xbb, 0x20, 0x8c, 0x99, 0x06, 0xea, 0x40, + 0x4b, 0xf7, 0x43, 0x12, 0x9a, 0x0d, 0xd4, 0x85, 0x23, 0xdd, 0x06, 0xa3, 0x20, 0xf2, 0x9d, 0xb0, + 0xdc, 0xdb, 0x83, 0x13, 0x0d, 0xfa, 0xa3, 0xd1, 0x79, 0x14, 0xac, 0x17, 0xa3, 0x53, 0x38, 0xae, + 0xe4, 0x51, 0x58, 0xc3, 0xf7, 0xd0, 0x31, 0x74, 0x34, 0xcc, 0xce, 0x49, 0x10, 0x60, 0xcf, 0x6c, + 0xae, 0x21, 0x7c, 0x49, 0xdc, 0xa5, 0xed, 0xfd, 0xc1, 0xc7, 0x6f, 0x1f, 0xd2, 0x4c, 0x5d, 0x2d, + 0x26, 0x56, 0x2c, 0x6f, 0xec, 0xf2, 0xaf, 0xc9, 0x3c, 0xd5, 0x85, 0x5d, 0xdf, 0x78, 0x2a, 0xe6, + 0xf6, 0xed, 0xe4, 0x6d, 0x2a, 0xed, 0xad, 0xb3, 0x9f, 0x34, 0xcb, 0x63, 0x7f, 0xf7, 0x3f, 0x00, + 0x00, 0xff, 0xff, 0x2e, 0x3b, 0x98, 0x7c, 0x3a, 0x03, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/core/identifier.pb.go b/flyteidl/gen/pb-go/flyteidl/core/identifier.pb.go index 4ea56b88f2..bb4d3264e4 100644 --- a/flyteidl/gen/pb-go/flyteidl/core/identifier.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/core/identifier.pb.go @@ -70,7 +70,9 @@ type Identifier struct { // User provided value for the resource. Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` // Specific version of the resource. 
- Version string `protobuf:"bytes,5,opt,name=version,proto3" json:"version,omitempty"` + Version string `protobuf:"bytes,5,opt,name=version,proto3" json:"version,omitempty"` + // Optional, org key applied to the resource. + Org string `protobuf:"bytes,6,opt,name=org,proto3" json:"org,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -136,6 +138,13 @@ func (m *Identifier) GetVersion() string { return "" } +func (m *Identifier) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Encapsulation of fields that uniquely identifies a Flyte workflow execution type WorkflowExecutionIdentifier struct { // Name of the project the resource belongs to. @@ -144,7 +153,9 @@ type WorkflowExecutionIdentifier struct { // A domain can be considered as a subset within a specific project. Domain string `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` // User or system provided value for the resource. - Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // Optional, org key applied to the resource. + Org string `protobuf:"bytes,5,opt,name=org,proto3" json:"org,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -196,6 +207,13 @@ func (m *WorkflowExecutionIdentifier) GetName() string { return "" } +func (m *WorkflowExecutionIdentifier) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // Encapsulation of fields that identify a Flyte node execution entity. 
type NodeExecutionIdentifier struct { NodeId string `protobuf:"bytes,1,opt,name=node_id,json=nodeId,proto3" json:"node_id,omitempty"` @@ -362,35 +380,36 @@ func init() { func init() { proto.RegisterFile("flyteidl/core/identifier.proto", fileDescriptor_adfa846a86e1fa0c) } var fileDescriptor_adfa846a86e1fa0c = []byte{ - // 467 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x53, 0x41, 0x8f, 0xd2, 0x50, - 0x10, 0xb6, 0x2c, 0x02, 0x3b, 0x05, 0xb7, 0xbe, 0x83, 0xd4, 0x90, 0x98, 0x4d, 0x4d, 0xcc, 0x66, - 0x13, 0xdb, 0x04, 0x6f, 0xc6, 0x83, 0x75, 0x97, 0x8d, 0xcd, 0x22, 0xbb, 0x29, 0x25, 0x24, 0x5e, - 0x48, 0x69, 0x87, 0xfa, 0x04, 0xfa, 0x9a, 0xd7, 0x87, 0xda, 0x8b, 0x89, 0x7f, 0xc8, 0x9f, 0xe1, - 0xef, 0x32, 0xaf, 0x94, 0xdd, 0x42, 0x40, 0x2f, 0x7b, 0xeb, 0xcc, 0xf7, 0xbd, 0xf9, 0xbe, 0x99, - 0xce, 0xc0, 0x8b, 0xd9, 0x22, 0x13, 0x48, 0xc3, 0x85, 0x15, 0x30, 0x8e, 0x16, 0x0d, 0x31, 0x16, - 0x74, 0x46, 0x91, 0x9b, 0x09, 0x67, 0x82, 0x91, 0xd6, 0x06, 0x37, 0x25, 0x6e, 0xfc, 0x56, 0x00, - 0x9c, 0x3b, 0x0e, 0x79, 0x0f, 0x2d, 0x8e, 0x29, 0x5b, 0xf1, 0x00, 0x27, 0x22, 0x4b, 0x50, 0x57, - 0x4e, 0x95, 0xb3, 0x27, 0xdd, 0x8e, 0xb9, 0xf5, 0xca, 0x74, 0x0b, 0x8e, 0x97, 0x25, 0xe8, 0x36, - 0x79, 0x29, 0x22, 0x3a, 0xd4, 0x13, 0xce, 0xbe, 0x62, 0x20, 0xf4, 0xca, 0xa9, 0x72, 0x76, 0xec, - 0x6e, 0x42, 0xf2, 0x0c, 0x6a, 0x21, 0x5b, 0xfa, 0x34, 0xd6, 0x8f, 0x72, 0xa0, 0x88, 0x08, 0x81, - 0x6a, 0xec, 0x2f, 0x51, 0xaf, 0xe6, 0xd9, 0xfc, 0x5b, 0x56, 0xf9, 0x86, 0x3c, 0xa5, 0x2c, 0xd6, - 0x1f, 0xaf, 0xab, 0x14, 0xa1, 0x11, 0x40, 0x67, 0xcc, 0xf8, 0x7c, 0xb6, 0x60, 0xdf, 0x7b, 0x3f, - 0x30, 0x58, 0x09, 0xca, 0xe2, 0x52, 0x03, 0x25, 0x79, 0xe5, 0x90, 0x7c, 0xe5, 0x7f, 0xf2, 0xc6, - 0x2f, 0x05, 0xda, 0x03, 0x16, 0xe2, 0x3e, 0x85, 0x36, 0xd4, 0x63, 0x16, 0xe2, 0x84, 0x86, 0x85, - 0x42, 0x4d, 0x86, 0x4e, 0x48, 0x3e, 0x41, 0x13, 0x37, 0x7c, 0x89, 0x4a, 0x19, 0xb5, 0x7b, 0xbe, - 0x33, 0xba, 0x7f, 0x98, 0x77, 0x55, 0xbc, 0x4f, 0x1a, 0x7f, 
0x14, 0x68, 0x7b, 0x7e, 0x3a, 0xdf, - 0xe7, 0xa1, 0x0b, 0x75, 0xe1, 0xa7, 0xf3, 0x8d, 0x07, 0xb5, 0xfb, 0x7c, 0x47, 0xa5, 0x54, 0xb4, - 0x26, 0x99, 0x4e, 0x48, 0x5c, 0x78, 0x9a, 0xfb, 0xde, 0xe3, 0xf1, 0xd5, 0xce, 0xeb, 0x03, 0xad, - 0xbb, 0x27, 0xf1, 0x36, 0x40, 0x5e, 0xca, 0x75, 0x11, 0x3c, 0x9b, 0xf8, 0x42, 0xe0, 0x32, 0x11, - 0xf9, 0x9f, 0x6d, 0xc9, 0x8d, 0x10, 0x3c, 0xb3, 0xd7, 0x39, 0xe3, 0x27, 0x68, 0x43, 0x1a, 0xc5, - 0xfe, 0xa2, 0xd4, 0x40, 0x07, 0x8e, 0xd3, 0x3c, 0x77, 0x3f, 0xc6, 0x46, 0x5a, 0x90, 0x1e, 0x78, - 0x90, 0xe7, 0x23, 0x68, 0x96, 0xf7, 0x95, 0x9c, 0x80, 0x3a, 0x1a, 0x0c, 0x6f, 0x7b, 0x17, 0xce, - 0x95, 0xd3, 0xbb, 0xd4, 0x1e, 0x91, 0x06, 0x54, 0x3d, 0x7b, 0x78, 0xad, 0x29, 0xa4, 0x09, 0x8d, - 0xf1, 0x8d, 0x7b, 0x7d, 0xd5, 0xbf, 0x19, 0x6b, 0x15, 0x49, 0xec, 0xdb, 0xa3, 0xc1, 0xc5, 0xc7, - 0xc9, 0x6d, 0xdf, 0x1e, 0x68, 0x47, 0x44, 0x85, 0xfa, 0xa5, 0xed, 0xd9, 0xc3, 0x9e, 0xa7, 0x55, - 0x3f, 0xbc, 0xfb, 0xfc, 0x36, 0xa2, 0xe2, 0xcb, 0x6a, 0x6a, 0x06, 0x6c, 0x69, 0xe5, 0xde, 0x18, - 0x8f, 0xd6, 0x1f, 0xd6, 0xdd, 0x11, 0x46, 0x18, 0x5b, 0xc9, 0xf4, 0x75, 0xc4, 0xac, 0xad, 0xbb, - 0x9c, 0xd6, 0xf2, 0x6b, 0x7c, 0xf3, 0x37, 0x00, 0x00, 0xff, 0xff, 0x4a, 0x34, 0xd8, 0xe0, 0xaf, - 0x03, 0x00, 0x00, + // 481 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x53, 0xcf, 0x8f, 0xd2, 0x40, + 0x14, 0xb6, 0x0b, 0x5b, 0xd8, 0x57, 0x70, 0xeb, 0x1c, 0xa4, 0x86, 0xc4, 0x6c, 0x30, 0x31, 0x9b, + 0x4d, 0x6c, 0x13, 0xbc, 0x19, 0x0f, 0xd6, 0x5d, 0x36, 0x36, 0x8b, 0xec, 0xa6, 0x94, 0x90, 0x78, + 0x21, 0xa5, 0x7d, 0xd4, 0x11, 0xe8, 0x34, 0xd3, 0x41, 0xed, 0xc5, 0xc4, 0xff, 0xcc, 0x93, 0x7f, + 0x97, 0x99, 0x52, 0x96, 0x42, 0x58, 0xbd, 0x78, 0x7b, 0x3f, 0xbe, 0x79, 0xdf, 0xf7, 0xde, 0xbc, + 0x07, 0xcf, 0x67, 0x8b, 0x4c, 0x20, 0x0d, 0x17, 0x56, 0xc0, 0x38, 0x5a, 0x34, 0xc4, 0x58, 0xd0, + 0x19, 0x45, 0x6e, 0x26, 0x9c, 0x09, 0x46, 0x9a, 0x9b, 0xbc, 0x29, 0xf3, 0x9d, 0x5f, 0x0a, 0x80, + 0x73, 0x8f, 0x21, 0xef, 0xa0, 
0xc9, 0x31, 0x65, 0x2b, 0x1e, 0xe0, 0x44, 0x64, 0x09, 0x1a, 0xca, + 0x99, 0x72, 0xfe, 0xb8, 0xdb, 0x36, 0x77, 0x5e, 0x99, 0x6e, 0x81, 0xf1, 0xb2, 0x04, 0xdd, 0x06, + 0x2f, 0x79, 0xc4, 0x80, 0x5a, 0xc2, 0xd9, 0x17, 0x0c, 0x84, 0x71, 0x74, 0xa6, 0x9c, 0x9f, 0xb8, + 0x1b, 0x97, 0x3c, 0x05, 0x35, 0x64, 0x4b, 0x9f, 0xc6, 0x46, 0x25, 0x4f, 0x14, 0x1e, 0x21, 0x50, + 0x8d, 0xfd, 0x25, 0x1a, 0xd5, 0x3c, 0x9a, 0xdb, 0xb2, 0xca, 0x57, 0xe4, 0x29, 0x65, 0xb1, 0x71, + 0xbc, 0xae, 0x52, 0xb8, 0x44, 0x87, 0x0a, 0xe3, 0x91, 0xa1, 0xe6, 0x51, 0x69, 0x76, 0x56, 0xd0, + 0x1e, 0x33, 0x3e, 0x9f, 0x2d, 0xd8, 0xb7, 0xde, 0x77, 0x0c, 0x56, 0x82, 0xb2, 0xb8, 0xd4, 0x52, + 0x49, 0x90, 0xf2, 0x90, 0xa0, 0xa3, 0x7f, 0x0a, 0x2a, 0x68, 0x8f, 0xb7, 0xb4, 0x3f, 0x15, 0x68, + 0x0d, 0x58, 0x88, 0x87, 0x38, 0x5b, 0x50, 0x8b, 0x59, 0x88, 0x13, 0x1a, 0x16, 0x9c, 0xaa, 0x74, + 0x9d, 0x90, 0x7c, 0x84, 0x06, 0x6e, 0xf0, 0x32, 0x2b, 0x89, 0xb5, 0xee, 0xc5, 0xde, 0x78, 0xff, + 0xd2, 0x8e, 0xab, 0xe1, 0x36, 0xd8, 0xf9, 0xad, 0x40, 0xcb, 0xf3, 0xd3, 0xf9, 0x21, 0x0d, 0x5d, + 0xa8, 0x09, 0x3f, 0x9d, 0x6f, 0x34, 0x68, 0xdd, 0x67, 0x7b, 0x2c, 0xa5, 0xa2, 0xaa, 0x44, 0x3a, + 0x21, 0x71, 0xe1, 0x49, 0xae, 0xfb, 0x80, 0xc6, 0x97, 0x7b, 0xaf, 0x1f, 0x68, 0xdd, 0x3d, 0x8d, + 0x77, 0x13, 0xe4, 0x85, 0x5c, 0x29, 0xc1, 0xb3, 0x89, 0x2f, 0x04, 0x2e, 0x13, 0x91, 0xff, 0x7e, + 0x53, 0x6e, 0x8d, 0xe0, 0x99, 0xbd, 0x8e, 0x75, 0x7e, 0x80, 0x3e, 0xa4, 0x51, 0xec, 0x2f, 0x4a, + 0x0d, 0xb4, 0xe1, 0x24, 0xcd, 0x63, 0xdb, 0x31, 0xd6, 0xd3, 0x02, 0xf4, 0x9f, 0x07, 0x79, 0x31, + 0x82, 0x46, 0x79, 0xa7, 0xc9, 0x29, 0x68, 0xa3, 0xc1, 0xf0, 0xae, 0x77, 0xe9, 0x5c, 0x3b, 0xbd, + 0x2b, 0xfd, 0x11, 0xa9, 0x43, 0xd5, 0xb3, 0x87, 0x37, 0xba, 0x42, 0x1a, 0x50, 0x1f, 0xdf, 0xba, + 0x37, 0xd7, 0xfd, 0xdb, 0xb1, 0x7e, 0x24, 0x81, 0x7d, 0x7b, 0x34, 0xb8, 0xfc, 0x30, 0xb9, 0xeb, + 0xdb, 0x03, 0xbd, 0x42, 0x34, 0xa8, 0x5d, 0xd9, 0x9e, 0x3d, 0xec, 0x79, 0x7a, 0xf5, 0xfd, 0xdb, + 0x4f, 0x6f, 0x22, 0x2a, 0x3e, 0xaf, 0xa6, 0x66, 0xc0, 0x96, 0x56, 
0xae, 0x8d, 0xf1, 0x68, 0x6d, + 0x58, 0xf7, 0x87, 0x1a, 0x61, 0x6c, 0x25, 0xd3, 0x57, 0x11, 0xb3, 0x76, 0x6e, 0x77, 0xaa, 0xe6, + 0x17, 0xfb, 0xfa, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x03, 0xea, 0x9d, 0xb8, 0xd3, 0x03, 0x00, + 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/core/metrics.pb.go b/flyteidl/gen/pb-go/flyteidl/core/metrics.pb.go index 447a918a48..e9688ace88 100644 --- a/flyteidl/gen/pb-go/flyteidl/core/metrics.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/core/metrics.pb.go @@ -6,6 +6,7 @@ package core import ( fmt "fmt" proto "github.com/golang/protobuf/proto" + _struct "github.com/golang/protobuf/ptypes/struct" timestamp "github.com/golang/protobuf/ptypes/timestamp" math "math" ) @@ -161,34 +162,92 @@ func (*Span) XXX_OneofWrappers() []interface{} { } } +// ExecutionMetrics is a collection of metrics that are collected during the execution of a Flyte task. +type ExecutionMetricResult struct { + // The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG. + Metric string `protobuf:"bytes,1,opt,name=metric,proto3" json:"metric,omitempty"` + // The result data in prometheus range query result format + // https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats. + // This may include multiple time series, differentiated by their metric labels. 
+ // Start time is greater of (execution attempt start, 48h ago) + // End time is lesser of (execution attempt end, now) + Data *_struct.Struct `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExecutionMetricResult) Reset() { *m = ExecutionMetricResult{} } +func (m *ExecutionMetricResult) String() string { return proto.CompactTextString(m) } +func (*ExecutionMetricResult) ProtoMessage() {} +func (*ExecutionMetricResult) Descriptor() ([]byte, []int) { + return fileDescriptor_756935f796ae3119, []int{1} +} + +func (m *ExecutionMetricResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExecutionMetricResult.Unmarshal(m, b) +} +func (m *ExecutionMetricResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExecutionMetricResult.Marshal(b, m, deterministic) +} +func (m *ExecutionMetricResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExecutionMetricResult.Merge(m, src) +} +func (m *ExecutionMetricResult) XXX_Size() int { + return xxx_messageInfo_ExecutionMetricResult.Size(m) +} +func (m *ExecutionMetricResult) XXX_DiscardUnknown() { + xxx_messageInfo_ExecutionMetricResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ExecutionMetricResult proto.InternalMessageInfo + +func (m *ExecutionMetricResult) GetMetric() string { + if m != nil { + return m.Metric + } + return "" +} + +func (m *ExecutionMetricResult) GetData() *_struct.Struct { + if m != nil { + return m.Data + } + return nil +} + func init() { proto.RegisterType((*Span)(nil), "flyteidl.core.Span") + proto.RegisterType((*ExecutionMetricResult)(nil), "flyteidl.core.ExecutionMetricResult") } func init() { proto.RegisterFile("flyteidl/core/metrics.proto", fileDescriptor_756935f796ae3119) } var fileDescriptor_756935f796ae3119 = []byte{ - // 341 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x02, 0xff, 0x84, 0x92, 0xdf, 0x4b, 0xeb, 0x30, - 0x14, 0x80, 0xf7, 0x7b, 0x77, 0xe9, 0xbd, 0x2f, 0xbd, 0x2f, 0x63, 0x17, 0xae, 0x43, 0x41, 0xa6, - 0x60, 0x02, 0x13, 0x1f, 0x14, 0x5f, 0x1c, 0x08, 0xeb, 0x83, 0x3e, 0xd4, 0x81, 0xe0, 0xcb, 0x68, - 0x9b, 0xd3, 0x1a, 0xd6, 0xe6, 0x94, 0x34, 0x63, 0xfa, 0xf7, 0xf8, 0x8f, 0x4a, 0x12, 0x3b, 0xd8, - 0x98, 0xf8, 0xd6, 0x34, 0xdf, 0xf7, 0x85, 0x90, 0x43, 0xfe, 0xa5, 0xf9, 0xbb, 0x06, 0xc1, 0x73, - 0x96, 0xa0, 0x02, 0x56, 0x80, 0x56, 0x22, 0xa9, 0x68, 0xa9, 0x50, 0xa3, 0xff, 0xa7, 0xde, 0xa4, - 0x66, 0x73, 0xf4, 0x7f, 0x97, 0x15, 0x1c, 0xa4, 0x16, 0xa9, 0x00, 0xe5, 0xf0, 0xd1, 0x51, 0x86, - 0x98, 0xe5, 0xc0, 0xec, 0x2a, 0x5e, 0xa7, 0x4c, 0x8b, 0x02, 0x2a, 0x1d, 0x15, 0xa5, 0x03, 0x8e, - 0x3f, 0xda, 0xa4, 0xf3, 0x54, 0x46, 0xd2, 0xbf, 0x26, 0xa4, 0xd2, 0x91, 0xd2, 0x4b, 0x43, 0x0c, - 0x9b, 0xe3, 0xe6, 0xc4, 0x9b, 0x8e, 0xa8, 0xd3, 0x69, 0xad, 0xd3, 0x45, 0xad, 0x87, 0x03, 0x4b, - 0x9b, 0xb5, 0x7f, 0x45, 0x7e, 0x81, 0xe4, 0x4e, 0x6c, 0xfd, 0x28, 0xf6, 0x41, 0x72, 0xab, 0x3d, - 0x10, 0x6f, 0x83, 0x6a, 0x95, 0xe6, 0xb8, 0x59, 0x0a, 0x3e, 0x6c, 0x5b, 0xf3, 0x9c, 0xee, 0x5c, - 0x90, 0x3e, 0x7f, 0x11, 0xf7, 0x6f, 0x90, 0xac, 0xb5, 0x40, 0x19, 0x6c, 0xaf, 0x38, 0x6f, 0x84, - 0xa4, 0x0e, 0x04, 0xdc, 0xbf, 0x23, 0x7d, 0x89, 0x1c, 0x4c, 0xaa, 0x63, 0x53, 0xa7, 0x7b, 0xa9, - 0x47, 0xe4, 0x70, 0x38, 0xd3, 0x33, 0xa2, 0x4b, 0xe8, 0xa8, 0x5a, 0x99, 0x44, 0xf7, 0x60, 0x62, - 0x11, 0x55, 0xab, 0x6f, 0x12, 0x46, 0x0c, 0xb8, 0x7f, 0x42, 0x7e, 0x63, 0x09, 0x2a, 0x32, 0x80, - 0xe9, 0xf4, 0xc6, 0xcd, 0xc9, 0x60, 0xde, 0x08, 0xbd, 0xed, 0xdf, 0x80, 0xfb, 0x67, 0xa4, 0x5b, - 0x95, 0x91, 0xac, 0x86, 0xfd, 0x71, 0x7b, 0xe2, 0x4d, 0xff, 0xee, 0x9d, 0x62, 0xde, 0x23, 0x74, - 0xc4, 0xac, 0x43, 0x5a, 0x82, 0xcf, 0x6e, 0x5f, 0x6e, 0x32, 0xa1, 0x5f, 0xd7, 0x31, 0x4d, 0xb0, - 0x60, 0x96, 0x46, 0x95, 0xb9, 0x0f, 0xb6, 0x1d, 0x81, 0x0c, 0x24, 0x2b, 0xe3, 0x8b, 0x0c, 0xd9, - 0xce, 0x54, 0xc4, 0x3d, 0xfb, 0x0a, 0x97, 0x9f, 0x01, 0x00, 0x00, 0xff, 0xff, 
0xff, 0xa7, 0xb8, - 0x33, 0x59, 0x02, 0x00, 0x00, + // 393 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x51, 0xcb, 0xd3, 0x30, + 0x14, 0x86, 0xbf, 0x7d, 0xeb, 0x3a, 0x97, 0xea, 0x4d, 0x44, 0x1d, 0x53, 0x74, 0x4c, 0x90, 0xa9, + 0x98, 0xc0, 0xc4, 0x0b, 0xc5, 0x1b, 0x07, 0xc2, 0x7a, 0x31, 0x2f, 0xba, 0x81, 0x20, 0xc2, 0x48, + 0x9b, 0xb4, 0x86, 0xb5, 0x49, 0x49, 0x52, 0xa6, 0xbf, 0xc7, 0x3f, 0x2a, 0x49, 0xda, 0xc2, 0xe6, + 0xc4, 0xbb, 0x9e, 0x9e, 0xf7, 0x79, 0xd2, 0xe6, 0x1c, 0xf0, 0x38, 0x2f, 0x7f, 0x19, 0xc6, 0x69, + 0x89, 0x33, 0xa9, 0x18, 0xae, 0x98, 0x51, 0x3c, 0xd3, 0xa8, 0x56, 0xd2, 0x48, 0x78, 0xaf, 0x6b, + 0x22, 0xdb, 0x9c, 0x3d, 0x3d, 0xcf, 0x72, 0xca, 0x84, 0xe1, 0x39, 0x67, 0xca, 0xc7, 0x67, 0xcf, + 0x0a, 0x29, 0x8b, 0x92, 0x61, 0x57, 0xa5, 0x4d, 0x8e, 0x0d, 0xaf, 0x98, 0x36, 0xa4, 0xaa, 0xdb, + 0xc0, 0x93, 0xcb, 0x80, 0x36, 0xaa, 0xc9, 0x8c, 0xef, 0x2e, 0x7e, 0x0f, 0x41, 0xb0, 0xab, 0x89, + 0x80, 0xef, 0x01, 0xd0, 0x86, 0x28, 0x73, 0xb0, 0xfc, 0x74, 0x30, 0x1f, 0x2c, 0xa3, 0xd5, 0x0c, + 0x79, 0x16, 0x75, 0x2c, 0xda, 0x77, 0xf2, 0x64, 0xe2, 0xd2, 0xb6, 0x86, 0xef, 0xc0, 0x1d, 0x26, + 0xa8, 0x07, 0x6f, 0xff, 0x0b, 0x8e, 0x99, 0xa0, 0x0e, 0xdb, 0x82, 0xe8, 0x24, 0xd5, 0x31, 0x2f, + 0xe5, 0xe9, 0xc0, 0xe9, 0x74, 0xe8, 0xc8, 0x57, 0xe8, 0xec, 0xf7, 0xd1, 0xd7, 0x36, 0xf1, 0xf9, + 0x27, 0xcb, 0x1a, 0xc3, 0xa5, 0x88, 0xfb, 0x0b, 0xd8, 0xdc, 0x24, 0xa0, 0x13, 0xc4, 0x14, 0x7e, + 0x02, 0x63, 0x21, 0x29, 0xb3, 0xaa, 0xc0, 0xa9, 0x5e, 0x5c, 0xa8, 0xbe, 0x48, 0xca, 0xae, 0x6b, + 0x42, 0x0b, 0x7a, 0x85, 0x21, 0xfa, 0x68, 0x15, 0xa3, 0xab, 0x8a, 0x3d, 0xd1, 0xc7, 0x7f, 0x28, + 0x2c, 0x18, 0x53, 0xf8, 0x1c, 0xdc, 0x95, 0x35, 0x53, 0xc4, 0x06, 0xac, 0x27, 0x9c, 0x0f, 0x96, + 0x93, 0xcd, 0x4d, 0x12, 0xf5, 0x6f, 0x63, 0x0a, 0x5f, 0x82, 0x91, 0xae, 0x89, 0xd0, 0xd3, 0xf1, + 0x7c, 0xb8, 0x8c, 0x56, 0xf7, 0x2f, 0x4e, 0xb1, 0xf3, 0x48, 0x7c, 0x62, 0x1d, 0x80, 0x5b, 0x4e, + 0x17, 0xdf, 0xc1, 0x83, 0xfe, 0xd8, 
0xad, 0xdb, 0x96, 0x84, 0xe9, 0xa6, 0x34, 0xf0, 0x21, 0x08, + 0xfd, 0xf6, 0xb8, 0x89, 0x4d, 0x92, 0xb6, 0x82, 0xaf, 0x41, 0x40, 0x89, 0x21, 0xed, 0x38, 0x1e, + 0xfd, 0x35, 0x8e, 0x9d, 0xdb, 0x81, 0xc4, 0x85, 0xd6, 0x1f, 0xbf, 0x7d, 0x28, 0xb8, 0xf9, 0xd1, + 0xa4, 0x28, 0x93, 0x15, 0x76, 0xdf, 0x22, 0x55, 0xe1, 0x1f, 0x70, 0xbf, 0x7e, 0x05, 0x13, 0xb8, + 0x4e, 0xdf, 0x14, 0x12, 0x9f, 0x6d, 0x64, 0x1a, 0x3a, 0xe9, 0xdb, 0x3f, 0x01, 0x00, 0x00, 0xff, + 0xff, 0xd7, 0xa7, 0xf1, 0xb0, 0xd5, 0x02, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/datacatalog/datacatalog.pb.go b/flyteidl/gen/pb-go/flyteidl/datacatalog/datacatalog.pb.go index 1095091b5c..d1d623e792 100644 --- a/flyteidl/gen/pb-go/flyteidl/datacatalog/datacatalog.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/datacatalog/datacatalog.pb.go @@ -1319,11 +1319,13 @@ func (m *Partition) GetValue() string { // // DatasetID message that is composed of several string fields. type DatasetID struct { - Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - Domain string `protobuf:"bytes,3,opt,name=domain,proto3" json:"domain,omitempty"` - Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` - UUID string `protobuf:"bytes,5,opt,name=UUID,proto3" json:"UUID,omitempty"` + Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Domain string `protobuf:"bytes,3,opt,name=domain,proto3" json:"domain,omitempty"` + Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` + UUID string `protobuf:"bytes,5,opt,name=UUID,proto3" json:"UUID,omitempty"` + // Optional, org key applied to the resource. 
+ Org string `protobuf:"bytes,6,opt,name=org,proto3" json:"org,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -1389,6 +1391,13 @@ func (m *DatasetID) GetUUID() string { return "" } +func (m *DatasetID) GetOrg() string { + if m != nil { + return m.Org + } + return "" +} + // // Artifact message. It is composed of several string fields. type Artifact struct { @@ -2039,6 +2048,7 @@ type DatasetPropertyFilter struct { // *DatasetPropertyFilter_Name // *DatasetPropertyFilter_Domain // *DatasetPropertyFilter_Version + // *DatasetPropertyFilter_Org Property isDatasetPropertyFilter_Property `protobuf_oneof:"property"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -2090,6 +2100,10 @@ type DatasetPropertyFilter_Version struct { Version string `protobuf:"bytes,4,opt,name=version,proto3,oneof"` } +type DatasetPropertyFilter_Org struct { + Org string `protobuf:"bytes,5,opt,name=org,proto3,oneof"` +} + func (*DatasetPropertyFilter_Project) isDatasetPropertyFilter_Property() {} func (*DatasetPropertyFilter_Name) isDatasetPropertyFilter_Property() {} @@ -2098,6 +2112,8 @@ func (*DatasetPropertyFilter_Domain) isDatasetPropertyFilter_Property() {} func (*DatasetPropertyFilter_Version) isDatasetPropertyFilter_Property() {} +func (*DatasetPropertyFilter_Org) isDatasetPropertyFilter_Property() {} + func (m *DatasetPropertyFilter) GetProperty() isDatasetPropertyFilter_Property { if m != nil { return m.Property @@ -2133,6 +2149,13 @@ func (m *DatasetPropertyFilter) GetVersion() string { return "" } +func (m *DatasetPropertyFilter) GetOrg() string { + if x, ok := m.GetProperty().(*DatasetPropertyFilter_Org); ok { + return x.Org + } + return "" +} + // XXX_OneofWrappers is for the internal use of the proto package. 
func (*DatasetPropertyFilter) XXX_OneofWrappers() []interface{} { return []interface{}{ @@ -2140,6 +2163,7 @@ func (*DatasetPropertyFilter) XXX_OneofWrappers() []interface{} { (*DatasetPropertyFilter_Name)(nil), (*DatasetPropertyFilter_Domain)(nil), (*DatasetPropertyFilter_Version)(nil), + (*DatasetPropertyFilter_Org)(nil), } } @@ -2260,112 +2284,113 @@ func init() { } var fileDescriptor_275951237ff4368a = []byte{ - // 1675 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x58, 0x4b, 0x6f, 0xdb, 0xc6, - 0x13, 0x37, 0x25, 0x47, 0x32, 0x47, 0x96, 0x22, 0x6f, 0x6c, 0x47, 0x56, 0x12, 0x5b, 0x61, 0x02, - 0xff, 0x8d, 0xfc, 0x1b, 0x29, 0xb5, 0x93, 0xa0, 0x49, 0x8b, 0xb6, 0xb2, 0xa5, 0xd8, 0xaa, 0xe3, - 0x47, 0xe8, 0x07, 0xd0, 0x07, 0x20, 0xac, 0xcd, 0x35, 0xc3, 0x9a, 0x12, 0x19, 0x72, 0x9d, 0x5a, - 0xa7, 0xa2, 0x97, 0x1e, 0xda, 0xde, 0x0a, 0xf4, 0x0b, 0xf4, 0x83, 0xf4, 0x98, 0x5b, 0xbf, 0x50, - 0x2f, 0xc5, 0x92, 0xbb, 0x14, 0x49, 0x51, 0xb6, 0xe2, 0x43, 0x80, 0x5e, 0x08, 0xee, 0xee, 0xcc, - 0x6f, 0xe7, 0xb1, 0xb3, 0x33, 0xb3, 0xb0, 0x78, 0x62, 0xf6, 0x28, 0x31, 0x34, 0xb3, 0xa6, 0x61, - 0x8a, 0x8f, 0x31, 0xc5, 0xa6, 0xa5, 0x87, 0xff, 0xab, 0xb6, 0x63, 0x51, 0x0b, 0xe5, 0x42, 0x53, - 0xe5, 0xdb, 0x01, 0xd3, 0xb1, 0xe5, 0x90, 0x9a, 0x69, 0x50, 0xe2, 0x60, 0xd3, 0xf5, 0x49, 0xcb, - 0xf3, 0xba, 0x65, 0xe9, 0x26, 0xa9, 0x79, 0xa3, 0xa3, 0xb3, 0x93, 0x9a, 0x76, 0xe6, 0x60, 0x6a, - 0x58, 0x5d, 0xbe, 0xbe, 0x10, 0x5f, 0xa7, 0x46, 0x87, 0xb8, 0x14, 0x77, 0x6c, 0x9f, 0x40, 0x79, - 0x01, 0xd3, 0x6b, 0x0e, 0xc1, 0x94, 0x34, 0x30, 0xc5, 0x2e, 0xa1, 0x2a, 0x79, 0x73, 0x46, 0x5c, - 0x8a, 0xaa, 0x90, 0xd5, 0xfc, 0x99, 0x92, 0x54, 0x91, 0x96, 0x72, 0xcb, 0xd3, 0xd5, 0xb0, 0xa0, - 0x82, 0x5a, 0x10, 0x29, 0x37, 0x61, 0x26, 0x86, 0xe3, 0xda, 0x56, 0xd7, 0x25, 0x4a, 0x13, 0xa6, - 0xd6, 0x09, 0x8d, 0xa1, 0x3f, 0x8a, 0xa3, 0xcf, 0x26, 0xa1, 0xb7, 0x1a, 0x7d, 0xfc, 0x06, 0xa0, - 0x30, 0x8c, 0x0f, 0xfe, 0xde, 0x52, 0xfe, 0x21, 0x79, 0x30, 
0x75, 0x87, 0x1a, 0x27, 0xf8, 0xf8, - 0xea, 0xe2, 0xa0, 0xbb, 0x90, 0xc3, 0x1c, 0xa4, 0x6d, 0x68, 0xa5, 0x54, 0x45, 0x5a, 0x92, 0x37, - 0xc6, 0x54, 0x10, 0x93, 0x2d, 0x0d, 0xdd, 0x82, 0x09, 0x8a, 0xf5, 0x76, 0x17, 0x77, 0x48, 0x29, - 0xcd, 0xd7, 0xb3, 0x14, 0xeb, 0xdb, 0xb8, 0x43, 0x56, 0x0b, 0x30, 0xf9, 0xe6, 0x8c, 0x38, 0xbd, - 0xf6, 0x6b, 0xdc, 0xd5, 0x4c, 0xa2, 0x6c, 0xc0, 0x8d, 0x88, 0x5c, 0x5c, 0xbf, 0x8f, 0x61, 0x42, - 0x20, 0x72, 0xc9, 0x66, 0x22, 0x92, 0x05, 0x0c, 0x01, 0x99, 0xf2, 0x95, 0x70, 0x44, 0x5c, 0xc9, - 0x2b, 0x60, 0x95, 0x60, 0x36, 0x8e, 0xc5, 0xbd, 0xba, 0x02, 0xf9, 0xba, 0xa6, 0xed, 0x63, 0x5d, - 0xa0, 0x2b, 0x90, 0xa6, 0x58, 0xe7, 0xc0, 0xc5, 0x08, 0x30, 0xa3, 0x62, 0x8b, 0x4a, 0x11, 0x0a, - 0x82, 0x89, 0xc3, 0xfc, 0x25, 0xc1, 0xf4, 0x4b, 0xc3, 0x0d, 0x14, 0x77, 0xaf, 0xee, 0x91, 0x27, - 0x90, 0x39, 0x31, 0x4c, 0x4a, 0x1c, 0xcf, 0x19, 0xb9, 0xe5, 0x3b, 0x11, 0x86, 0x17, 0xde, 0x52, - 0xf3, 0xdc, 0x76, 0x88, 0xeb, 0x1a, 0x56, 0x57, 0xe5, 0xc4, 0xe8, 0x73, 0x00, 0x1b, 0xeb, 0x46, - 0xd7, 0x0b, 0x1a, 0xcf, 0x4f, 0xb9, 0xe5, 0xf9, 0x08, 0xeb, 0x6e, 0xb0, 0xbc, 0x63, 0xb3, 0xaf, - 0xab, 0x86, 0x38, 0x94, 0x53, 0x98, 0x89, 0x29, 0xc0, 0x5d, 0xb7, 0x02, 0xb2, 0xb0, 0xa3, 0x5b, - 0x92, 0x2a, 0xe9, 0xe1, 0xf6, 0xee, 0xd3, 0xa1, 0x3b, 0x00, 0x5d, 0x72, 0x4e, 0xdb, 0xd4, 0x3a, - 0x25, 0x5d, 0xff, 0x54, 0xa9, 0x32, 0x9b, 0xd9, 0x67, 0x13, 0xca, 0x6f, 0x12, 0xdc, 0x60, 0xbb, - 0x71, 0xf5, 0x03, 0x6b, 0xf5, 0x75, 0x97, 0xae, 0xae, 0x7b, 0xea, 0xbd, 0x75, 0xd7, 0x7d, 0xe7, - 0xf5, 0xa5, 0xe1, 0xaa, 0x3f, 0x82, 0x09, 0xee, 0x15, 0xa1, 0x79, 0x72, 0x58, 0x06, 0x54, 0x97, - 0xe9, 0xfd, 0x8f, 0x04, 0x33, 0x07, 0xb6, 0x96, 0x70, 0xa8, 0x3f, 0x78, 0xe4, 0xa2, 0x87, 0x30, - 0xce, 0xa0, 0x4a, 0xe3, 0x9e, 0x62, 0x73, 0x89, 0x2e, 0x65, 0xdb, 0xaa, 0x1e, 0x19, 0x8b, 0xba, - 0x0e, 0xa1, 0xd8, 0x63, 0xb9, 0x96, 0x10, 0x75, 0x5b, 0x7c, 0x51, 0x0d, 0xc8, 0x06, 0xee, 0x86, - 0x67, 0x30, 0x1b, 0x57, 0x9e, 0x1b, 0x7a, 0x21, 0xaa, 0x8b, 0xe4, 0xd9, 0x2d, 0xa4, 0x89, 0x82, - 
0x21, 0xaf, 0x12, 0x97, 0x38, 0x6f, 0x3d, 0x87, 0xb5, 0x1a, 0xe8, 0x09, 0x00, 0x37, 0x84, 0x60, - 0x18, 0x6e, 0x32, 0x99, 0x53, 0xb6, 0x34, 0x34, 0x17, 0xb2, 0x88, 0xef, 0x1d, 0x61, 0x0f, 0xe5, - 0x9d, 0x04, 0x77, 0xd6, 0x09, 0xdd, 0x71, 0x9a, 0xe7, 0x94, 0x74, 0xb5, 0xd0, 0x76, 0xc2, 0x47, - 0x75, 0x28, 0x38, 0xfd, 0xd9, 0xfe, 0xbe, 0xe5, 0xc8, 0xbe, 0x11, 0x39, 0xd5, 0x7c, 0x88, 0xc3, - 0xdf, 0xdf, 0xfa, 0xa1, 0x4b, 0x9c, 0xc0, 0x63, 0x6a, 0xd6, 0x1b, 0xb7, 0x34, 0xb4, 0x01, 0xe8, - 0x35, 0xc1, 0x0e, 0x3d, 0x22, 0x98, 0xb6, 0x8d, 0x2e, 0x65, 0x5c, 0x26, 0x0f, 0xe4, 0xb9, 0xaa, - 0x9f, 0xfe, 0xaa, 0x22, 0xfd, 0x55, 0x1b, 0x3c, 0x3d, 0xaa, 0x53, 0x01, 0x53, 0x8b, 0xf3, 0x28, - 0x7f, 0xa6, 0x20, 0x17, 0x92, 0xe2, 0xbf, 0x22, 0x37, 0x7a, 0x06, 0x40, 0xce, 0x6d, 0xc3, 0x21, - 0x6e, 0x1b, 0xd3, 0xd2, 0x38, 0x97, 0x31, 0x8e, 0xb0, 0x2f, 0x12, 0xbf, 0x2a, 0x73, 0xea, 0x3a, - 0x8d, 0x9c, 0xce, 0xcc, 0x48, 0xa7, 0x53, 0xf9, 0x0e, 0xe6, 0x87, 0xb9, 0x9b, 0x9f, 0xca, 0xe7, - 0x90, 0x0b, 0x59, 0x81, 0x1b, 0xad, 0x34, 0xcc, 0x68, 0x6a, 0x98, 0x58, 0xe9, 0xc1, 0x9c, 0x4a, - 0x4c, 0x82, 0x5d, 0xf2, 0xa1, 0x0f, 0x92, 0x72, 0x1b, 0xca, 0x49, 0x5b, 0xf3, 0x4c, 0xf5, 0x8b, - 0x04, 0x59, 0x1e, 0x1a, 0x68, 0x11, 0x52, 0x97, 0x06, 0x4f, 0xca, 0xd0, 0x22, 0xd6, 0x4d, 0x8d, - 0x64, 0x5d, 0x74, 0x1f, 0xf2, 0x36, 0x8b, 0x5f, 0xb6, 0xf7, 0x26, 0xe9, 0xb9, 0xa5, 0x74, 0x25, - 0xbd, 0x24, 0xab, 0xd1, 0x49, 0x65, 0x05, 0xe4, 0x5d, 0x31, 0x81, 0x8a, 0x90, 0x3e, 0x25, 0x3d, - 0x1e, 0xfc, 0xec, 0x17, 0x4d, 0xc3, 0xb5, 0xb7, 0xd8, 0x3c, 0x13, 0xa1, 0xea, 0x0f, 0x94, 0x1f, - 0x41, 0x0e, 0xc4, 0x43, 0x25, 0xc8, 0xda, 0x8e, 0xf5, 0x3d, 0xe1, 0xb5, 0x80, 0xac, 0x8a, 0x21, - 0x42, 0x30, 0x1e, 0x0a, 0x73, 0xef, 0x1f, 0xcd, 0x42, 0x46, 0xb3, 0x3a, 0xd8, 0xf0, 0x13, 0xa4, - 0xac, 0xf2, 0x11, 0x43, 0x79, 0x4b, 0x1c, 0x96, 0x53, 0xbc, 0x63, 0x27, 0xab, 0x62, 0xc8, 0x50, - 0x0e, 0x0e, 0x5a, 0x0d, 0xef, 0xca, 0x93, 0x55, 0xef, 0x5f, 0x79, 0x97, 0x82, 0x09, 0x71, 0x85, - 0xa1, 0x42, 0x60, 0x43, 0xd9, 0xb3, 
0x55, 0xe8, 0x22, 0x4f, 0x8d, 0x76, 0x91, 0x8b, 0x8b, 0x38, - 0xfd, 0xfe, 0x17, 0xf1, 0xf8, 0x68, 0xce, 0x78, 0xca, 0xf2, 0x23, 0x37, 0xb3, 0x5b, 0xba, 0xe6, - 0xed, 0x33, 0x1b, 0xcb, 0x8f, 0x7c, 0x59, 0x0d, 0x51, 0xa2, 0xfb, 0x30, 0x4e, 0xb1, 0xee, 0x96, - 0x32, 0x1e, 0xc7, 0x60, 0x31, 0xe4, 0xad, 0xb2, 0xb0, 0x3d, 0xf6, 0x8a, 0x2b, 0x8d, 0x85, 0x6d, - 0xf6, 0xf2, 0xb0, 0xe5, 0xd4, 0x75, 0xaa, 0xec, 0xc2, 0x64, 0x58, 0xc3, 0xc0, 0x67, 0x52, 0xc8, - 0x67, 0x1f, 0x85, 0x0f, 0x01, 0x93, 0x5b, 0xf4, 0x11, 0x55, 0xd6, 0x47, 0x54, 0x5f, 0xfa, 0x7d, - 0x84, 0x38, 0x1c, 0x26, 0xa4, 0xf7, 0xb1, 0x9e, 0x08, 0xb4, 0x90, 0x90, 0x30, 0x23, 0xe9, 0x32, - 0xe4, 0xba, 0xf4, 0x68, 0xc5, 0xfc, 0x4f, 0x12, 0x4c, 0x08, 0x7b, 0xa3, 0xe7, 0x90, 0x3d, 0x25, - 0xbd, 0x76, 0x07, 0xdb, 0xbc, 0x58, 0xb8, 0x9b, 0xe8, 0x97, 0xea, 0x26, 0xe9, 0x6d, 0x61, 0xbb, - 0xd9, 0xa5, 0x4e, 0x4f, 0xcd, 0x9c, 0x7a, 0x83, 0xf2, 0x33, 0xc8, 0x85, 0xa6, 0x47, 0x0d, 0x85, - 0xe7, 0xa9, 0x4f, 0x24, 0x65, 0x07, 0x8a, 0xf1, 0xc2, 0x08, 0x7d, 0x0a, 0x59, 0xbf, 0x34, 0x72, - 0x13, 0x45, 0xd9, 0x33, 0xba, 0xba, 0x49, 0x76, 0x1d, 0xcb, 0x26, 0x0e, 0xed, 0xf9, 0xdc, 0xaa, - 0xe0, 0x50, 0xfe, 0x4e, 0xc3, 0x74, 0x12, 0x05, 0xfa, 0x02, 0x80, 0x25, 0xcf, 0x48, 0x85, 0x36, - 0x1f, 0x3f, 0x14, 0x51, 0x9e, 0x8d, 0x31, 0x55, 0xa6, 0x58, 0xe7, 0x00, 0xaf, 0xa0, 0x18, 0x9c, - 0xae, 0x76, 0xa4, 0xc8, 0xbd, 0x9f, 0x7c, 0x1a, 0x07, 0xc0, 0xae, 0x07, 0xfc, 0x1c, 0x72, 0x1b, - 0xae, 0x07, 0x4e, 0xe5, 0x88, 0xbe, 0xef, 0xee, 0x25, 0xc6, 0xd1, 0x00, 0x60, 0x41, 0x70, 0x73, - 0xbc, 0x4d, 0x28, 0x88, 0xba, 0x82, 0xc3, 0xf9, 0x31, 0xa6, 0x24, 0x1d, 0x85, 0x01, 0xb4, 0x3c, - 0xe7, 0xe5, 0x60, 0xbb, 0x30, 0xc1, 0x08, 0x30, 0xb5, 0x9c, 0x12, 0x54, 0xa4, 0xa5, 0xc2, 0xf2, - 0xe3, 0x4b, 0xfd, 0x50, 0x5d, 0xb3, 0x3a, 0x36, 0x76, 0x0c, 0x97, 0x95, 0xaa, 0x3e, 0xaf, 0x1a, - 0xa0, 0x28, 0x15, 0x40, 0x83, 0xeb, 0x08, 0x20, 0xd3, 0x7c, 0x75, 0x50, 0x7f, 0xb9, 0x57, 0x1c, - 0x5b, 0x9d, 0x82, 0xeb, 0x36, 0x07, 0xe4, 0x1a, 0x28, 0xeb, 0x30, 0x9b, 
0xac, 0x7f, 0xbc, 0x86, - 0x94, 0x06, 0x6b, 0xc8, 0x55, 0x80, 0x09, 0x81, 0xa7, 0x7c, 0x06, 0x53, 0x03, 0x1e, 0x8e, 0x14, - 0x99, 0x52, 0xbc, 0x3d, 0x0c, 0x73, 0x7f, 0x0b, 0x37, 0x87, 0x38, 0x16, 0x3d, 0xf6, 0x43, 0x87, - 0x15, 0x0e, 0x12, 0x2f, 0x1c, 0xc2, 0x76, 0xda, 0x24, 0xbd, 0x43, 0x76, 0xde, 0x77, 0xb1, 0xc1, - 0xac, 0xcc, 0x82, 0xe6, 0x10, 0x9b, 0x11, 0xf0, 0xa7, 0x30, 0x19, 0xa6, 0x1a, 0x39, 0x99, 0xfc, - 0x2a, 0xc1, 0x4c, 0xa2, 0x37, 0x51, 0x39, 0x96, 0x59, 0x98, 0x5a, 0x22, 0xb7, 0x4c, 0x87, 0x73, - 0xcb, 0xc6, 0x18, 0xbf, 0x60, 0x4a, 0xd1, 0xec, 0xc2, 0x24, 0xe5, 0xf9, 0xa5, 0x1c, 0xcb, 0x2f, - 0x0c, 0x8b, 0x4f, 0x44, 0xb4, 0xf8, 0x3d, 0x05, 0x53, 0x03, 0xad, 0x0a, 0x93, 0xdc, 0x34, 0x3a, - 0x86, 0x2f, 0x47, 0x5e, 0xf5, 0x07, 0x6c, 0x36, 0xdc, 0x65, 0xf8, 0x03, 0xf4, 0x25, 0x64, 0x5d, - 0xcb, 0xa1, 0x9b, 0xa4, 0xe7, 0x09, 0x51, 0x58, 0x5e, 0xbc, 0xb8, 0x0f, 0xaa, 0xee, 0xf9, 0xd4, - 0xaa, 0x60, 0x43, 0x2f, 0x40, 0x66, 0xbf, 0x3b, 0x8e, 0xc6, 0x0f, 0x7f, 0x61, 0x79, 0x69, 0x04, - 0x0c, 0x8f, 0x5e, 0xed, 0xb3, 0x2a, 0x0f, 0x40, 0x0e, 0xe6, 0x51, 0x01, 0xa0, 0xd1, 0xdc, 0x5b, - 0x6b, 0x6e, 0x37, 0x5a, 0xdb, 0xeb, 0xc5, 0x31, 0x94, 0x07, 0xb9, 0x1e, 0x0c, 0x25, 0xe5, 0x36, - 0x64, 0xb9, 0x1c, 0x68, 0x0a, 0xf2, 0x6b, 0x6a, 0xb3, 0xbe, 0xdf, 0xda, 0xd9, 0x6e, 0xef, 0xb7, - 0xb6, 0x9a, 0xc5, 0xb1, 0xe5, 0x9f, 0xb3, 0x90, 0x63, 0x3e, 0x5a, 0xf3, 0x05, 0x40, 0x87, 0x90, - 0x8f, 0x3c, 0xd1, 0xa0, 0xe8, 0xed, 0x96, 0xf4, 0x0c, 0x54, 0x56, 0x2e, 0x22, 0xe1, 0xf5, 0xde, - 0x16, 0x40, 0xff, 0x69, 0x06, 0x45, 0x6f, 0xb6, 0x81, 0xa7, 0x9f, 0xf2, 0xc2, 0xd0, 0x75, 0x0e, - 0xf7, 0x35, 0x14, 0xa2, 0x8f, 0x0e, 0x28, 0x49, 0x88, 0x58, 0x23, 0x58, 0xbe, 0x77, 0x21, 0x0d, - 0x87, 0xde, 0x85, 0x5c, 0xe8, 0x95, 0x05, 0x0d, 0x88, 0x12, 0x07, 0xad, 0x0c, 0x27, 0xe0, 0x88, - 0x75, 0xc8, 0xf8, 0x4f, 0x1a, 0x28, 0x5a, 0x84, 0x46, 0x1e, 0x47, 0xca, 0xb7, 0x12, 0xd7, 0x38, - 0xc4, 0x21, 0xe4, 0x23, 0x2f, 0x08, 0x31, 0xb7, 0x24, 0x3d, 0x8f, 0xc4, 0xdc, 0x92, 0xfc, 0x00, - 0xb1, 0x07, 
0x93, 0xe1, 0xee, 0x1c, 0x55, 0x06, 0x78, 0x62, 0xcf, 0x08, 0xe5, 0xbb, 0x17, 0x50, - 0xf4, 0x9d, 0x13, 0xed, 0x45, 0x63, 0xce, 0x49, 0xec, 0xd2, 0x63, 0xce, 0x19, 0xd2, 0xcc, 0xbe, - 0x81, 0xd9, 0xe4, 0xc6, 0x02, 0x3d, 0x88, 0xbb, 0x61, 0x78, 0xb3, 0x59, 0xfe, 0xff, 0x48, 0xb4, - 0x7c, 0x4b, 0x02, 0x68, 0xb0, 0xe4, 0x47, 0x8b, 0xb1, 0x76, 0x62, 0x48, 0x3b, 0x52, 0xfe, 0xdf, - 0xa5, 0x74, 0xfe, 0x36, 0xab, 0x6b, 0xdf, 0xd4, 0x75, 0x83, 0xbe, 0x3e, 0x3b, 0xaa, 0x1e, 0x5b, - 0x9d, 0x9a, 0x57, 0x87, 0x59, 0x8e, 0xee, 0xff, 0xd4, 0x82, 0xe7, 0x5d, 0x9d, 0x74, 0x6b, 0xf6, - 0xd1, 0x43, 0xdd, 0xaa, 0x25, 0x3d, 0x13, 0x1f, 0x65, 0xbc, 0x92, 0x70, 0xe5, 0xdf, 0x00, 0x00, - 0x00, 0xff, 0xff, 0x62, 0x68, 0x58, 0x86, 0x45, 0x16, 0x00, 0x00, + // 1694 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x58, 0xcd, 0x6f, 0xdb, 0xc6, + 0x12, 0x37, 0x25, 0x5b, 0x32, 0x47, 0x96, 0x22, 0x6f, 0x6c, 0x47, 0x56, 0x12, 0x5b, 0x61, 0x02, + 0x3f, 0x23, 0xef, 0x45, 0xca, 0xb3, 0x93, 0xe0, 0x25, 0xaf, 0x68, 0x2b, 0x5b, 0x8a, 0xad, 0x3a, + 0xfe, 0x08, 0xfd, 0x01, 0xf4, 0x03, 0x10, 0xd6, 0xe6, 0x9a, 0x61, 0x4d, 0x89, 0x0c, 0xb9, 0x4e, + 0xad, 0x63, 0x2f, 0x3d, 0x14, 0xbd, 0x14, 0x05, 0x7a, 0xed, 0xa1, 0x7f, 0x48, 0x8f, 0xb9, 0xf5, + 0x1f, 0xea, 0xa5, 0x58, 0x72, 0x97, 0x22, 0x29, 0xca, 0x56, 0x7c, 0x08, 0xd0, 0x0b, 0xc1, 0xdd, + 0x9d, 0xf9, 0xed, 0x7c, 0xec, 0xec, 0xcc, 0x2c, 0x2c, 0x9d, 0x9a, 0x3d, 0x4a, 0x0c, 0xcd, 0xac, + 0x69, 0x98, 0xe2, 0x13, 0x4c, 0xb1, 0x69, 0xe9, 0xe1, 0xff, 0xaa, 0xed, 0x58, 0xd4, 0x42, 0xb9, + 0xd0, 0x54, 0xf9, 0x4e, 0xc0, 0x74, 0x62, 0x39, 0xa4, 0x66, 0x1a, 0x94, 0x38, 0xd8, 0x74, 0x7d, + 0xd2, 0xf2, 0x82, 0x6e, 0x59, 0xba, 0x49, 0x6a, 0xde, 0xe8, 0xf8, 0xfc, 0xb4, 0xa6, 0x9d, 0x3b, + 0x98, 0x1a, 0x56, 0x97, 0xaf, 0x2f, 0xc6, 0xd7, 0xa9, 0xd1, 0x21, 0x2e, 0xc5, 0x1d, 0xdb, 0x27, + 0x50, 0x5e, 0xc2, 0xcc, 0xba, 0x43, 0x30, 0x25, 0x0d, 0x4c, 0xb1, 0x4b, 0xa8, 0x4a, 0xde, 0x9e, + 0x13, 0x97, 0xa2, 0x2a, 0x64, 
0x35, 0x7f, 0xa6, 0x24, 0x55, 0xa4, 0xe5, 0xdc, 0xca, 0x4c, 0x35, + 0x2c, 0xa8, 0xa0, 0x16, 0x44, 0xca, 0x2d, 0x98, 0x8d, 0xe1, 0xb8, 0xb6, 0xd5, 0x75, 0x89, 0xd2, + 0x84, 0xe9, 0x0d, 0x42, 0x63, 0xe8, 0x8f, 0xe3, 0xe8, 0x73, 0x49, 0xe8, 0xad, 0x46, 0x1f, 0xbf, + 0x01, 0x28, 0x0c, 0xe3, 0x83, 0x7f, 0xb0, 0x94, 0xbf, 0x4a, 0x1e, 0x4c, 0xdd, 0xa1, 0xc6, 0x29, + 0x3e, 0xb9, 0xbe, 0x38, 0xe8, 0x1e, 0xe4, 0x30, 0x07, 0x69, 0x1b, 0x5a, 0x29, 0x55, 0x91, 0x96, + 0xe5, 0xcd, 0x31, 0x15, 0xc4, 0x64, 0x4b, 0x43, 0xb7, 0x61, 0x92, 0x62, 0xbd, 0xdd, 0xc5, 0x1d, + 0x52, 0x4a, 0xf3, 0xf5, 0x2c, 0xc5, 0xfa, 0x0e, 0xee, 0x90, 0xb5, 0x02, 0x4c, 0xbd, 0x3d, 0x27, + 0x4e, 0xaf, 0xfd, 0x06, 0x77, 0x35, 0x93, 0x28, 0x9b, 0x70, 0x33, 0x22, 0x17, 0xd7, 0xef, 0xbf, + 0x30, 0x29, 0x10, 0xb9, 0x64, 0xb3, 0x11, 0xc9, 0x02, 0x86, 0x80, 0x4c, 0xf9, 0x42, 0x38, 0x22, + 0xae, 0xe4, 0x35, 0xb0, 0x4a, 0x30, 0x17, 0xc7, 0xe2, 0x5e, 0x5d, 0x85, 0x7c, 0x5d, 0xd3, 0x0e, + 0xb0, 0x2e, 0xd0, 0x15, 0x48, 0x53, 0xac, 0x73, 0xe0, 0x62, 0x04, 0x98, 0x51, 0xb1, 0x45, 0xa5, + 0x08, 0x05, 0xc1, 0xc4, 0x61, 0xfe, 0x90, 0x60, 0xe6, 0x95, 0xe1, 0x06, 0x8a, 0xbb, 0xd7, 0xf7, + 0xc8, 0x53, 0xc8, 0x9c, 0x1a, 0x26, 0x25, 0x8e, 0xe7, 0x8c, 0xdc, 0xca, 0xdd, 0x08, 0xc3, 0x4b, + 0x6f, 0xa9, 0x79, 0x61, 0x3b, 0xc4, 0x75, 0x0d, 0xab, 0xab, 0x72, 0x62, 0xf4, 0x29, 0x80, 0x8d, + 0x75, 0xa3, 0xeb, 0x05, 0x8d, 0xe7, 0xa7, 0xdc, 0xca, 0x42, 0x84, 0x75, 0x2f, 0x58, 0xde, 0xb5, + 0xd9, 0xd7, 0x55, 0x43, 0x1c, 0xca, 0x19, 0xcc, 0xc6, 0x14, 0xe0, 0xae, 0x5b, 0x05, 0x59, 0xd8, + 0xd1, 0x2d, 0x49, 0x95, 0xf4, 0x70, 0x7b, 0xf7, 0xe9, 0xd0, 0x5d, 0x80, 0x2e, 0xb9, 0xa0, 0x6d, + 0x6a, 0x9d, 0x91, 0xae, 0x7f, 0xaa, 0x54, 0x99, 0xcd, 0x1c, 0xb0, 0x09, 0xe5, 0x27, 0x09, 0x6e, + 0xb2, 0xdd, 0xb8, 0xfa, 0x81, 0xb5, 0xfa, 0xba, 0x4b, 0xd7, 0xd7, 0x3d, 0xf5, 0xc1, 0xba, 0xeb, + 0xbe, 0xf3, 0xfa, 0xd2, 0x70, 0xd5, 0x1f, 0xc3, 0x24, 0xf7, 0x8a, 0xd0, 0x3c, 0x39, 0x2c, 0x03, + 0xaa, 0xab, 0xf4, 0xfe, 0x4b, 0x82, 0xd9, 0x43, 0x5b, 0x4b, 0x38, 
0xd4, 0x1f, 0x3d, 0x72, 0xd1, + 0x23, 0x18, 0x67, 0x50, 0xa5, 0x71, 0x4f, 0xb1, 0xf9, 0x44, 0x97, 0xb2, 0x6d, 0x55, 0x8f, 0x8c, + 0x45, 0x5d, 0x87, 0x50, 0xec, 0xb1, 0x4c, 0x24, 0x44, 0xdd, 0x36, 0x5f, 0x54, 0x03, 0xb2, 0x81, + 0xbb, 0xe1, 0x39, 0xcc, 0xc5, 0x95, 0xe7, 0x86, 0x5e, 0x8c, 0xea, 0x22, 0x79, 0x76, 0x0b, 0x69, + 0xa2, 0x60, 0xc8, 0xab, 0xc4, 0x25, 0xce, 0x3b, 0xcf, 0x61, 0xad, 0x06, 0x7a, 0x0a, 0xc0, 0x0d, + 0x21, 0x18, 0x86, 0x9b, 0x4c, 0xe6, 0x94, 0x2d, 0x0d, 0xcd, 0x87, 0x2c, 0xe2, 0x7b, 0x47, 0xd8, + 0x43, 0x79, 0x2f, 0xc1, 0xdd, 0x0d, 0x42, 0x77, 0x9d, 0xe6, 0x05, 0x25, 0x5d, 0x2d, 0xb4, 0x9d, + 0xf0, 0x51, 0x1d, 0x0a, 0x4e, 0x7f, 0xb6, 0xbf, 0x6f, 0x39, 0xb2, 0x6f, 0x44, 0x4e, 0x35, 0x1f, + 0xe2, 0xf0, 0xf7, 0xb7, 0xbe, 0xeb, 0x12, 0x27, 0xf0, 0x98, 0x9a, 0xf5, 0xc6, 0x2d, 0x0d, 0x6d, + 0x02, 0x7a, 0x43, 0xb0, 0x43, 0x8f, 0x09, 0xa6, 0x6d, 0xa3, 0x4b, 0x19, 0x97, 0xc9, 0x03, 0x79, + 0xbe, 0xea, 0xa7, 0xbf, 0xaa, 0x48, 0x7f, 0xd5, 0x06, 0x4f, 0x8f, 0xea, 0x74, 0xc0, 0xd4, 0xe2, + 0x3c, 0xca, 0xef, 0x29, 0xc8, 0x85, 0xa4, 0xf8, 0xa7, 0xc8, 0x8d, 0x9e, 0x03, 0x90, 0x0b, 0xdb, + 0x70, 0x88, 0xdb, 0xc6, 0xb4, 0x34, 0xce, 0x65, 0x8c, 0x23, 0x1c, 0x88, 0xc4, 0xaf, 0xca, 0x9c, + 0xba, 0x4e, 0x23, 0xa7, 0x33, 0x33, 0xd2, 0xe9, 0x54, 0xbe, 0x81, 0x85, 0x61, 0xee, 0xe6, 0xa7, + 0xf2, 0x05, 0xe4, 0x42, 0x56, 0xe0, 0x46, 0x2b, 0x0d, 0x33, 0x9a, 0x1a, 0x26, 0x56, 0x7a, 0x30, + 0xaf, 0x12, 0x93, 0x60, 0x97, 0x7c, 0xec, 0x83, 0xa4, 0xdc, 0x81, 0x72, 0xd2, 0xd6, 0x3c, 0x53, + 0xfd, 0x28, 0x41, 0x96, 0x87, 0x06, 0x5a, 0x82, 0xd4, 0x95, 0xc1, 0x93, 0x32, 0xb4, 0x88, 0x75, + 0x53, 0x23, 0x59, 0x17, 0x3d, 0x80, 0xbc, 0xcd, 0xe2, 0x97, 0xed, 0xbd, 0x45, 0x7a, 0x6e, 0x29, + 0x5d, 0x49, 0x2f, 0xcb, 0x6a, 0x74, 0x52, 0x59, 0x05, 0x79, 0x4f, 0x4c, 0xa0, 0x22, 0xa4, 0xcf, + 0x48, 0x8f, 0x07, 0x3f, 0xfb, 0x45, 0x33, 0x30, 0xf1, 0x0e, 0x9b, 0xe7, 0x22, 0x54, 0xfd, 0x81, + 0xf2, 0xb3, 0x04, 0x72, 0x20, 0x1f, 0x2a, 0x41, 0xd6, 0x76, 0xac, 0x6f, 0x09, 0x2f, 0x06, 0x64, + 0x55, 
0x0c, 0x11, 0x82, 0xf1, 0x50, 0x9c, 0x7b, 0xff, 0x68, 0x0e, 0x32, 0x9a, 0xd5, 0xc1, 0x86, + 0x9f, 0x21, 0x65, 0x95, 0x8f, 0x18, 0xca, 0x3b, 0xe2, 0xb0, 0xa4, 0xe2, 0x9d, 0x3b, 0x59, 0x15, + 0x43, 0x86, 0x72, 0x78, 0xd8, 0x6a, 0x78, 0x77, 0x9e, 0xac, 0x7a, 0xff, 0x4c, 0x52, 0xcb, 0xd1, + 0xbd, 0x83, 0x26, 0xab, 0xec, 0x57, 0x79, 0x9f, 0x82, 0x49, 0x71, 0xab, 0xa1, 0x42, 0x60, 0x56, + 0xd9, 0x33, 0x5f, 0xe8, 0x6e, 0x4f, 0x8d, 0x76, 0xb7, 0x8b, 0xbb, 0x39, 0xfd, 0xe1, 0x77, 0xf3, + 0xf8, 0x68, 0xfe, 0x79, 0xc6, 0x52, 0x26, 0xb7, 0xbc, 0x5b, 0x9a, 0xf0, 0xf6, 0x99, 0x8b, 0xa5, + 0x4c, 0xbe, 0xac, 0x86, 0x28, 0xd1, 0x03, 0x18, 0xa7, 0x58, 0x77, 0x4b, 0x19, 0x8f, 0x63, 0xb0, + 0x3e, 0xf2, 0x56, 0x59, 0x24, 0x9f, 0x78, 0xf5, 0x96, 0xc6, 0x22, 0x39, 0x7b, 0x75, 0x24, 0x73, + 0xea, 0x3a, 0x55, 0xf6, 0x60, 0x2a, 0xac, 0x61, 0xe0, 0x45, 0x29, 0xe4, 0xc5, 0xff, 0x84, 0xcf, + 0x05, 0x93, 0x5b, 0xb4, 0x16, 0x55, 0xd6, 0x5a, 0x54, 0x5f, 0xf9, 0xad, 0x85, 0x38, 0x2f, 0x26, + 0xa4, 0x0f, 0xb0, 0x9e, 0x08, 0xb4, 0x98, 0x90, 0x43, 0x23, 0x19, 0x34, 0xe4, 0xba, 0xf4, 0x68, + 0xf5, 0xfd, 0xf7, 0x12, 0x4c, 0x0a, 0x7b, 0xa3, 0x17, 0x90, 0x3d, 0x23, 0xbd, 0x76, 0x07, 0xdb, + 0xbc, 0x7e, 0xb8, 0x97, 0xe8, 0x97, 0xea, 0x16, 0xe9, 0x6d, 0x63, 0xbb, 0xd9, 0xa5, 0x4e, 0x4f, + 0xcd, 0x9c, 0x79, 0x83, 0xf2, 0x73, 0xc8, 0x85, 0xa6, 0x47, 0x8d, 0x8e, 0x17, 0xa9, 0xff, 0x49, + 0xca, 0x2e, 0x14, 0xe3, 0xb5, 0x12, 0xfa, 0x3f, 0x64, 0xfd, 0x6a, 0xc9, 0x4d, 0x14, 0x65, 0xdf, + 0xe8, 0xea, 0x26, 0xd9, 0x73, 0x2c, 0x9b, 0x38, 0xb4, 0xe7, 0x73, 0xab, 0x82, 0x43, 0xf9, 0x33, + 0x0d, 0x33, 0x49, 0x14, 0xe8, 0x33, 0x00, 0x96, 0x4f, 0x23, 0x45, 0xdb, 0x42, 0xfc, 0x50, 0x44, + 0x79, 0x36, 0xc7, 0x54, 0x99, 0x62, 0x9d, 0x03, 0xbc, 0x86, 0x62, 0x70, 0xba, 0xda, 0x91, 0xba, + 0xf7, 0x41, 0xf2, 0x69, 0x1c, 0x00, 0xbb, 0x11, 0xf0, 0x73, 0xc8, 0x1d, 0xb8, 0x11, 0x38, 0x95, + 0x23, 0xfa, 0xbe, 0xbb, 0x9f, 0x18, 0x47, 0x03, 0x80, 0x05, 0xc1, 0xcd, 0xf1, 0xb6, 0xa0, 0x20, + 0x4a, 0x0d, 0x0e, 0xe7, 0xc7, 0x98, 0x92, 
0x74, 0x14, 0x06, 0xd0, 0xf2, 0x9c, 0x97, 0x83, 0xed, + 0xc1, 0x24, 0x23, 0xc0, 0xd4, 0x72, 0x4a, 0x50, 0x91, 0x96, 0x0b, 0x2b, 0x4f, 0xae, 0xf4, 0x43, + 0x75, 0xdd, 0xea, 0xd8, 0xd8, 0x31, 0x5c, 0x56, 0xbd, 0xfa, 0xbc, 0x6a, 0x80, 0xa2, 0x54, 0x00, + 0x0d, 0xae, 0x23, 0x80, 0x4c, 0xf3, 0xf5, 0x61, 0xfd, 0xd5, 0x7e, 0x71, 0x6c, 0x6d, 0x1a, 0x6e, + 0xd8, 0x1c, 0x90, 0x6b, 0xa0, 0x6c, 0xc0, 0x5c, 0xb2, 0xfe, 0xf1, 0xb2, 0x52, 0x1a, 0x2c, 0x2b, + 0xd7, 0x00, 0x26, 0x05, 0x9e, 0xf2, 0x09, 0x4c, 0x0f, 0x78, 0x38, 0x52, 0x77, 0x4a, 0xf1, 0x8e, + 0x31, 0xcc, 0xfd, 0x35, 0xdc, 0x1a, 0xe2, 0x58, 0xf4, 0xc4, 0x0f, 0x1d, 0x56, 0x4b, 0x48, 0xbc, + 0x96, 0x08, 0xdb, 0x69, 0x8b, 0xf4, 0x8e, 0xd8, 0x79, 0xdf, 0xc3, 0x06, 0xb3, 0x32, 0x0b, 0x9a, + 0x23, 0x6c, 0x46, 0xc0, 0x9f, 0xc1, 0x54, 0x98, 0x6a, 0xe4, 0xfc, 0xf2, 0x9b, 0x04, 0xb3, 0x89, + 0xde, 0x44, 0xe5, 0x58, 0xae, 0x61, 0x6a, 0x89, 0x6c, 0x33, 0x13, 0xce, 0x36, 0x9b, 0x63, 0xfc, + 0x82, 0x29, 0x45, 0xf3, 0x0d, 0x93, 0x94, 0x67, 0x9c, 0x72, 0x2c, 0xe3, 0x30, 0xac, 0x7e, 0xce, + 0xf1, 0xf2, 0xcb, 0x04, 0x9f, 0x67, 0x83, 0x88, 0x66, 0xbf, 0xa4, 0x60, 0x7a, 0xa0, 0xa3, 0x61, + 0xda, 0x98, 0x46, 0xc7, 0xf0, 0x65, 0xcb, 0xab, 0xfe, 0x80, 0xcd, 0x86, 0x9b, 0x11, 0x7f, 0x80, + 0x3e, 0x87, 0xac, 0x6b, 0x39, 0x74, 0x8b, 0xf4, 0x3c, 0xc1, 0x0a, 0x2b, 0x4b, 0x97, 0xb7, 0x4b, + 0xd5, 0x7d, 0x9f, 0x5a, 0x15, 0x6c, 0xe8, 0x25, 0xc8, 0xec, 0x77, 0xd7, 0xd1, 0x78, 0x40, 0x14, + 0x56, 0x96, 0x47, 0xc0, 0xf0, 0xe8, 0xd5, 0x3e, 0xab, 0xf2, 0x10, 0xe4, 0x60, 0x1e, 0x15, 0x00, + 0x1a, 0xcd, 0xfd, 0xf5, 0xe6, 0x4e, 0xa3, 0xb5, 0xb3, 0x51, 0x1c, 0x43, 0x79, 0x90, 0xeb, 0xc1, + 0x50, 0x52, 0xee, 0x40, 0x96, 0xcb, 0x81, 0xa6, 0x21, 0xbf, 0xae, 0x36, 0xeb, 0x07, 0xad, 0xdd, + 0x9d, 0xf6, 0x41, 0x6b, 0xbb, 0x59, 0x1c, 0x5b, 0xf9, 0x21, 0x0b, 0x39, 0xe6, 0xb7, 0x75, 0x5f, + 0x00, 0x74, 0x04, 0xf9, 0xc8, 0x4b, 0x0e, 0x8a, 0xde, 0x78, 0x49, 0xaf, 0x45, 0x65, 0xe5, 0x32, + 0x12, 0x5e, 0x16, 0x6e, 0x03, 0xf4, 0x5f, 0x70, 0x50, 0xf4, 0xb6, 0x1b, 0x78, 
0x21, 0x2a, 0x2f, + 0x0e, 0x5d, 0xe7, 0x70, 0x5f, 0x42, 0x21, 0xfa, 0x36, 0x81, 0x92, 0x84, 0x88, 0xf5, 0x8b, 0xe5, + 0xfb, 0x97, 0xd2, 0x70, 0xe8, 0x3d, 0xc8, 0x85, 0x1e, 0x63, 0xd0, 0x80, 0x28, 0x71, 0xd0, 0xca, + 0x70, 0x02, 0x8e, 0x58, 0x87, 0x8c, 0xff, 0xf2, 0x81, 0xa2, 0xb5, 0x6a, 0xe4, 0x0d, 0xa5, 0x7c, + 0x3b, 0x71, 0x8d, 0x43, 0x1c, 0x41, 0x3e, 0xf2, 0xd0, 0x10, 0x73, 0x4b, 0xd2, 0x2b, 0x4a, 0xcc, + 0x2d, 0xc9, 0xef, 0x14, 0xfb, 0x30, 0x15, 0x6e, 0xe2, 0x51, 0x65, 0x80, 0x27, 0xf6, 0xda, 0x50, + 0xbe, 0x77, 0x09, 0x45, 0xdf, 0x39, 0xd1, 0x96, 0x35, 0xe6, 0x9c, 0xc4, 0x66, 0x3e, 0xe6, 0x9c, + 0x21, 0x3d, 0xef, 0x5b, 0x98, 0x4b, 0xee, 0x3f, 0xd0, 0xc3, 0xb8, 0x1b, 0x86, 0xf7, 0xa4, 0xe5, + 0x7f, 0x8f, 0x44, 0xcb, 0xb7, 0x24, 0x80, 0x06, 0x3b, 0x03, 0xb4, 0x14, 0xeb, 0x3a, 0x86, 0x74, + 0x2d, 0xe5, 0x7f, 0x5d, 0x49, 0xe7, 0x6f, 0xb3, 0xb6, 0xfe, 0x55, 0x5d, 0x37, 0xe8, 0x9b, 0xf3, + 0xe3, 0xea, 0x89, 0xd5, 0xa9, 0x79, 0xb5, 0x99, 0xe5, 0xe8, 0xfe, 0x4f, 0x2d, 0x78, 0x05, 0xd6, + 0x49, 0xb7, 0x66, 0x1f, 0x3f, 0xd2, 0xad, 0x5a, 0xd2, 0x6b, 0xf2, 0x71, 0xc6, 0x2b, 0x13, 0x57, + 0xff, 0x0e, 0x00, 0x00, 0xff, 0xff, 0xd3, 0xc0, 0xa3, 0x08, 0x6c, 0x16, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. diff --git a/flyteidl/gen/pb-go/flyteidl/event/cloudevents.pb.go b/flyteidl/gen/pb-go/flyteidl/event/cloudevents.pb.go index b692594368..dd2cf39d08 100644 --- a/flyteidl/gen/pb-go/flyteidl/event/cloudevents.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/event/cloudevents.pb.go @@ -26,18 +26,16 @@ const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // information that downstream consumers may find useful. 
type CloudEventWorkflowExecution struct { RawEvent *WorkflowExecutionEvent `protobuf:"bytes,1,opt,name=raw_event,json=rawEvent,proto3" json:"raw_event,omitempty"` - OutputData *core.LiteralMap `protobuf:"bytes,2,opt,name=output_data,json=outputData,proto3" json:"output_data,omitempty"` - OutputInterface *core.TypedInterface `protobuf:"bytes,3,opt,name=output_interface,json=outputInterface,proto3" json:"output_interface,omitempty"` - InputData *core.LiteralMap `protobuf:"bytes,4,opt,name=input_data,json=inputData,proto3" json:"input_data,omitempty"` + OutputInterface *core.TypedInterface `protobuf:"bytes,2,opt,name=output_interface,json=outputInterface,proto3" json:"output_interface,omitempty"` // The following are ExecutionMetadata fields // We can't have the ExecutionMetadata object directly because of import cycle - ArtifactIds []*core.ArtifactID `protobuf:"bytes,5,rep,name=artifact_ids,json=artifactIds,proto3" json:"artifact_ids,omitempty"` - ReferenceExecution *core.WorkflowExecutionIdentifier `protobuf:"bytes,6,opt,name=reference_execution,json=referenceExecution,proto3" json:"reference_execution,omitempty"` - Principal string `protobuf:"bytes,7,opt,name=principal,proto3" json:"principal,omitempty"` + ArtifactIds []*core.ArtifactID `protobuf:"bytes,3,rep,name=artifact_ids,json=artifactIds,proto3" json:"artifact_ids,omitempty"` + ReferenceExecution *core.WorkflowExecutionIdentifier `protobuf:"bytes,4,opt,name=reference_execution,json=referenceExecution,proto3" json:"reference_execution,omitempty"` + Principal string `protobuf:"bytes,5,opt,name=principal,proto3" json:"principal,omitempty"` // The ID of the LP that generated the execution that generated the Artifact. // Here for provenance information. // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
- LaunchPlanId *core.Identifier `protobuf:"bytes,8,opt,name=launch_plan_id,json=launchPlanId,proto3" json:"launch_plan_id,omitempty"` + LaunchPlanId *core.Identifier `protobuf:"bytes,6,opt,name=launch_plan_id,json=launchPlanId,proto3" json:"launch_plan_id,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -75,13 +73,6 @@ func (m *CloudEventWorkflowExecution) GetRawEvent() *WorkflowExecutionEvent { return nil } -func (m *CloudEventWorkflowExecution) GetOutputData() *core.LiteralMap { - if m != nil { - return m.OutputData - } - return nil -} - func (m *CloudEventWorkflowExecution) GetOutputInterface() *core.TypedInterface { if m != nil { return m.OutputInterface @@ -89,13 +80,6 @@ func (m *CloudEventWorkflowExecution) GetOutputInterface() *core.TypedInterface return nil } -func (m *CloudEventWorkflowExecution) GetInputData() *core.LiteralMap { - if m != nil { - return m.InputData - } - return nil -} - func (m *CloudEventWorkflowExecution) GetArtifactIds() []*core.ArtifactID { if m != nil { return m.ArtifactIds @@ -128,19 +112,16 @@ type CloudEventNodeExecution struct { RawEvent *NodeExecutionEvent `protobuf:"bytes,1,opt,name=raw_event,json=rawEvent,proto3" json:"raw_event,omitempty"` // The relevant task execution if applicable TaskExecId *core.TaskExecutionIdentifier `protobuf:"bytes,2,opt,name=task_exec_id,json=taskExecId,proto3" json:"task_exec_id,omitempty"` - // Hydrated output - OutputData *core.LiteralMap `protobuf:"bytes,3,opt,name=output_data,json=outputData,proto3" json:"output_data,omitempty"` // The typed interface for the task that produced the event. 
- OutputInterface *core.TypedInterface `protobuf:"bytes,4,opt,name=output_interface,json=outputInterface,proto3" json:"output_interface,omitempty"` - InputData *core.LiteralMap `protobuf:"bytes,5,opt,name=input_data,json=inputData,proto3" json:"input_data,omitempty"` + OutputInterface *core.TypedInterface `protobuf:"bytes,3,opt,name=output_interface,json=outputInterface,proto3" json:"output_interface,omitempty"` // The following are ExecutionMetadata fields // We can't have the ExecutionMetadata object directly because of import cycle - ArtifactIds []*core.ArtifactID `protobuf:"bytes,6,rep,name=artifact_ids,json=artifactIds,proto3" json:"artifact_ids,omitempty"` - Principal string `protobuf:"bytes,7,opt,name=principal,proto3" json:"principal,omitempty"` + ArtifactIds []*core.ArtifactID `protobuf:"bytes,4,rep,name=artifact_ids,json=artifactIds,proto3" json:"artifact_ids,omitempty"` + Principal string `protobuf:"bytes,5,opt,name=principal,proto3" json:"principal,omitempty"` // The ID of the LP that generated the execution that generated the Artifact. // Here for provenance information. // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
- LaunchPlanId *core.Identifier `protobuf:"bytes,8,opt,name=launch_plan_id,json=launchPlanId,proto3" json:"launch_plan_id,omitempty"` + LaunchPlanId *core.Identifier `protobuf:"bytes,6,opt,name=launch_plan_id,json=launchPlanId,proto3" json:"launch_plan_id,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -185,13 +166,6 @@ func (m *CloudEventNodeExecution) GetTaskExecId() *core.TaskExecutionIdentifier return nil } -func (m *CloudEventNodeExecution) GetOutputData() *core.LiteralMap { - if m != nil { - return m.OutputData - } - return nil -} - func (m *CloudEventNodeExecution) GetOutputInterface() *core.TypedInterface { if m != nil { return m.OutputInterface @@ -199,13 +173,6 @@ func (m *CloudEventNodeExecution) GetOutputInterface() *core.TypedInterface { return nil } -func (m *CloudEventNodeExecution) GetInputData() *core.LiteralMap { - if m != nil { - return m.InputData - } - return nil -} - func (m *CloudEventNodeExecution) GetArtifactIds() []*core.ArtifactID { if m != nil { return m.ArtifactIds @@ -273,10 +240,10 @@ type CloudEventExecutionStart struct { // The launch plan used. LaunchPlanId *core.Identifier `protobuf:"bytes,2,opt,name=launch_plan_id,json=launchPlanId,proto3" json:"launch_plan_id,omitempty"` WorkflowId *core.Identifier `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` - // Artifact IDs found + // Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run. ArtifactIds []*core.ArtifactID `protobuf:"bytes,4,rep,name=artifact_ids,json=artifactIds,proto3" json:"artifact_ids,omitempty"` - // Artifact keys found. 
- ArtifactKeys []string `protobuf:"bytes,5,rep,name=artifact_keys,json=artifactKeys,proto3" json:"artifact_keys,omitempty"` + // Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service. + ArtifactTrackers []string `protobuf:"bytes,5,rep,name=artifact_trackers,json=artifactTrackers,proto3" json:"artifact_trackers,omitempty"` Principal string `protobuf:"bytes,6,opt,name=principal,proto3" json:"principal,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -336,9 +303,9 @@ func (m *CloudEventExecutionStart) GetArtifactIds() []*core.ArtifactID { return nil } -func (m *CloudEventExecutionStart) GetArtifactKeys() []string { +func (m *CloudEventExecutionStart) GetArtifactTrackers() []string { if m != nil { - return m.ArtifactKeys + return m.ArtifactTrackers } return nil } @@ -360,43 +327,40 @@ func init() { func init() { proto.RegisterFile("flyteidl/event/cloudevents.proto", fileDescriptor_f8af3ecc827e5d5e) } var fileDescriptor_f8af3ecc827e5d5e = []byte{ - // 595 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x5d, 0x6f, 0xd3, 0x30, - 0x14, 0x55, 0x3f, 0x56, 0x56, 0xb7, 0x0c, 0x14, 0x1e, 0x08, 0x65, 0x83, 0xaa, 0x48, 0x68, 0x42, - 0x22, 0x91, 0xe0, 0x05, 0xf1, 0xa1, 0x09, 0xb6, 0x49, 0x8b, 0x60, 0x08, 0x05, 0x24, 0xa4, 0xf1, - 0x10, 0xb9, 0xf1, 0x4d, 0x66, 0x35, 0x8d, 0x23, 0xc7, 0xa1, 0xf4, 0x91, 0xff, 0xc1, 0xff, 0xe3, - 0x6f, 0xa0, 0x38, 0x89, 0xd3, 0xb8, 0x95, 0x46, 0x35, 0x78, 0x99, 0x3c, 0xdf, 0x73, 0x8e, 0xaf, - 0xcf, 0x3d, 0xa9, 0xd1, 0x38, 0x88, 0x96, 0x02, 0x28, 0x89, 0x6c, 0xf8, 0x0e, 0xb1, 0xb0, 0xfd, - 0x88, 0x65, 0x44, 0x2e, 0x53, 0x2b, 0xe1, 0x4c, 0x30, 0x63, 0xaf, 0x42, 0x58, 0x72, 0x7b, 0x34, - 0xd2, 0x18, 0xf2, 0x6f, 0x81, 0x1d, 0xed, 0xab, 0x9a, 0xcf, 0x38, 0xd8, 0x11, 0x15, 0xc0, 0x71, - 0x54, 0x2a, 0x8d, 0x0e, 0x9a, 0x55, 0x1a, 0x0b, 0xe0, 0x01, 0xf6, 
0xa1, 0x2c, 0x3f, 0x6c, 0x96, - 0x31, 0x17, 0x34, 0xc0, 0xbe, 0xf0, 0x28, 0x29, 0x01, 0x0f, 0x34, 0x3e, 0x81, 0x58, 0xd0, 0x80, - 0x02, 0xaf, 0x04, 0x42, 0xc6, 0xc2, 0x08, 0x6c, 0xf9, 0xdf, 0x34, 0x0b, 0x6c, 0x41, 0xe7, 0x90, - 0x0a, 0x3c, 0x4f, 0x0a, 0xc0, 0xe4, 0x57, 0x17, 0xdd, 0x3f, 0xce, 0x2f, 0x78, 0x9a, 0xf7, 0xfc, - 0x95, 0xf1, 0x59, 0x10, 0xb1, 0xc5, 0xe9, 0x0f, 0xf0, 0x33, 0x41, 0x59, 0x6c, 0x1c, 0xa3, 0x3e, - 0xc7, 0x0b, 0x4f, 0xde, 0xc8, 0x6c, 0x8d, 0x5b, 0x87, 0x83, 0x67, 0x8f, 0xad, 0xe6, 0xf5, 0xad, - 0x35, 0x96, 0xd4, 0x72, 0x77, 0x39, 0x5e, 0xc8, 0x95, 0xf1, 0x12, 0x0d, 0x58, 0x26, 0x92, 0x4c, - 0x78, 0x04, 0x0b, 0x6c, 0xb6, 0xa5, 0xcc, 0xbd, 0x5a, 0x26, 0xef, 0xdd, 0xfa, 0x50, 0x38, 0x73, - 0x8e, 0x13, 0x17, 0x15, 0xe8, 0x13, 0x2c, 0xb0, 0x71, 0x86, 0x6e, 0x97, 0x5c, 0x65, 0x8e, 0xd9, - 0x91, 0x02, 0x07, 0x9a, 0xc0, 0x97, 0x65, 0x02, 0xc4, 0xa9, 0x40, 0xee, 0xad, 0x82, 0xa6, 0x36, - 0x8c, 0x17, 0x08, 0xd1, 0x58, 0x35, 0xd1, 0xbd, 0xaa, 0x89, 0xbe, 0x04, 0xcb, 0x1e, 0x5e, 0xa3, - 0xe1, 0x8a, 0xf5, 0xa9, 0xb9, 0x33, 0xee, 0x6c, 0xe0, 0xbe, 0x2d, 0x21, 0xce, 0x89, 0x3b, 0xa8, - 0xe0, 0x0e, 0x49, 0x8d, 0x6f, 0xe8, 0x0e, 0x87, 0x00, 0x38, 0xc4, 0x3e, 0x78, 0x50, 0x79, 0x64, - 0xf6, 0x64, 0x03, 0x4f, 0x34, 0x91, 0x35, 0x2f, 0x1d, 0x35, 0x52, 0xd7, 0x50, 0x32, 0xf5, 0x7c, - 0xf6, 0x51, 0x3f, 0xe1, 0x34, 0xf6, 0x69, 0x82, 0x23, 0xf3, 0xc6, 0xb8, 0x75, 0xd8, 0x77, 0xeb, - 0x0d, 0xe3, 0x08, 0xed, 0x45, 0x38, 0x8b, 0xfd, 0x4b, 0x2f, 0x89, 0x70, 0xec, 0x51, 0x62, 0xee, - 0x6e, 0xbc, 0xf6, 0xca, 0x21, 0xc3, 0x82, 0xf0, 0x29, 0xc2, 0xb1, 0x43, 0x26, 0x3f, 0xbb, 0xe8, - 0x6e, 0x1d, 0x8f, 0x8f, 0x8c, 0xac, 0x1c, 0x7d, 0xb4, 0x1e, 0x8d, 0x89, 0x1e, 0x8d, 0x06, 0x43, - 0x8f, 0xc5, 0x19, 0x1a, 0x0a, 0x9c, 0xce, 0xa4, 0x27, 0x79, 0x6f, 0x6d, 0x3d, 0x5e, 0xc5, 0x58, - 0x71, 0x3a, 0xdb, 0xe4, 0x06, 0x12, 0x65, 0xc1, 0x21, 0x7a, 0xc0, 0x3a, 0xd7, 0x0d, 0x58, 0xf7, - 0x1f, 0x04, 0x6c, 0xe7, 0x1a, 0x01, 0xeb, 0x6d, 0x15, 0xb0, 0xff, 0x9c, 0x81, 0x8b, 0xd5, 0x08, - 0x34, 
0xa6, 0xf1, 0x57, 0x11, 0x68, 0x30, 0xb4, 0x08, 0x4c, 0x7e, 0xb7, 0x91, 0x59, 0x8b, 0x2b, - 0xd8, 0x67, 0x81, 0xb9, 0x30, 0xce, 0xd1, 0x50, 0x7d, 0x2e, 0x79, 0xdf, 0xad, 0xad, 0xbf, 0x98, - 0x01, 0xd4, 0x9b, 0x1b, 0x8c, 0x68, 0x6f, 0x65, 0x44, 0x9e, 0xb2, 0x45, 0x79, 0x58, 0xce, 0xee, - 0x5c, 0xc5, 0x46, 0x15, 0xda, 0x21, 0x6b, 0x13, 0xee, 0x6e, 0x35, 0xe1, 0x47, 0xe8, 0xa6, 0x62, - 0xcf, 0x60, 0x59, 0xfc, 0x02, 0xf5, 0x5d, 0x25, 0xf9, 0x1e, 0x96, 0x5a, 0x0c, 0x7a, 0x5a, 0x0c, - 0xde, 0xbd, 0xb9, 0x78, 0x15, 0x52, 0x71, 0x99, 0x4d, 0x2d, 0x9f, 0xcd, 0x6d, 0x79, 0x2c, 0xe3, - 0x61, 0xb1, 0xb0, 0xd5, 0x2b, 0x12, 0x42, 0x6c, 0x27, 0xd3, 0xa7, 0x21, 0xb3, 0x9b, 0x4f, 0xda, - 0xb4, 0x27, 0x9f, 0x8b, 0xe7, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x96, 0x62, 0x9e, 0x02, 0x1d, - 0x07, 0x00, 0x00, + // 552 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x54, 0xdd, 0x8a, 0xd3, 0x40, + 0x14, 0xa6, 0xcd, 0x6e, 0xb1, 0xd3, 0xb2, 0xae, 0xf1, 0xc2, 0x58, 0x77, 0x35, 0xf4, 0x42, 0x8a, + 0x62, 0x02, 0xeb, 0x9d, 0x3f, 0x2c, 0xba, 0x2e, 0x98, 0x0b, 0x45, 0xe2, 0x82, 0xb0, 0x5e, 0x84, + 0x69, 0xe6, 0x24, 0x3b, 0x74, 0x3a, 0x13, 0x26, 0x13, 0xeb, 0x3e, 0x83, 0xef, 0xe1, 0xeb, 0xf9, + 0x0a, 0x92, 0xc9, 0x5f, 0x93, 0x16, 0xb4, 0xe8, 0xde, 0x84, 0xc9, 0x39, 0xdf, 0xf7, 0x9d, 0x9f, + 0x6f, 0x18, 0x64, 0x47, 0xec, 0x5a, 0x01, 0x25, 0xcc, 0x85, 0x6f, 0xc0, 0x95, 0x1b, 0x32, 0x91, + 0x11, 0x7d, 0x4c, 0x9d, 0x44, 0x0a, 0x25, 0xcc, 0x83, 0x0a, 0xe1, 0xe8, 0xf0, 0x64, 0xd2, 0x61, + 0xe8, 0x6f, 0x81, 0x9d, 0x1c, 0xd5, 0xb9, 0x50, 0x48, 0x70, 0x19, 0x55, 0x20, 0x31, 0x2b, 0x95, + 0x26, 0xc7, 0xed, 0x2c, 0xe5, 0x0a, 0x64, 0x84, 0x43, 0x28, 0xd3, 0x8f, 0xda, 0x69, 0x2c, 0x15, + 0x8d, 0x70, 0xa8, 0x02, 0x4a, 0x4a, 0xc0, 0xc3, 0x0e, 0x9f, 0x00, 0x57, 0x34, 0xa2, 0x20, 0x2b, + 0x81, 0x58, 0x88, 0x98, 0x81, 0xab, 0xff, 0xe6, 0x59, 0xe4, 0x2a, 0xba, 0x84, 0x54, 0xe1, 0x65, + 0x52, 0x00, 0xa6, 0x3f, 0x0d, 0xf4, 0xe0, 0x2c, 0x1f, 0xf0, 0x3c, 0xef, 
0xf9, 0x8b, 0x90, 0x8b, + 0x88, 0x89, 0xd5, 0xf9, 0x77, 0x08, 0x33, 0x45, 0x05, 0x37, 0xcf, 0xd0, 0x50, 0xe2, 0x55, 0xa0, + 0x27, 0xb2, 0x7a, 0x76, 0x6f, 0x36, 0x3a, 0x79, 0xec, 0xb4, 0xc7, 0x77, 0x36, 0x58, 0x5a, 0xcb, + 0xbf, 0x25, 0xf1, 0x4a, 0x9f, 0xcc, 0xf7, 0xe8, 0x50, 0x64, 0x2a, 0xc9, 0x54, 0x50, 0x0f, 0x68, + 0xf5, 0xb5, 0xd6, 0x71, 0xa3, 0x95, 0x0f, 0xe0, 0x5c, 0x5c, 0x27, 0x40, 0xbc, 0x0a, 0xe4, 0xdf, + 0x2e, 0x68, 0x75, 0xc0, 0x7c, 0x85, 0xc6, 0x6b, 0x4b, 0x48, 0x2d, 0xc3, 0x36, 0x66, 0xa3, 0x93, + 0xfb, 0x1d, 0x95, 0x37, 0x25, 0xc4, 0x7b, 0xe7, 0x8f, 0x2a, 0xb8, 0x47, 0x52, 0xf3, 0x2b, 0xba, + 0x2b, 0x21, 0x02, 0x09, 0x3c, 0x84, 0x00, 0xaa, 0x6e, 0xad, 0x3d, 0xdd, 0xca, 0x93, 0x8e, 0xc8, + 0xc6, 0x54, 0x5e, 0xbd, 0x5c, 0xdf, 0xac, 0x65, 0x9a, 0x4d, 0x1d, 0xa1, 0x61, 0x22, 0x29, 0x0f, + 0x69, 0x82, 0x99, 0xb5, 0x6f, 0xf7, 0x66, 0x43, 0xbf, 0x09, 0x98, 0xa7, 0xe8, 0x80, 0xe1, 0x8c, + 0x87, 0x57, 0x41, 0xc2, 0x30, 0x0f, 0x28, 0xb1, 0x06, 0xba, 0x6a, 0xb7, 0xf5, 0xb5, 0x22, 0xe3, + 0x82, 0xf0, 0x89, 0x61, 0xee, 0x91, 0xe9, 0x0f, 0x03, 0xdd, 0x6b, 0x8c, 0xfa, 0x28, 0xc8, 0x5a, + 0xe9, 0xd3, 0x4d, 0x93, 0xa6, 0x5d, 0x93, 0x5a, 0x8c, 0x4d, 0x83, 0xc6, 0x0a, 0xa7, 0x0b, 0xbd, + 0x93, 0xbc, 0xb7, 0x7e, 0xd7, 0xe8, 0xc2, 0x1c, 0x9c, 0x2e, 0xb6, 0x6d, 0x03, 0xa9, 0x32, 0xe1, + 0x91, 0xad, 0x56, 0x1b, 0xff, 0xc5, 0xea, 0xbd, 0x9d, 0xac, 0xbe, 0x61, 0x37, 0x2e, 0xd7, 0xcd, + 0x68, 0xed, 0xe5, 0xaf, 0xcc, 0x68, 0x31, 0x3a, 0x66, 0x4c, 0x7f, 0xf5, 0x91, 0xd5, 0x88, 0xd7, + 0xb0, 0xcf, 0x0a, 0x4b, 0x65, 0x7e, 0x40, 0xe3, 0xfa, 0xe2, 0xe6, 0x7d, 0xf7, 0x76, 0xbe, 0xbb, + 0x23, 0x68, 0x82, 0x5b, 0x16, 0xd1, 0xdf, 0x69, 0x11, 0xe6, 0x0b, 0x34, 0x5a, 0x95, 0xc5, 0x72, + 0xb6, 0xf1, 0x27, 0x36, 0xaa, 0xd0, 0x1e, 0xf9, 0x47, 0x87, 0x9f, 0xa2, 0x3b, 0x35, 0x5b, 0x49, + 0x1c, 0x2e, 0x40, 0xa6, 0xd6, 0xbe, 0x6d, 0xcc, 0x86, 0xfe, 0x61, 0x95, 0xb8, 0x28, 0xe3, 0xed, + 0xeb, 0x30, 0xe8, 0x5c, 0x87, 0xb7, 0xaf, 0x2f, 0x5f, 0xc6, 0x54, 0x5d, 0x65, 0x73, 0x27, 0x14, + 0x4b, 0x57, 
0x97, 0x17, 0x32, 0x2e, 0x0e, 0x6e, 0xfd, 0xc2, 0xc6, 0xc0, 0xdd, 0x64, 0xfe, 0x2c, + 0x16, 0x6e, 0xfb, 0xb9, 0x9f, 0x0f, 0xf4, 0x53, 0xfa, 0xfc, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, + 0xff, 0xef, 0x43, 0x56, 0x39, 0x06, 0x00, 0x00, } diff --git a/flyteidl/gen/pb-go/flyteidl/service/admin.pb.go b/flyteidl/gen/pb-go/flyteidl/service/admin.pb.go index 60bc47c3ec..cd824e0f8f 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/admin.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/service/admin.pb.go @@ -29,143 +29,185 @@ const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package func init() { proto.RegisterFile("flyteidl/service/admin.proto", fileDescriptor_5cfa31da1d67295d) } var fileDescriptor_5cfa31da1d67295d = []byte{ - // 2165 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x9a, 0xdf, 0x6f, 0x1d, 0x47, - 0x15, 0xc7, 0x35, 0x36, 0x02, 0x31, 0x4d, 0x62, 0x7b, 0x9a, 0x60, 0x67, 0x63, 0x27, 0xe9, 0xba, - 0x8e, 0x7f, 0xdf, 0x75, 0x93, 0xb4, 0x55, 0x43, 0xd3, 0xd4, 0xad, 0x9d, 0x2b, 0x43, 0x9a, 0x14, - 0x93, 0x82, 0x64, 0x21, 0x5d, 0xad, 0xef, 0x4e, 0x9c, 0x4d, 0xee, 0xbd, 0x7b, 0xbb, 0x3b, 0x76, - 0xb1, 0x2c, 0x8b, 0x1f, 0x42, 0x88, 0x0a, 0x89, 0x07, 0x7e, 0x08, 0x0a, 0x11, 0xa5, 0x50, 0xc4, - 0xcf, 0xf2, 0x02, 0x2a, 0xe2, 0xa5, 0x12, 0x02, 0x24, 0x5e, 0x78, 0x81, 0x77, 0x5e, 0xe8, 0x33, - 0x7f, 0x03, 0xda, 0x33, 0x33, 0x7b, 0x77, 0x76, 0x77, 0x76, 0x67, 0x4d, 0xca, 0x13, 0x6f, 0xf6, - 0x3d, 0xdf, 0x99, 0xf9, 0x9c, 0x33, 0x67, 0xce, 0xcc, 0xee, 0x0e, 0x9e, 0xbc, 0xd3, 0xd9, 0x67, - 0xd4, 0xf7, 0x3a, 0x4e, 0x44, 0xc3, 0x3d, 0xbf, 0x4d, 0x1d, 0xd7, 0xeb, 0xfa, 0xbd, 0x46, 0x3f, - 0x0c, 0x58, 0x40, 0x46, 0xa5, 0xb5, 0x21, 0xac, 0xd6, 0xe4, 0x4e, 0x10, 0xec, 0x74, 0xa8, 0xe3, - 0xf6, 0x7d, 0xc7, 0xed, 0xf5, 0x02, 0xe6, 0x32, 0x3f, 0xe8, 0x45, 0x5c, 0x6f, 0x0d, 0x7a, 0x83, - 0x5e, 0x9c, 0x7e, 0x18, 0xdc, 0xa3, 0x6d, 0x26, 0xac, 0x8d, 0x62, 0x6b, 0xcb, 0x0b, 0xba, 0xae, - 0xdf, 0x6b, 0xb9, 0x8c, 0x85, 0xfe, 
0xf6, 0x2e, 0xa3, 0xb2, 0xb7, 0x59, 0x8d, 0x3e, 0x27, 0x3c, - 0x9d, 0x11, 0x32, 0x37, 0xba, 0x2f, 0x4c, 0x53, 0x19, 0xd3, 0x6b, 0x41, 0x78, 0xff, 0x4e, 0x27, - 0x78, 0x4d, 0x98, 0xe7, 0x34, 0xe6, 0xfc, 0x18, 0xe7, 0x33, 0xca, 0x8e, 0xbb, 0xdb, 0x6b, 0xdf, - 0x6d, 0xf5, 0x3b, 0xae, 0x08, 0x96, 0x65, 0x65, 0x14, 0x74, 0x8f, 0xf6, 0xa4, 0xeb, 0x67, 0xb3, - 0xb6, 0xcf, 0xd3, 0xf6, 0x6e, 0x1c, 0x39, 0x8d, 0xab, 0x5d, 0x97, 0xb5, 0xef, 0xba, 0xdb, 0x1d, - 0xda, 0x0a, 0x69, 0x14, 0xec, 0x86, 0x6d, 0x2a, 0x84, 0xd3, 0x19, 0x61, 0x2f, 0xf0, 0x68, 0x2b, - 0xdb, 0xdb, 0x74, 0x41, 0x3c, 0x72, 0xa2, 0xec, 0x5c, 0xed, 0xd1, 0x30, 0x1a, 0x58, 0xcf, 0x64, - 0xac, 0xed, 0xa0, 0xdb, 0xd5, 0xd2, 0x7a, 0x34, 0x6a, 0x87, 0x7e, 0x3f, 0xee, 0xbc, 0x45, 0x7b, - 0xcc, 0x67, 0xfb, 0x5c, 0x78, 0xf1, 0x2b, 0x37, 0xf1, 0xb1, 0xd5, 0x58, 0xf2, 0x69, 0x9e, 0x3e, - 0xa4, 0x8b, 0xf1, 0x8b, 0x21, 0x75, 0x19, 0xbd, 0xed, 0x46, 0xf7, 0xc9, 0x63, 0x49, 0x46, 0x34, - 0x78, 0xd6, 0xc5, 0xbf, 0x72, 0xfb, 0x26, 0x7d, 0x75, 0x97, 0x46, 0xcc, 0xb2, 0xcb, 0x24, 0x51, - 0x3f, 0xe8, 0x45, 0xd4, 0x9e, 0xf8, 0xf2, 0x3f, 0xde, 0xff, 0xd6, 0x10, 0xb1, 0x8f, 0x43, 0x56, - 0xee, 0x3d, 0x01, 0xfe, 0x46, 0x57, 0xd0, 0x02, 0xf9, 0x1a, 0xc2, 0x1f, 0x69, 0x52, 0x06, 0x83, - 0x9d, 0xcf, 0xf6, 0x74, 0x6b, 0x3b, 0xce, 0xa6, 0x26, 0x65, 0x72, 0xac, 0x93, 0x45, 0x63, 0xd9, - 0xeb, 0xd0, 0xfb, 0x35, 0x72, 0x55, 0xe9, 0xdd, 0x39, 0xf0, 0xbd, 0x86, 0x48, 0xc8, 0x43, 0xf8, - 0x87, 0x67, 0x31, 0xff, 0xbb, 0xe7, 0x76, 0x29, 0xff, 0x4b, 0x44, 0xf5, 0x90, 0x7c, 0x17, 0xe1, - 0x47, 0x6e, 0xf8, 0x11, 0xb0, 0x6c, 0x78, 0x11, 0x59, 0xc9, 0x0e, 0x76, 0xd3, 0xed, 0x52, 0x6f, - 0x1d, 0xa2, 0xb7, 0xe1, 0xc5, 0x51, 0xbc, 0xe3, 0xd3, 0x30, 0x6e, 0x21, 0xf1, 0xe6, 0x8d, 0x5b, - 0xd8, 0x8b, 0xc0, 0x3c, 0x43, 0xa6, 0xd3, 0xcc, 0x2d, 0xdf, 0x8b, 0x9c, 0x83, 0x01, 0xb3, 0x00, - 0x26, 0xbf, 0x41, 0xf8, 0xa3, 0x92, 0x2c, 0x22, 0xd3, 0xd9, 0x51, 0x36, 0x45, 0x02, 0xa6, 0x51, - 0x26, 0x8a, 0x22, 0x05, 0x23, 0x6f, 0xc3, 0xc8, 0x9f, 0x23, 0x2b, 0x75, 
0xa3, 0xb5, 0x35, 0x47, - 0x2e, 0x98, 0xb5, 0x21, 0x87, 0xf8, 0x04, 0xcf, 0x80, 0xcf, 0x8a, 0xd5, 0x4a, 0x66, 0xb2, 0x3c, - 0xd2, 0xa2, 0x26, 0xd3, 0x85, 0x2a, 0x99, 0x48, 0xa8, 0x49, 0x70, 0xe2, 0x63, 0xf6, 0x98, 0x04, - 0x92, 0x65, 0x01, 0x92, 0xea, 0xdb, 0x08, 0x3f, 0xd2, 0xa4, 0x2c, 0x19, 0xbc, 0x3a, 0xb1, 0x26, - 0x74, 0xe3, 0xda, 0x1b, 0x30, 0xd2, 0x8b, 0x64, 0x35, 0x37, 0x52, 0xed, 0x04, 0x7b, 0x13, 0xe1, - 0x91, 0x78, 0x0a, 0x64, 0xdf, 0x1f, 0x78, 0x92, 0x39, 0xc0, 0x3e, 0x4f, 0x66, 0xb3, 0xec, 0xba, - 0x44, 0x7b, 0x0f, 0xe1, 0xe3, 0x69, 0x42, 0xc3, 0x64, 0x9b, 0xd4, 0x45, 0x0f, 0x28, 0xee, 0x01, - 0x85, 0x47, 0x2e, 0x1f, 0x25, 0x82, 0x5b, 0x4b, 0x64, 0xc1, 0xbc, 0x1d, 0xf9, 0x2a, 0xc2, 0xa3, - 0x3c, 0x55, 0x6e, 0x40, 0xf5, 0x7f, 0xb9, 0xe3, 0xf6, 0xc8, 0x6c, 0x16, 0x6f, 0x60, 0x53, 0xb3, - 0x6f, 0xae, 0x5a, 0x28, 0xf2, 0xef, 0x1c, 0xf8, 0x74, 0xda, 0x3e, 0x29, 0xd9, 0x52, 0x9b, 0x0d, - 0xa4, 0xe0, 0x0f, 0x10, 0x3e, 0xde, 0xa4, 0x2c, 0x45, 0x51, 0x9d, 0x84, 0x96, 0x7e, 0x78, 0xfb, - 0x06, 0x0c, 0x78, 0x9d, 0xac, 0x15, 0x0d, 0x58, 0x3b, 0x13, 0x7f, 0x8c, 0xf0, 0xa3, 0x4d, 0xca, - 0x56, 0xdb, 0xcc, 0xdf, 0x2b, 0x8d, 0x54, 0x56, 0x61, 0x82, 0x7a, 0x1d, 0x50, 0x9f, 0x27, 0xcf, - 0x49, 0x54, 0x17, 0x3a, 0x69, 0xd5, 0x24, 0x26, 0x0f, 0x10, 0x3e, 0x15, 0x27, 0x50, 0x96, 0x21, - 0x22, 0x8b, 0x55, 0x98, 0xe9, 0xe4, 0x3c, 0xab, 0x47, 0x85, 0xf4, 0x7c, 0x0a, 0x70, 0x57, 0x48, - 0xa3, 0x14, 0x37, 0xbf, 0x56, 0xde, 0x46, 0x78, 0x2c, 0xee, 0x60, 0xd0, 0xdd, 0x07, 0xbe, 0x9e, - 0x2f, 0x02, 0x6a, 0x6a, 0x45, 0xa4, 0x18, 0x75, 0x4b, 0xfa, 0xaf, 0xa2, 0xe8, 0xa4, 0xe3, 0x67, - 0xb4, 0xa8, 0xab, 0xe2, 0xd6, 0x07, 0x98, 0x7b, 0xe4, 0xe9, 0x23, 0x66, 0xe4, 0x96, 0x43, 0x96, - 0x6b, 0x35, 0x25, 0xef, 0x22, 0x3c, 0xfa, 0x4a, 0xdf, 0x33, 0x5e, 0xdc, 0x5c, 0x6b, 0xb0, 0xb8, - 0xa5, 0x50, 0x2c, 0xee, 0x5b, 0xe0, 0xd9, 0x86, 0xf5, 0x50, 0xd6, 0x5a, 0x5c, 0x0c, 0xbe, 0x84, - 0xf0, 0x08, 0x2f, 0x20, 0xeb, 0xf2, 0x88, 0x47, 0x72, 0x3b, 0x5d, 0x62, 0x52, 0x6b, 0xd2, 0x6c, - 0xa5, 0x4e, 
0x50, 0x4f, 0x01, 0xf5, 0xb8, 0x4d, 0x24, 0x75, 0x72, 0x9c, 0x84, 0x82, 0xf4, 0x0d, - 0x84, 0xc7, 0x36, 0x29, 0xf7, 0x64, 0x40, 0x31, 0xa7, 0xed, 0x5d, 0x6a, 0x6b, 0x73, 0x5c, 0x00, - 0x8e, 0xf3, 0xf6, 0x99, 0x3c, 0x87, 0x13, 0x8a, 0x4e, 0x63, 0xa0, 0xaf, 0x23, 0x3c, 0xba, 0x49, - 0xdb, 0xc1, 0x1e, 0x0d, 0x07, 0x3c, 0xb3, 0x25, 0x3c, 0x20, 0xad, 0x8d, 0x33, 0x03, 0x38, 0xe7, - 0x6c, 0xab, 0x10, 0x07, 0xfa, 0x8c, 0x69, 0xbe, 0x83, 0xf0, 0xb1, 0x26, 0x65, 0x03, 0x92, 0x45, - 0xdd, 0x9e, 0x96, 0x48, 0x52, 0x95, 0xfb, 0xb4, 0x96, 0xc6, 0xbe, 0x0a, 0xe3, 0x3f, 0x4d, 0x9e, - 0x2c, 0x18, 0xdf, 0xa0, 0x08, 0xbe, 0x8d, 0xf0, 0x08, 0x4f, 0x4f, 0x93, 0xd4, 0x51, 0x33, 0x7e, - 0xb6, 0x52, 0x27, 0x62, 0xf4, 0x3c, 0x30, 0x5e, 0xb1, 0x8e, 0xc6, 0x18, 0x87, 0xef, 0x0f, 0x08, - 0x8f, 0xa6, 0xc3, 0xb7, 0xe6, 0x32, 0x97, 0x38, 0x26, 0x21, 0x8c, 0x95, 0x12, 0x78, 0xc5, 0xbc, - 0x81, 0x20, 0x7f, 0x01, 0xc8, 0x9f, 0x25, 0x57, 0x24, 0xb9, 0xe7, 0x32, 0xb7, 0x66, 0x88, 0x5f, - 0x47, 0xf8, 0x44, 0x5c, 0xd1, 0x92, 0x41, 0x0c, 0x0b, 0xe4, 0x94, 0x36, 0xbc, 0x50, 0x1f, 0x2f, - 0x01, 0xda, 0x32, 0x59, 0xac, 0x11, 0x54, 0xf2, 0x0e, 0xc2, 0xe4, 0x36, 0x0d, 0xbb, 0x7e, 0x4f, - 0x99, 0xf1, 0x79, 0xed, 0x50, 0x89, 0x58, 0x52, 0x2d, 0x98, 0x48, 0xd5, 0x79, 0x5f, 0x38, 0xfa, - 0xbc, 0xff, 0x9d, 0xcf, 0xfb, 0xcd, 0xc0, 0xa3, 0x25, 0x8b, 0x58, 0x31, 0xa7, 0x96, 0xcd, 0x54, - 0xa9, 0xd0, 0xde, 0x03, 0xbc, 0x3e, 0xe9, 0x49, 0x3c, 0xf5, 0x51, 0x9a, 0x33, 0x26, 0xff, 0xb6, - 0xb2, 0xc0, 0x8a, 0x25, 0x4d, 0xaf, 0x18, 0x06, 0x15, 0x1b, 0x7a, 0xf7, 0xbd, 0x43, 0xf2, 0x4f, - 0x84, 0x49, 0x3c, 0x85, 0x0a, 0x4d, 0x94, 0xaf, 0x95, 0x8a, 0x3d, 0x9d, 0x19, 0x8f, 0x55, 0x2a, - 0xed, 0x03, 0xf0, 0x6d, 0x97, 0x44, 0x5a, 0xdf, 0x92, 0xb3, 0xba, 0xc6, 0xc3, 0x62, 0x7b, 0xe2, - 0x67, 0xb1, 0x99, 0x67, 0xfc, 0x4f, 0x3f, 0x84, 0x4f, 0xe7, 0x1d, 0xbc, 0x1e, 0x84, 0xf0, 0x18, - 0xee, 0x94, 0xd2, 0x0b, 0x55, 0x4d, 0x77, 0x7f, 0x3b, 0x0c, 0xfe, 0xfe, 0x7a, 0x98, 0xfc, 0x62, - 0x58, 0x7a, 0xdc, 0xbe, 0xeb, 0x77, 0xbc, 0x90, 
0x66, 0x5f, 0x7e, 0x44, 0xce, 0x81, 0xfa, 0x43, - 0x4b, 0xce, 0x8d, 0xf2, 0x8b, 0x26, 0x2a, 0xb5, 0x9b, 0x26, 0x01, 0xab, 0xdd, 0x52, 0x64, 0x8e, - 0x49, 0x3b, 0x99, 0x5a, 0x45, 0x6a, 0xf1, 0xe0, 0x5f, 0xea, 0x83, 0xd4, 0x94, 0xc0, 0x4a, 0x89, - 0x96, 0x4a, 0x0a, 0xe4, 0xc1, 0xa4, 0x48, 0x13, 0x52, 0x16, 0xee, 0xb7, 0x5c, 0xc6, 0x68, 0xb7, - 0xcf, 0x0e, 0xc9, 0xbf, 0x11, 0x3e, 0x99, 0x5d, 0xdd, 0x50, 0xd9, 0x17, 0xab, 0x56, 0x78, 0xba, - 0xaa, 0x2f, 0x99, 0x89, 0x45, 0x4d, 0xca, 0x2d, 0x0c, 0xa8, 0xe8, 0xff, 0xa3, 0x95, 0xff, 0x05, - 0x3c, 0xb2, 0x49, 0x77, 0xfc, 0x88, 0xd1, 0xf0, 0x65, 0xde, 0x61, 0x7e, 0xb3, 0x15, 0x06, 0xa9, - 0xd3, 0x6e, 0xb6, 0x39, 0x9d, 0x70, 0xf0, 0x0c, 0x38, 0x78, 0xca, 0x1e, 0x95, 0x0e, 0x0a, 0x74, - 0x38, 0xa5, 0xbd, 0x8a, 0x8f, 0xf3, 0xbd, 0x59, 0x0e, 0x3f, 0xae, 0xe9, 0xd6, 0x9a, 0xd1, 0x18, - 0x32, 0x5b, 0xfb, 0x79, 0x18, 0xcd, 0xb2, 0x4e, 0x65, 0x47, 0x8b, 0x1d, 0x87, 0x12, 0x7e, 0x07, - 0x1f, 0x8b, 0x97, 0xa8, 0x68, 0x1e, 0x11, 0x5b, 0xd3, 0x71, 0xe9, 0xdb, 0x25, 0xd9, 0x5a, 0xbe, - 0xe9, 0x23, 0x39, 0xef, 0xc8, 0x1b, 0x08, 0x3f, 0xaa, 0xbe, 0x14, 0x5a, 0xdf, 0xa3, 0x3d, 0x46, - 0x96, 0x2b, 0x37, 0x7d, 0xd0, 0xc9, 0xa1, 0x1b, 0xa6, 0x72, 0x11, 0x80, 0x69, 0x00, 0x9a, 0xb2, - 0x27, 0x92, 0x3d, 0x2e, 0x36, 0x47, 0xea, 0x0b, 0xa3, 0xd7, 0x93, 0x03, 0x3a, 0xe4, 0x26, 0x70, - 0xcd, 0x97, 0xa6, 0xad, 0xc2, 0xb4, 0x60, 0x22, 0xd5, 0xbd, 0x39, 0x10, 0x3c, 0x71, 0x0e, 0x66, - 0x58, 0xe2, 0x3a, 0xab, 0x61, 0x01, 0x93, 0x19, 0x4b, 0x91, 0xb4, 0x82, 0x25, 0x79, 0x3b, 0xfb, - 0xc5, 0x61, 0xd8, 0xde, 0x95, 0x2e, 0xf2, 0xdb, 0xbb, 0x62, 0x2e, 0xdb, 0xde, 0x15, 0xa1, 0xfd, - 0x93, 0x21, 0x18, 0xfe, 0xc1, 0x10, 0x79, 0x63, 0x48, 0x79, 0x0b, 0x9a, 0x59, 0xe7, 0xc6, 0xb5, - 0xbf, 0x46, 0xb1, 0x37, 0xae, 0xee, 0x15, 0xe5, 0xbc, 0xb0, 0x7e, 0x17, 0x15, 0xec, 0x7c, 0x85, - 0x2e, 0x2c, 0xc9, 0xf9, 0x1a, 0xfc, 0xbd, 0x21, 0x7e, 0x18, 0x51, 0x62, 0x57, 0x70, 0x18, 0x51, - 0xec, 0xa5, 0xbb, 0x73, 0x4e, 0x69, 0xff, 0x0e, 0xc1, 0x4c, 0xbc, 0x83, 0xc8, 0x2f, 
0x91, 0x76, - 0x26, 0x8c, 0xa7, 0xc1, 0x74, 0x0e, 0xcc, 0x26, 0x40, 0x1f, 0x7d, 0xf2, 0x60, 0x18, 0xb6, 0x27, - 0xc5, 0x9f, 0xe2, 0xed, 0x29, 0x9b, 0xa1, 0xa5, 0xdb, 0x53, 0xb1, 0x58, 0x2c, 0x99, 0x9f, 0xf3, - 0xa4, 0x7d, 0x6b, 0x88, 0xfc, 0x70, 0x48, 0xd9, 0xa1, 0xfe, 0x9f, 0xb9, 0xd9, 0xcc, 0xfd, 0x17, - 0xc2, 0x53, 0xca, 0x66, 0xb6, 0x06, 0x5d, 0xae, 0x26, 0xdf, 0xed, 0xc8, 0x65, 0xcd, 0x36, 0x92, - 0x15, 0xaa, 0x8f, 0xb5, 0x4f, 0xd6, 0x6c, 0x25, 0x66, 0xee, 0x15, 0x98, 0xb8, 0x5b, 0xd6, 0x27, - 0x32, 0x3b, 0x53, 0xfe, 0xe3, 0xa6, 0x73, 0xa0, 0x7e, 0x5b, 0x14, 0xc1, 0x49, 0xfd, 0x28, 0x82, - 0x13, 0x97, 0xc8, 0x3f, 0x22, 0x6c, 0x35, 0x29, 0xd3, 0xb9, 0xf8, 0x84, 0x21, 0x6c, 0xaa, 0x6c, - 0x5e, 0xac, 0xd3, 0x44, 0x38, 0xf7, 0x2c, 0x38, 0xf7, 0xd4, 0xe0, 0x1d, 0x7b, 0x89, 0x73, 0xf9, - 0x77, 0x84, 0x7f, 0x43, 0x78, 0x6a, 0x8d, 0x76, 0xe8, 0x7f, 0x3f, 0x53, 0xbc, 0x97, 0xba, 0x33, - 0x25, 0x5b, 0x09, 0x67, 0xae, 0x81, 0x33, 0xcf, 0x2c, 0x1c, 0xc9, 0x99, 0x78, 0x4e, 0xde, 0x45, - 0x78, 0x5c, 0xc9, 0xbc, 0x94, 0x27, 0x0d, 0x0d, 0x93, 0x2e, 0xdb, 0x1c, 0x63, 0xbd, 0xa0, 0xbf, - 0x02, 0xf4, 0x97, 0x2d, 0x27, 0x4b, 0x5f, 0x91, 0x60, 0x31, 0xf8, 0x9b, 0xfc, 0xc0, 0x9d, 0xa7, - 0x5e, 0xac, 0xa4, 0x48, 0x25, 0xd0, 0x92, 0x99, 0x58, 0xf0, 0x2e, 0x01, 0xef, 0x05, 0xf2, 0x78, - 0x19, 0xaf, 0x84, 0x24, 0xbf, 0x42, 0x78, 0x5c, 0x49, 0x95, 0x5a, 0xa1, 0x55, 0xd3, 0xc3, 0x31, - 0xd6, 0x0b, 0x54, 0xf1, 0x3d, 0x6b, 0xc1, 0x08, 0x35, 0x8e, 0xe7, 0xfb, 0x08, 0x4f, 0xf0, 0xe9, - 0x91, 0xa7, 0xc4, 0x14, 0xae, 0xf6, 0xf5, 0x94, 0x2e, 0x15, 0x56, 0xcc, 0x1b, 0x08, 0x60, 0x0a, - 0xc0, 0x2d, 0x6b, 0x2b, 0xf7, 0x01, 0xee, 0x08, 0xd5, 0x46, 0xf9, 0x4d, 0x76, 0x04, 0x6e, 0xfe, - 0x1e, 0xe1, 0x53, 0xa9, 0xef, 0x9d, 0x29, 0x1f, 0x97, 0xaa, 0x91, 0x53, 0x89, 0xb3, 0x6c, 0xa8, - 0x16, 0xde, 0xad, 0x82, 0x77, 0x1f, 0x27, 0xcf, 0x94, 0x7a, 0x97, 0x5b, 0xa1, 0x83, 0x77, 0x13, - 0x87, 0xe4, 0x4f, 0x08, 0x4f, 0xf0, 0x49, 0x3e, 0xda, 0x04, 0xa9, 0x09, 0xb5, 0x62, 0xde, 0x40, - 0xb8, 0xb0, 0x06, 0x2e, 
0x3c, 0xb7, 0x70, 0x74, 0x17, 0xe2, 0xf8, 0xff, 0x08, 0xe1, 0xf1, 0xf8, - 0x20, 0xf5, 0x92, 0xbc, 0x13, 0x52, 0xb6, 0x28, 0x34, 0x42, 0xed, 0xa2, 0xd0, 0xea, 0x85, 0x0b, - 0x8f, 0x83, 0x0b, 0x67, 0xc9, 0xa4, 0x74, 0x61, 0x70, 0x33, 0x65, 0xe0, 0x43, 0x5c, 0x59, 0xe0, - 0x6b, 0xd5, 0xe0, 0xe3, 0x92, 0x4f, 0xa3, 0xfc, 0xc3, 0x6d, 0xea, 0xdb, 0x53, 0xfa, 0x0c, 0x79, - 0xae, 0x42, 0x97, 0x4f, 0x85, 0xf8, 0xa8, 0xe0, 0xf1, 0xab, 0x26, 0x7e, 0x1c, 0x42, 0x79, 0x49, - 0xa6, 0xc5, 0xf6, 0xfb, 0xf1, 0x19, 0x22, 0xbf, 0x09, 0xfd, 0x0c, 0xe1, 0x13, 0x4d, 0x9a, 0x02, - 0xdc, 0xcf, 0x5f, 0x1a, 0x48, 0x19, 0x53, 0x69, 0x7b, 0xa6, 0x44, 0x66, 0x7f, 0x0a, 0xc8, 0x3e, - 0x49, 0x36, 0x4c, 0xc9, 0xaa, 0x5f, 0x18, 0xbf, 0x87, 0xf0, 0x18, 0x5f, 0xe8, 0x69, 0xd8, 0xb9, - 0x12, 0x0a, 0xb5, 0x8e, 0xcc, 0x1b, 0x28, 0xc5, 0xe4, 0xde, 0x06, 0xfa, 0x9b, 0xd6, 0xc3, 0xa3, - 0x8f, 0xf3, 0xb5, 0x83, 0x71, 0x93, 0xb2, 0xcf, 0xf0, 0xb3, 0x5b, 0xfe, 0x8e, 0xcf, 0xc0, 0xa6, - 0xbd, 0xe3, 0x93, 0x96, 0x08, 0xd4, 0x71, 0x40, 0x1d, 0x23, 0x23, 0x12, 0x55, 0x9c, 0x0d, 0xc9, - 0x9f, 0xf9, 0xa6, 0xb6, 0x36, 0xb8, 0x82, 0x24, 0x22, 0x56, 0xfd, 0x45, 0x3c, 0x87, 0x96, 0xeb, - 0xc4, 0xde, 0x81, 0x61, 0x5d, 0xd2, 0x4a, 0x4e, 0xe3, 0xd9, 0xab, 0x4e, 0x10, 0x27, 0x38, 0x9e, - 0xd6, 0x0c, 0x95, 0xfa, 0xcd, 0xfc, 0x9b, 0x43, 0x7c, 0x91, 0x67, 0x11, 0xfc, 0xa2, 0x32, 0x9b, - 0xe3, 0x4c, 0xaf, 0xa6, 0x19, 0x23, 0xb5, 0xfd, 0x16, 0x7f, 0x2a, 0xfb, 0x3e, 0x22, 0xb7, 0xca, - 0x7d, 0xab, 0xed, 0xd8, 0x56, 0x93, 0xac, 0x3f, 0x94, 0x2e, 0xc9, 0x5f, 0xf8, 0x45, 0x82, 0xe4, - 0x71, 0xe9, 0x25, 0xca, 0x42, 0xbf, 0x1d, 0x91, 0x8b, 0x26, 0x5f, 0x72, 0x84, 0x58, 0x86, 0xe5, - 0x52, 0xad, 0x36, 0x22, 0xeb, 0x72, 0x77, 0xbf, 0xba, 0x5c, 0x50, 0xef, 0x53, 0xc6, 0x0b, 0xd7, - 0xb6, 0xae, 0xee, 0xf8, 0xec, 0xee, 0xee, 0x76, 0xa3, 0x1d, 0x74, 0x1d, 0xe0, 0x08, 0xc2, 0x1d, - 0xfe, 0x87, 0x93, 0xdc, 0xa5, 0xdb, 0xa1, 0x3d, 0xa7, 0xbf, 0xbd, 0xbc, 0x13, 0x38, 0xd9, 0x3b, - 0x99, 0xdb, 0x1f, 0x86, 0xeb, 0x74, 0x97, 0xfe, 0x13, 0x00, 
0x00, 0xff, 0xff, 0x69, 0x42, 0xd8, - 0x48, 0xae, 0x29, 0x00, 0x00, + // 2842 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x5b, 0x6b, 0x8c, 0x15, 0x57, + 0x1d, 0xcf, 0x99, 0x6d, 0x45, 0xa7, 0x3c, 0x96, 0x7f, 0x41, 0x60, 0x78, 0x95, 0xa1, 0xb0, 0xcb, + 0x05, 0xf6, 0x6e, 0x79, 0xb4, 0xb2, 0xda, 0x96, 0x2d, 0x0b, 0xb7, 0x50, 0x5e, 0x2e, 0xb4, 0xda, + 0x5b, 0xe3, 0xcd, 0xec, 0xbd, 0x87, 0x65, 0xe0, 0xde, 0x3b, 0xd7, 0x99, 0xd9, 0x6d, 0x37, 0x64, + 0xab, 0x29, 0x15, 0xb1, 0x06, 0x30, 0x41, 0x30, 0x8d, 0x2d, 0x31, 0x8d, 0x8d, 0xf5, 0x11, 0x3f, + 0x98, 0x68, 0x34, 0xd1, 0xc4, 0x0f, 0x92, 0x34, 0x6d, 0x4c, 0xda, 0xd4, 0x6a, 0xa3, 0xfd, 0x64, + 0x48, 0x68, 0xa2, 0x51, 0xab, 0x1f, 0xac, 0x5f, 0x8c, 0x99, 0xf3, 0x98, 0x39, 0xf3, 0x38, 0xf3, + 0xb8, 0xec, 0x12, 0x92, 0xf6, 0xdb, 0xee, 0x3d, 0xff, 0x39, 0xe7, 0xf7, 0xfb, 0xfd, 0x1f, 0xe7, + 0x9c, 0x99, 0x73, 0xd4, 0x15, 0xc7, 0x9a, 0x53, 0x2e, 0x36, 0x1b, 0xcd, 0xb2, 0x83, 0xed, 0x49, + 0xb3, 0x8e, 0xcb, 0x46, 0xa3, 0x65, 0xb6, 0x07, 0x3a, 0xb6, 0xe5, 0x5a, 0xd0, 0xcb, 0x5b, 0x07, + 0x58, 0xab, 0xb6, 0x62, 0xdc, 0xb2, 0xc6, 0x9b, 0xb8, 0x6c, 0x74, 0xcc, 0xb2, 0xd1, 0x6e, 0x5b, + 0xae, 0xe1, 0x9a, 0x56, 0xdb, 0xa1, 0xf6, 0x5a, 0xd0, 0x1b, 0xe9, 0xa5, 0xdc, 0xb1, 0xad, 0x13, + 0xb8, 0xee, 0xb2, 0xd6, 0x81, 0xe4, 0xd6, 0x5a, 0xc3, 0x6a, 0x19, 0x66, 0xbb, 0x66, 0xb8, 0xae, + 0x6d, 0x8e, 0x4d, 0xb8, 0x98, 0xf7, 0xd6, 0x27, 0xb1, 0x8f, 0x19, 0x2e, 0x8b, 0x18, 0xba, 0x86, + 0x73, 0x92, 0x35, 0xad, 0x8c, 0x34, 0x3d, 0x69, 0xd9, 0x27, 0x8f, 0x35, 0xad, 0x27, 0x59, 0x73, + 0xbf, 0xa4, 0x39, 0x3e, 0xc6, 0x5d, 0x11, 0xcb, 0xa6, 0x31, 0xd1, 0xae, 0x1f, 0xaf, 0x75, 0x9a, + 0x06, 0x13, 0x4b, 0xd3, 0x22, 0x16, 0x78, 0x12, 0xb7, 0x39, 0xf5, 0x55, 0xd1, 0xb6, 0xa7, 0x70, + 0x7d, 0xc2, 0x53, 0x4e, 0x42, 0xb5, 0x65, 0xb8, 0xf5, 0xe3, 0xc6, 0x58, 0x13, 0xd7, 0x6c, 0xec, + 0x58, 0x13, 0x76, 0x1d, 0x33, 0xc3, 0xb5, 0x11, 0xc3, 0xb6, 0xd5, 0xc0, 0xb5, 0x68, 0x6f, 0x6b, + 0x13, 0xf4, 0x88, 
0x19, 0x45, 0x7d, 0x35, 0x89, 0x6d, 0x27, 0x68, 0x5d, 0x1e, 0x69, 0xad, 0x5b, + 0xad, 0x96, 0x14, 0x6d, 0x03, 0x3b, 0x75, 0xdb, 0xec, 0x78, 0x9d, 0xd7, 0x70, 0xdb, 0x35, 0xdd, + 0x29, 0x6a, 0xb8, 0xe5, 0x17, 0x53, 0xea, 0xdc, 0x61, 0xcf, 0xe4, 0x08, 0x0d, 0x1f, 0x38, 0x8f, + 0x54, 0x75, 0x97, 0x8d, 0x0d, 0x17, 0x1f, 0x35, 0x9c, 0x93, 0xb0, 0xc6, 0x0f, 0x89, 0x01, 0x1a, + 0x76, 0xde, 0xaf, 0xb4, 0x7d, 0x14, 0x7f, 0x69, 0x02, 0x3b, 0xae, 0xa6, 0xa7, 0x99, 0x38, 0x1d, + 0xab, 0xed, 0x60, 0x7d, 0xc7, 0x33, 0x6f, 0x5d, 0xbb, 0xa8, 0x6c, 0xd5, 0xe7, 0x91, 0xb0, 0x9c, + 0xbc, 0x87, 0x10, 0x76, 0x86, 0x50, 0xa9, 0xba, 0x5a, 0xd7, 0x42, 0xbf, 0x95, 0x2d, 0x7b, 0xbc, + 0x7c, 0xca, 0x6c, 0x0c, 0x58, 0xf6, 0xf8, 0xf4, 0x10, 0x2a, 0xc1, 0x3b, 0x48, 0x9d, 0x53, 0xc1, + 0x2e, 0x41, 0x73, 0x57, 0x74, 0xa8, 0x43, 0x63, 0x5e, 0xbc, 0x55, 0xb0, 0xcb, 0xc1, 0x2c, 0x4a, + 0x02, 0xa3, 0x7f, 0x0b, 0x91, 0xf1, 0xcf, 0x21, 0xb8, 0x3f, 0x3c, 0x98, 0x37, 0x10, 0x0b, 0xda, + 0x69, 0xf2, 0x0f, 0x8d, 0x74, 0xfa, 0x77, 0xdb, 0x68, 0x61, 0xfa, 0x17, 0x53, 0x7e, 0xba, 0xba, + 0x1f, 0xf6, 0xc9, 0xd1, 0x16, 0xed, 0x0d, 0xae, 0x22, 0xf5, 0x8e, 0xfd, 0xa6, 0x43, 0xb8, 0xed, + 0x6d, 0x38, 0x30, 0x18, 0x05, 0x7f, 0xd0, 0x68, 0xe1, 0xc6, 0x6e, 0xe2, 0xaf, 0xbd, 0x0d, 0xcf, + 0x6f, 0xc7, 0x4c, 0x6c, 0x7b, 0x4f, 0x70, 0xba, 0x1b, 0x72, 0x3f, 0xa1, 0x3f, 0x4e, 0x24, 0x38, + 0x02, 0x6b, 0x45, 0x02, 0x35, 0xb3, 0xe1, 0x94, 0x4f, 0x05, 0x98, 0x19, 0xe0, 0xea, 0x26, 0x28, + 0x25, 0xf0, 0xa4, 0x24, 0x63, 0xd6, 0xf0, 0x8a, 0xa2, 0x7e, 0x82, 0xf3, 0x70, 0x60, 0x6d, 0x14, + 0xd3, 0x28, 0x4b, 0x10, 0x11, 0xf8, 0xd2, 0x24, 0x3f, 0x11, 0x9c, 0x7f, 0xa6, 0xbe, 0xfa, 0x3d, + 0x82, 0xc1, 0xa2, 0xbe, 0xaa, 0x3e, 0x18, 0xf5, 0x6f, 0x41, 0xf7, 0x54, 0xfb, 0x61, 0x7d, 0xbe, + 0x41, 0xab, 0xdb, 0x61, 0x6b, 0x17, 0x43, 0xc1, 0x4b, 0x48, 0x9d, 0x4f, 0x73, 0xe3, 0x73, 0xac, + 0x90, 0xc1, 0xba, 0xa8, 0x14, 0xbc, 0x25, 0x9c, 0x66, 0xeb, 0xb3, 0xcc, 0x58, 0xaa, 0x0d, 0x13, + 0xf9, 0x3e, 0xad, 0x2f, 0xe4, 0xf0, 0x78, 0xc5, 0x24, 
0xe9, 0xb6, 0x56, 0x5f, 0x15, 0xfb, 0x3d, + 0x96, 0x72, 0xef, 0x21, 0xf5, 0x8e, 0x0a, 0x76, 0x7d, 0x84, 0xd9, 0x69, 0xb7, 0x54, 0x06, 0x4e, + 0x7f, 0x81, 0xba, 0xf3, 0x12, 0x82, 0xe1, 0xf8, 0xc0, 0x45, 0xd3, 0xef, 0x30, 0x1c, 0x4c, 0x47, + 0x5f, 0x38, 0x05, 0xdf, 0x44, 0xea, 0x02, 0x2f, 0xec, 0x38, 0xde, 0x59, 0x4f, 0x43, 0x83, 0xc8, + 0xf1, 0x04, 0xf4, 0x45, 0x89, 0xc8, 0x52, 0x71, 0x10, 0x06, 0x24, 0x9c, 0x65, 0xe9, 0xf8, 0x1b, + 0x45, 0x9d, 0x27, 0x72, 0xca, 0x99, 0x92, 0x2b, 0x64, 0x3e, 0x24, 0xb8, 0xff, 0x4a, 0xfd, 0x78, + 0x0d, 0xc1, 0xb6, 0x6e, 0xfc, 0x58, 0xdd, 0x95, 0xe4, 0xff, 0xa2, 0xe9, 0x29, 0x94, 0xa5, 0xec, + 0xc1, 0xab, 0x3b, 0xe0, 0xbe, 0x2e, 0x87, 0x84, 0x9f, 0x20, 0xb5, 0x97, 0xe6, 0xd5, 0x7e, 0xb2, + 0x8a, 0x38, 0xdc, 0x34, 0xda, 0xd0, 0x17, 0x15, 0x28, 0x68, 0x0b, 0xa7, 0x6a, 0x7f, 0xb6, 0x21, + 0x4b, 0xd6, 0x0a, 0x11, 0x75, 0x58, 0x5f, 0xc4, 0x81, 0x0a, 0x8b, 0x16, 0x92, 0xaf, 0xeb, 0xf5, + 0x35, 0x49, 0x4d, 0xb1, 0x94, 0xfd, 0x17, 0x52, 0xe7, 0x55, 0xb0, 0x2b, 0xa0, 0xcd, 0x4e, 0x5a, + 0x4d, 0x0e, 0x53, 0x7f, 0x89, 0xba, 0xfb, 0x3b, 0x08, 0x46, 0x12, 0xc7, 0x2f, 0x9a, 0xb9, 0x47, + 0xe0, 0xb3, 0x99, 0x3c, 0x0a, 0x27, 0xef, 0xbf, 0x91, 0x7a, 0x67, 0x05, 0xbb, 0xc3, 0x75, 0xd7, + 0x9c, 0x4c, 0x75, 0x54, 0xd4, 0x22, 0x8f, 0x02, 0x97, 0xa9, 0x02, 0x17, 0x10, 0x3c, 0xc0, 0x91, + 0x1b, 0xa4, 0x9b, 0x5a, 0x41, 0x21, 0xaa, 0x07, 0xe0, 0x91, 0xb4, 0x1e, 0x0a, 0x4a, 0xe0, 0x2d, + 0x88, 0x16, 0x7b, 0x29, 0x19, 0x25, 0xe5, 0xc0, 0xc6, 0x2c, 0xde, 0x62, 0xba, 0xaf, 0x92, 0x73, + 0x27, 0x09, 0x6f, 0x13, 0xfa, 0xcd, 0xa0, 0xfa, 0x24, 0xb2, 0x8f, 0xd7, 0xab, 0x21, 0xf8, 0x54, + 0x26, 0x5b, 0x59, 0xe5, 0x7a, 0x17, 0xa9, 0x0b, 0xbd, 0xc1, 0x03, 0x28, 0xb3, 0x5e, 0x8f, 0x4f, + 0x10, 0x9a, 0x8d, 0xa0, 0xb0, 0x08, 0x68, 0x65, 0x25, 0xf9, 0xde, 0xa0, 0x06, 0x46, 0xad, 0x53, + 0xe9, 0xbd, 0xa1, 0xd0, 0xc9, 0x46, 0xf4, 0x59, 0xae, 0xd2, 0x9c, 0xe5, 0xab, 0xff, 0xd0, 0x58, + 0xfd, 0x27, 0x0a, 0x2a, 0x5e, 0xd1, 0x20, 0xdd, 0x23, 0x49, 0xf4, 0xa2, 0x25, 0xba, 0x0c, 
0x9b, + 0x0b, 0x41, 0xa8, 0x7e, 0x06, 0x86, 0xba, 0x1f, 0x18, 0xce, 0x29, 0x6a, 0xef, 0xa3, 0x9d, 0x46, + 0xee, 0x42, 0x4d, 0x6d, 0x73, 0x14, 0x6a, 0x6e, 0xc8, 0x0a, 0xf5, 0xcb, 0x54, 0xe1, 0x2b, 0x48, + 0x9b, 0x91, 0x7a, 0xe8, 0x95, 0xf6, 0x23, 0xda, 0x2c, 0x94, 0xc4, 0xef, 0x21, 0x75, 0x01, 0x9d, + 0x63, 0x76, 0xf3, 0xdd, 0x24, 0xc4, 0x56, 0x8e, 0x7e, 0x53, 0x78, 0xda, 0xea, 0xcb, 0xb4, 0x63, + 0x62, 0xec, 0x24, 0x5a, 0x0c, 0xe9, 0xc0, 0xf1, 0xfb, 0x3b, 0x57, 0x32, 0x67, 0xad, 0xd1, 0x56, + 0xc4, 0x1b, 0x82, 0xcc, 0xf0, 0xa6, 0xab, 0x5f, 0x23, 0x75, 0xe1, 0x28, 0xa6, 0xb4, 0x03, 0xa0, + 0xfd, 0x52, 0x00, 0xdc, 0xb6, 0x30, 0xd4, 0x47, 0x09, 0xd4, 0x43, 0xfa, 0xf2, 0x04, 0x44, 0x36, + 0xeb, 0xd4, 0xc3, 0xbc, 0x59, 0xef, 0x97, 0x61, 0xe6, 0xae, 0x10, 0xcc, 0xe1, 0x97, 0x48, 0xed, + 0x1d, 0xc5, 0x75, 0x6b, 0x12, 0xdb, 0x01, 0xfc, 0xbe, 0x14, 0xf8, 0xc4, 0xb4, 0x30, 0xfa, 0x23, + 0x04, 0xfd, 0x81, 0x60, 0x8b, 0x1c, 0x42, 0x4f, 0xfa, 0xf4, 0xc0, 0x6f, 0xd2, 0xfb, 0xb2, 0xc1, + 0x73, 0x6b, 0xaf, 0xc8, 0xce, 0xad, 0x60, 0x37, 0xc0, 0xbd, 0x51, 0xb6, 0xf0, 0xf3, 0x4d, 0x84, + 0x45, 0xc3, 0x32, 0x29, 0x76, 0xfd, 0x34, 0xcd, 0x91, 0x69, 0xd8, 0x9e, 0x80, 0x26, 0x47, 0xe9, + 0x18, 0x81, 0x87, 0xb2, 0x68, 0xe4, 0x98, 0x1e, 0xff, 0x81, 0xd4, 0x05, 0x34, 0x79, 0xf3, 0x64, + 0x40, 0xb8, 0x1e, 0xf4, 0x65, 0xda, 0x31, 0xc7, 0x3c, 0x47, 0xa9, 0x9e, 0x46, 0x5a, 0x77, 0x5c, + 0x3d, 0xaf, 0x55, 0xb4, 0x19, 0xa0, 0xeb, 0x39, 0xf4, 0x8c, 0xa2, 0xf6, 0x8a, 0x0e, 0x1d, 0x31, + 0x5c, 0x03, 0xca, 0x79, 0x9c, 0xea, 0x59, 0x72, 0xee, 0x83, 0xf9, 0x1f, 0x60, 0x22, 0x9c, 0xa7, + 0x22, 0x7c, 0x0d, 0x05, 0x15, 0xbc, 0x61, 0xb8, 0x46, 0x41, 0xaf, 0xef, 0x85, 0x8a, 0xec, 0xe9, + 0xa2, 0xae, 0xbf, 0x8a, 0xd4, 0xf9, 0xde, 0x7c, 0xe8, 0x23, 0xce, 0x39, 0xbd, 0xae, 0x94, 0xba, + 0x9d, 0xcc, 0xae, 0x16, 0xa1, 0x69, 0xc2, 0xc6, 0x02, 0xae, 0x16, 0x97, 0x41, 0x45, 0x19, 0xc1, + 0x07, 0x48, 0x85, 0xa3, 0xd8, 0x6e, 0x99, 0xed, 0x50, 0x14, 0x6f, 0x90, 0xc2, 0xf4, 0x8d, 0x39, + 0xa3, 0x52, 0x1e, 0xd3, 0x68, 
0x2c, 0x97, 0x6e, 0x20, 0x96, 0x4b, 0x33, 0x14, 0xcb, 0x7f, 0xa1, + 0xb1, 0x7c, 0xd0, 0x6a, 0xe0, 0x94, 0xc2, 0x1a, 0x6a, 0x16, 0x8a, 0xd3, 0xca, 0x54, 0x43, 0xfd, + 0xab, 0x0a, 0x61, 0xfa, 0x3f, 0x04, 0x6d, 0x8e, 0x36, 0xfc, 0x76, 0x96, 0xd2, 0xf5, 0xff, 0xad, + 0x45, 0x21, 0x87, 0x5a, 0x44, 0xfc, 0xa1, 0x86, 0x60, 0x5a, 0x26, 0xbd, 0x9b, 0x8d, 0xe9, 0xea, + 0x37, 0x10, 0x9c, 0x45, 0xb2, 0x31, 0xb9, 0x4c, 0xa1, 0x4e, 0x7c, 0xcd, 0x66, 0x1e, 0x0d, 0x5c, + 0xe9, 0x51, 0x3f, 0xe9, 0x25, 0xf3, 0x54, 0xdb, 0x68, 0x99, 0x75, 0x4f, 0x23, 0xff, 0x55, 0xcf, + 0xe6, 0xa8, 0x82, 0xc9, 0x76, 0x5c, 0xf0, 0xd8, 0xd4, 0x91, 0x68, 0xcb, 0x02, 0xed, 0xfb, 0x54, + 0xfe, 0xef, 0x2a, 0xf0, 0xe5, 0x9b, 0x2b, 0x7f, 0xb9, 0x41, 0x51, 0xd5, 0xf8, 0x6b, 0x80, 0xea, + 0x0b, 0x08, 0x9e, 0xbf, 0x65, 0xfc, 0x11, 0x83, 0x07, 0xff, 0x55, 0x54, 0xf0, 0x4a, 0x4f, 0x28, + 0x7c, 0x9d, 0xf8, 0xfa, 0x28, 0xd4, 0x2e, 0x56, 0xb4, 0x35, 0x99, 0x96, 0xfa, 0x25, 0xea, 0x8d, + 0xf3, 0x0a, 0x38, 0x52, 0x6f, 0xf8, 0xef, 0xa6, 0x24, 0xa4, 0x93, 0xdb, 0x7d, 0xea, 0xc9, 0xcd, + 0xb4, 0xec, 0x5f, 0x46, 0x70, 0x31, 0xdd, 0x03, 0xc9, 0x4f, 0x53, 0x3f, 0xcc, 0x1e, 0x30, 0xb8, + 0xf0, 0x71, 0x75, 0x59, 0x5c, 0xfb, 0x3d, 0x96, 0x4d, 0xbe, 0x40, 0x94, 0x53, 0x85, 0x65, 0x56, + 0x05, 0x3d, 0xf1, 0xe2, 0x1c, 0xe2, 0x89, 0xcb, 0x73, 0xe0, 0x07, 0x3d, 0x5c, 0x91, 0xfa, 0x71, + 0xb3, 0xd9, 0xb0, 0x71, 0xf4, 0xcb, 0x90, 0x53, 0x3e, 0x15, 0xfe, 0xa1, 0xc6, 0xe3, 0x28, 0xf4, + 0x8b, 0x44, 0x95, 0xc2, 0x8f, 0xfa, 0x82, 0x15, 0x7e, 0x92, 0x45, 0x79, 0x9e, 0xe7, 0xfc, 0x34, + 0x48, 0xb0, 0x66, 0xdf, 0x28, 0x52, 0x39, 0x70, 0x9b, 0x14, 0xb0, 0xdc, 0x44, 0x8a, 0x8a, 0x1b, + 0xf0, 0x7d, 0x54, 0x92, 0x8d, 0x8d, 0x5d, 0x7b, 0xaa, 0x66, 0xb8, 0x2e, 0x6e, 0x75, 0xdc, 0xe9, + 0xea, 0xf5, 0x1e, 0xb8, 0x16, 0x77, 0x17, 0x89, 0xdc, 0xc2, 0x6a, 0x91, 0xa0, 0xfe, 0xc8, 0xd3, + 0xb7, 0xa6, 0xa7, 0xe1, 0x74, 0x8f, 0xba, 0x28, 0xba, 0x26, 0x21, 0x6b, 0xec, 0x8d, 0x59, 0xeb, + 0x12, 0x71, 0x7d, 0xbd, 0x29, 0x9f, 0x31, 0x9b, 0x2b, 0x2f, 0xd0, 
0xea, 0x7c, 0x56, 0xa8, 0xce, + 0x64, 0x75, 0x7c, 0x93, 0xd6, 0x2b, 0xdf, 0x44, 0x70, 0x0e, 0x85, 0x06, 0x96, 0x4f, 0x8a, 0x37, + 0x07, 0x12, 0xbc, 0x82, 0xd4, 0x05, 0xa3, 0x78, 0xdc, 0x74, 0x5c, 0x6c, 0x1f, 0xa6, 0x3d, 0xc6, + 0xf7, 0x75, 0xac, 0x81, 0xdb, 0x49, 0xf7, 0x75, 0x31, 0x3b, 0x26, 0xfb, 0x08, 0x51, 0xfd, 0x01, + 0xbd, 0x97, 0x73, 0x67, 0xd8, 0xc9, 0x7b, 0x8d, 0x3e, 0x5d, 0x8f, 0xfe, 0x4c, 0x65, 0xe1, 0x87, + 0x28, 0xf8, 0xdb, 0x8d, 0x6f, 0x23, 0x75, 0x1e, 0xdd, 0x30, 0x72, 0xa0, 0x4b, 0x24, 0x00, 0xb4, + 0x75, 0x92, 0x86, 0xc8, 0x7e, 0x73, 0x0f, 0xc1, 0xb5, 0x53, 0x5b, 0x1c, 0x03, 0x70, 0xca, 0x6c, + 0x90, 0x15, 0xf8, 0xdd, 0xda, 0xea, 0x64, 0x70, 0x7c, 0xe1, 0x42, 0x90, 0x3d, 0x8b, 0xd4, 0xb9, + 0xde, 0x9c, 0xc3, 0x46, 0x71, 0x40, 0x97, 0x8c, 0x9f, 0xfa, 0xad, 0x96, 0x3f, 0xad, 0xdf, 0x4b, + 0x60, 0x0d, 0x42, 0x4c, 0xae, 0xea, 0x0a, 0xd0, 0xe4, 0x70, 0xe0, 0x6d, 0xa4, 0xde, 0x19, 0xfe, + 0x0a, 0xba, 0x7b, 0x12, 0xb7, 0xdd, 0xf8, 0xea, 0x33, 0xb6, 0x07, 0x25, 0x76, 0x1c, 0xd8, 0x40, + 0x5e, 0x73, 0xa6, 0x62, 0x8d, 0xc0, 0x7d, 0x5c, 0x5f, 0xea, 0xef, 0x55, 0xbc, 0x66, 0x27, 0xfc, + 0x85, 0x74, 0x48, 0xdf, 0x1e, 0x69, 0x26, 0xb8, 0xe9, 0x59, 0x91, 0x78, 0xf4, 0x8b, 0xcf, 0xc2, + 0x6f, 0xfd, 0xb7, 0x6f, 0x24, 0xb9, 0x09, 0xa7, 0x0d, 0xa9, 0x79, 0x1f, 0xe2, 0x53, 0xca, 0x63, + 0xca, 0xb8, 0x54, 0x09, 0x97, 0xa3, 0xc1, 0x97, 0x23, 0x06, 0xd6, 0xcb, 0x17, 0xc2, 0x63, 0x87, + 0xbe, 0x4d, 0xce, 0x43, 0x9a, 0xc8, 0x84, 0xc6, 0x9f, 0x7c, 0x1a, 0xde, 0x6a, 0x46, 0x42, 0x83, + 0x34, 0xe5, 0xa3, 0x91, 0x64, 0xca, 0x68, 0x74, 0x08, 0x8d, 0x13, 0x31, 0x1a, 0xfe, 0xf9, 0x90, + 0x43, 0xfa, 0x3e, 0x39, 0x8d, 0x8e, 0x61, 0xe3, 0xb6, 0x5b, 0xcb, 0x3b, 0xbb, 0x12, 0x72, 0xaf, + 0xdf, 0x4e, 0x76, 0x98, 0x21, 0x4c, 0xf1, 0x1d, 0x66, 0xa8, 0x39, 0x6d, 0x87, 0x19, 0x32, 0xd4, + 0xff, 0x7e, 0x1b, 0xe1, 0x73, 0xfd, 0x36, 0x78, 0x5e, 0x09, 0x9d, 0xb3, 0x88, 0xd4, 0xc7, 0xdc, + 0x13, 0x79, 0x81, 0x99, 0x3b, 0xf7, 0x54, 0x9d, 0x31, 0x37, 0x27, 0x4e, 0xc6, 0x49, 0xb3, 0x6f, + 0x7c, 
0xba, 0x4d, 0x9c, 0x5f, 0xe3, 0x4b, 0xa7, 0x57, 0x15, 0xb8, 0x2a, 0x95, 0x87, 0x4f, 0x34, + 0x79, 0x9c, 0xfa, 0xa1, 0x97, 0x12, 0xde, 0xef, 0xa1, 0x3b, 0xc5, 0x50, 0x18, 0x26, 0xec, 0x14, + 0x43, 0xed, 0xa9, 0xfb, 0x93, 0x98, 0xa5, 0x7e, 0xae, 0x87, 0x04, 0xf5, 0x99, 0x1e, 0xf8, 0x21, + 0x92, 0x06, 0x75, 0x6e, 0x37, 0xe4, 0xf5, 0x41, 0x3e, 0x07, 0xc8, 0xd5, 0xaf, 0xfe, 0x01, 0xc1, + 0x5b, 0x28, 0x35, 0xca, 0x72, 0x85, 0xd8, 0x2d, 0x47, 0x0c, 0xfe, 0x76, 0x3b, 0x59, 0x8f, 0x86, + 0x1c, 0x95, 0xbc, 0x1e, 0x8d, 0x56, 0xb1, 0xd4, 0xf5, 0x68, 0xb2, 0x31, 0xab, 0xd3, 0x1f, 0xd0, + 0xc2, 0xf6, 0xfe, 0x6d, 0x70, 0x45, 0x09, 0xad, 0x0b, 0x3f, 0xaa, 0x6e, 0xd1, 0xea, 0xf6, 0x3b, + 0x05, 0x5e, 0x53, 0x12, 0xd7, 0xce, 0x5d, 0xed, 0x02, 0x3f, 0xf4, 0x25, 0xee, 0x75, 0x45, 0x5d, + 0x19, 0x5a, 0x4d, 0x8f, 0x90, 0x2e, 0x87, 0xfd, 0xf3, 0xbf, 0xb0, 0x4d, 0xb2, 0x40, 0x8d, 0x1a, + 0x86, 0x3f, 0xf6, 0x6c, 0x2f, 0xf8, 0x14, 0xcb, 0x84, 0x5f, 0xd1, 0xd7, 0xe5, 0x3f, 0x43, 0xda, + 0xbe, 0xc8, 0x0a, 0x37, 0x7e, 0x4c, 0xba, 0x7c, 0x2a, 0x7c, 0x4a, 0x99, 0xc9, 0x23, 0xfc, 0xc8, + 0xe4, 0xf1, 0x56, 0x3a, 0x0d, 0xad, 0x96, 0xdd, 0x21, 0x09, 0x25, 0xe1, 0x79, 0x5a, 0xa3, 0xf2, + 0x8f, 0x02, 0xa7, 0x15, 0x55, 0xab, 0x60, 0x57, 0x26, 0xe5, 0x3d, 0x39, 0x45, 0x11, 0x96, 0x44, + 0x5b, 0x8a, 0x3c, 0xc2, 0x44, 0x7c, 0x9a, 0x68, 0xf8, 0x54, 0x70, 0x8e, 0x22, 0x45, 0xc2, 0xf8, + 0xf9, 0x8b, 0x9d, 0xc1, 0x91, 0x9c, 0x0c, 0xa5, 0x64, 0x27, 0x31, 0x2e, 0x2a, 0xea, 0xca, 0x11, + 0xdc, 0xc4, 0x37, 0x1e, 0x53, 0xb4, 0x97, 0xa2, 0x31, 0xc5, 0x9f, 0x62, 0x72, 0x3c, 0x4b, 0x63, + 0xea, 0xe9, 0x52, 0x57, 0x7a, 0x78, 0xc1, 0xb3, 0xab, 0x74, 0x83, 0x92, 0x78, 0xb1, 0xf1, 0x9c, + 0xa2, 0x2e, 0x09, 0x65, 0x9a, 0xa0, 0xc7, 0x80, 0x84, 0x99, 0x2c, 0xbb, 0xca, 0xb9, 0xed, 0x99, + 0x06, 0xe7, 0xa8, 0x06, 0x67, 0x90, 0x56, 0x8e, 0x32, 0xc9, 0x48, 0x28, 0x8f, 0xff, 0x61, 0xed, + 0x91, 0x19, 0x4c, 0x1e, 0x76, 0xee, 0x7c, 0x51, 0x90, 0x28, 0x82, 0x12, 0x1b, 0x33, 0x99, 0x09, + 0xc9, 0xb1, 0x29, 0x9f, 0x31, 0xd3, 0x00, 
0x13, 0x09, 0x6a, 0x70, 0x77, 0x9a, 0x02, 0x1c, 0x64, + 0xf5, 0xbe, 0xe0, 0x43, 0x7b, 0x21, 0x9f, 0xc3, 0x75, 0xa4, 0x2e, 0x09, 0x45, 0x7f, 0x21, 0x3f, + 0x87, 0x23, 0xbe, 0x9c, 0xdb, 0x9e, 0x71, 0x3c, 0x49, 0x38, 0xe2, 0x52, 0x2e, 0x8e, 0x64, 0x43, + 0x5e, 0xea, 0x8e, 0x26, 0x39, 0xeb, 0xa0, 0xa8, 0x4b, 0x69, 0x9c, 0xf1, 0x57, 0x03, 0x02, 0x55, + 0xe9, 0x27, 0x72, 0x59, 0x4c, 0x0f, 0xe6, 0x7f, 0x80, 0x91, 0x7d, 0x97, 0x06, 0xf5, 0xdb, 0x48, + 0xab, 0xc6, 0xce, 0xfb, 0x76, 0x31, 0x4d, 0x84, 0x7e, 0xe3, 0x1d, 0x11, 0x91, 0x5c, 0xcd, 0x4a, + 0x1b, 0xe0, 0x86, 0xa6, 0x0d, 0xe9, 0xa8, 0xf0, 0x15, 0x45, 0x5d, 0x2c, 0x1c, 0x14, 0x17, 0xb4, + 0xdd, 0x94, 0x2d, 0x95, 0x90, 0x25, 0x9b, 0x73, 0x5a, 0x33, 0x55, 0xbf, 0x4e, 0x55, 0x7d, 0x06, + 0xc1, 0x8e, 0x54, 0x55, 0x63, 0x45, 0x2e, 0xf8, 0xf2, 0x33, 0x5d, 0xdd, 0x0d, 0xbb, 0x32, 0x15, + 0x93, 0xd4, 0x4a, 0xa1, 0x1b, 0xb8, 0xa0, 0xa8, 0x4b, 0x69, 0x84, 0x77, 0x17, 0x61, 0xe1, 0x6c, + 0x1a, 0xcc, 0xff, 0x00, 0x7f, 0x51, 0x4c, 0xb5, 0x38, 0x8b, 0x4a, 0xdd, 0x6b, 0xe1, 0x05, 0xd0, + 0xc3, 0xa5, 0x99, 0x90, 0xc3, 0x0b, 0x8a, 0xd7, 0x90, 0xba, 0xc4, 0xdb, 0x37, 0x1e, 0xe0, 0x37, + 0xa4, 0xd2, 0xaa, 0x8b, 0xc4, 0x50, 0x5a, 0x5d, 0xa4, 0xf6, 0x4c, 0x8e, 0x83, 0x44, 0x8d, 0x87, + 0xc1, 0x3f, 0x81, 0x16, 0xdc, 0xd3, 0x0a, 0xd8, 0x88, 0xd7, 0x3c, 0x92, 0xda, 0x85, 0x37, 0x95, + 0xef, 0xb1, 0x13, 0xa9, 0xc1, 0x01, 0x52, 0x13, 0x3b, 0xf1, 0xf7, 0xce, 0xc2, 0xf9, 0x52, 0x71, + 0x73, 0xbd, 0x3a, 0xc3, 0x2e, 0x29, 0x92, 0xbd, 0xc5, 0x71, 0x83, 0x5e, 0xd2, 0x32, 0x3d, 0xc7, + 0xf1, 0xeb, 0x65, 0x35, 0x77, 0xaa, 0x83, 0x93, 0xe4, 0x17, 0x23, 0x39, 0xf2, 0xb0, 0xe0, 0xb5, + 0xcc, 0x6e, 0xbc, 0x95, 0xe1, 0xfc, 0x0a, 0x16, 0x98, 0x4e, 0xc5, 0xaf, 0xa6, 0x08, 0x8d, 0x42, + 0xfa, 0x2e, 0x4f, 0x31, 0xd3, 0x7f, 0x4c, 0x29, 0xbe, 0x8c, 0x60, 0x6f, 0x5e, 0x8a, 0xd9, 0x87, + 0x86, 0x3e, 0x0f, 0x8f, 0xa5, 0x51, 0xf6, 0x37, 0xef, 0x85, 0x7b, 0x86, 0x17, 0x15, 0x75, 0x21, + 0x2d, 0xe2, 0xa2, 0x10, 0xfd, 0x29, 0x0c, 0xc3, 0x73, 0xc4, 0x86, 0x1c, 0x96, 
0x2c, 0x56, 0x7f, + 0x4a, 0x95, 0xf9, 0x11, 0xd2, 0x66, 0x4e, 0x19, 0x2f, 0x95, 0x9f, 0xd0, 0x66, 0x49, 0x1c, 0x2f, + 0xbb, 0x9b, 0xaa, 0x5a, 0xc1, 0xee, 0x63, 0x74, 0xbf, 0x16, 0xbf, 0x1e, 0x18, 0xb4, 0x49, 0xaf, + 0x07, 0x8a, 0x26, 0x4c, 0x89, 0x25, 0x44, 0x88, 0x85, 0xb0, 0x80, 0xe3, 0x66, 0xfb, 0x41, 0xf8, + 0xb9, 0x42, 0x16, 0x61, 0x23, 0xc1, 0xf5, 0x45, 0xe6, 0x90, 0xec, 0xdb, 0x0d, 0x31, 0x68, 0xb1, + 0x4e, 0xf4, 0x77, 0xa8, 0x03, 0xde, 0x44, 0xe0, 0xef, 0xbc, 0x62, 0x17, 0x25, 0x89, 0x1b, 0xc8, + 0xa6, 0xb4, 0xa0, 0x58, 0xe1, 0xfb, 0x0f, 0x6d, 0x68, 0xa6, 0x0e, 0x11, 0x3b, 0x2a, 0x75, 0x63, + 0xe3, 0xc1, 0xab, 0x3d, 0xb4, 0x08, 0x47, 0x49, 0x9b, 0x49, 0x73, 0x73, 0x4c, 0x19, 0xb1, 0x86, + 0xad, 0xcb, 0x65, 0xad, 0xff, 0x91, 0x7e, 0xb0, 0x7c, 0x43, 0x81, 0x43, 0xe9, 0x5a, 0x16, 0x4f, + 0xf6, 0x2f, 0xc2, 0x17, 0x0a, 0x68, 0x57, 0xbc, 0xff, 0x0a, 0xec, 0x9e, 0x11, 0xc8, 0xd5, 0xa3, + 0x30, 0x3a, 0xf3, 0x40, 0xe1, 0x92, 0x42, 0x6e, 0xb9, 0xf8, 0xaf, 0xe6, 0x0e, 0x60, 0xd7, 0x36, + 0xeb, 0x0e, 0x6c, 0xc9, 0x73, 0x60, 0x93, 0x19, 0x73, 0x67, 0x6e, 0x2d, 0xf4, 0x0c, 0xcb, 0xce, + 0x84, 0xdb, 0xb3, 0x2d, 0x6a, 0x52, 0xf0, 0xa8, 0xa7, 0x70, 0x7b, 0x36, 0xa1, 0x83, 0x82, 0xa7, + 0x05, 0x1f, 0x7a, 0xb0, 0x7a, 0xff, 0xb8, 0xe9, 0x1e, 0x9f, 0x18, 0x1b, 0xa8, 0x5b, 0xad, 0x32, + 0xe1, 0xe5, 0x75, 0x41, 0xfe, 0x28, 0xfb, 0xf7, 0x9f, 0xc7, 0x71, 0xbb, 0xdc, 0x19, 0xdb, 0x3c, + 0x6e, 0x95, 0xa3, 0xf7, 0xe8, 0xc7, 0x3e, 0x46, 0xae, 0x40, 0x6f, 0xfd, 0x7f, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xf3, 0x38, 0xe9, 0xaa, 0x62, 0x3f, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -232,6 +274,8 @@ type AdminServiceClient interface { TerminateExecution(ctx context.Context, in *admin.ExecutionTerminateRequest, opts ...grpc.CallOption) (*admin.ExecutionTerminateResponse, error) // Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. 
GetNodeExecution(ctx context.Context, in *admin.NodeExecutionGetRequest, opts ...grpc.CallOption) (*admin.NodeExecution, error) + // Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. + GetDynamicNodeWorkflow(ctx context.Context, in *admin.GetDynamicNodeWorkflowRequest, opts ...grpc.CallOption) (*admin.DynamicNodeWorkflowResponse, error) // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. ListNodeExecutions(ctx context.Context, in *admin.NodeExecutionListRequest, opts ...grpc.CallOption) (*admin.NodeExecutionList, error) // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. @@ -517,6 +561,15 @@ func (c *adminServiceClient) GetNodeExecution(ctx context.Context, in *admin.Nod return out, nil } +func (c *adminServiceClient) GetDynamicNodeWorkflow(ctx context.Context, in *admin.GetDynamicNodeWorkflowRequest, opts ...grpc.CallOption) (*admin.DynamicNodeWorkflowResponse, error) { + out := new(admin.DynamicNodeWorkflowResponse) + err := c.cc.Invoke(ctx, "/flyteidl.service.AdminService/GetDynamicNodeWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + func (c *adminServiceClient) ListNodeExecutions(ctx context.Context, in *admin.NodeExecutionListRequest, opts ...grpc.CallOption) (*admin.NodeExecutionList, error) { out := new(admin.NodeExecutionList) err := c.cc.Invoke(ctx, "/flyteidl.service.AdminService/ListNodeExecutions", in, out, opts...) @@ -832,6 +885,8 @@ type AdminServiceServer interface { TerminateExecution(context.Context, *admin.ExecutionTerminateRequest) (*admin.ExecutionTerminateResponse, error) // Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. GetNodeExecution(context.Context, *admin.NodeExecutionGetRequest) (*admin.NodeExecution, error) + // Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. 
+ GetDynamicNodeWorkflow(context.Context, *admin.GetDynamicNodeWorkflowRequest) (*admin.DynamicNodeWorkflowResponse, error) // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. ListNodeExecutions(context.Context, *admin.NodeExecutionListRequest) (*admin.NodeExecutionList, error) // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. @@ -969,6 +1024,9 @@ func (*UnimplementedAdminServiceServer) TerminateExecution(ctx context.Context, func (*UnimplementedAdminServiceServer) GetNodeExecution(ctx context.Context, req *admin.NodeExecutionGetRequest) (*admin.NodeExecution, error) { return nil, status.Errorf(codes.Unimplemented, "method GetNodeExecution not implemented") } +func (*UnimplementedAdminServiceServer) GetDynamicNodeWorkflow(ctx context.Context, req *admin.GetDynamicNodeWorkflowRequest) (*admin.DynamicNodeWorkflowResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetDynamicNodeWorkflow not implemented") +} func (*UnimplementedAdminServiceServer) ListNodeExecutions(ctx context.Context, req *admin.NodeExecutionListRequest) (*admin.NodeExecutionList, error) { return nil, status.Errorf(codes.Unimplemented, "method ListNodeExecutions not implemented") } @@ -1493,6 +1551,24 @@ func _AdminService_GetNodeExecution_Handler(srv interface{}, ctx context.Context return interceptor(ctx, in, info, handler) } +func _AdminService_GetDynamicNodeWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(admin.GetDynamicNodeWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdminServiceServer).GetDynamicNodeWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/flyteidl.service.AdminService/GetDynamicNodeWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, 
error) { + return srv.(AdminServiceServer).GetDynamicNodeWorkflow(ctx, req.(*admin.GetDynamicNodeWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + func _AdminService_ListNodeExecutions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(admin.NodeExecutionListRequest) if err := dec(in); err != nil { @@ -2115,6 +2191,10 @@ var _AdminService_serviceDesc = grpc.ServiceDesc{ MethodName: "GetNodeExecution", Handler: _AdminService_GetNodeExecution_Handler, }, + { + MethodName: "GetDynamicNodeWorkflow", + Handler: _AdminService_GetDynamicNodeWorkflow_Handler, + }, { MethodName: "ListNodeExecutions", Handler: _AdminService_ListNodeExecutions_Handler, diff --git a/flyteidl/gen/pb-go/flyteidl/service/admin.pb.gw.go b/flyteidl/gen/pb-go/flyteidl/service/admin.pb.gw.go index ea8c99459c..f012ef0cd6 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/admin.pb.gw.go +++ b/flyteidl/gen/pb-go/flyteidl/service/admin.pb.gw.go @@ -47,6 +47,41 @@ func request_AdminService_CreateTask_0(ctx context.Context, marshaler runtime.Ma } +func request_AdminService_CreateTask_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.TaskCreateRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = 
runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + + msg, err := client.CreateTask(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + var ( filter_AdminService_GetTask_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3, "version": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} ) @@ -119,11 +154,11 @@ func request_AdminService_GetTask_0(ctx context.Context, marshaler runtime.Marsh } var ( - filter_AdminService_ListTaskIds_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} + filter_AdminService_GetTask_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3, "name": 4, "version": 5}, Base: []int{1, 1, 1, 2, 3, 4, 5, 0, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 2, 3, 4, 5, 6, 7}} ) -func request_AdminService_ListTaskIds_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.NamedEntityIdentifierListRequest +func request_AdminService_GetTask_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ObjectGetRequest var metadata runtime.ServerMetadata var ( @@ -133,55 +168,17 @@ func request_AdminService_ListTaskIds_0(ctx context.Context, marshaler runtime.M _ = err ) - val, ok = pathParams["project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") - } - - protoReq.Project, err = runtime.String(val) - - 
if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) - } - - val, ok = pathParams["domain"] + val, ok = pathParams["id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") } - protoReq.Domain, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTaskIds_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - msg, err := client.ListTaskIds(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -var ( - filter_AdminService_ListTasks_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} -) - -func request_AdminService_ListTasks_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ResourceListRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - val, ok = pathParams["id.project"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") @@ -215,24 +212,35 @@ func 
request_AdminService_ListTasks_0(ctx context.Context, marshaler runtime.Mar return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } + val, ok = pathParams["id.version"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.version", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.version", err) + } + if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTasks_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetTask_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListTasks(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.GetTask(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListTasks_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 1, 2, 0, 0}, Check: []int{0, 1, 2, 2, 3, 4}} + filter_AdminService_ListTaskIds_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} ) -func request_AdminService_ListTasks_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ResourceListRequest +func request_AdminService_ListTaskIds_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams 
map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NamedEntityIdentifierListRequest var metadata runtime.ServerMetadata var ( @@ -242,63 +250,46 @@ func request_AdminService_ListTasks_1(ctx context.Context, marshaler runtime.Mar _ = err ) - val, ok = pathParams["id.project"] + val, ok = pathParams["project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + protoReq.Project, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) } - val, ok = pathParams["id.domain"] + val, ok = pathParams["domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + protoReq.Domain, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTasks_1); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.ListTasks(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return 
msg, metadata, err - -} - -func request_AdminService_CreateWorkflow_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.WorkflowCreateRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTaskIds_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.CreateWorkflow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListTaskIds(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_GetWorkflow_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3, "version": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} + filter_AdminService_ListTaskIds_1 = &utilities.DoubleArray{Encoding: map[string]int{"org": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}} ) -func request_AdminService_GetWorkflow_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ObjectGetRequest +func request_AdminService_ListTaskIds_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NamedEntityIdentifierListRequest var metadata 
runtime.ServerMetadata var ( @@ -308,68 +299,57 @@ func request_AdminService_GetWorkflow_0(ctx context.Context, marshaler runtime.M _ = err ) - val, ok = pathParams["id.project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) - } - - val, ok = pathParams["id.domain"] + val, ok = pathParams["org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + protoReq.Org, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) } - val, ok = pathParams["id.name"] + val, ok = pathParams["project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + protoReq.Project, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) } - val, ok = pathParams["id.version"] + val, ok = pathParams["domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.version") + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.version", val) + protoReq.Domain, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.version", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetWorkflow_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTaskIds_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetWorkflow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListTaskIds(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListWorkflowIds_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} + filter_AdminService_ListTasks_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} ) -func request_AdminService_ListWorkflowIds_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.NamedEntityIdentifierListRequest +func request_AdminService_ListTasks_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + 
var protoReq admin.ResourceListRequest var metadata runtime.ServerMetadata var ( @@ -379,45 +359,56 @@ func request_AdminService_ListWorkflowIds_0(ctx context.Context, marshaler runti _ = err ) - val, ok = pathParams["project"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - protoReq.Project, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - val, ok = pathParams["domain"] + val, ok = pathParams["id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - protoReq.Domain, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := 
runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListWorkflowIds_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTasks_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListWorkflowIds(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListTasks(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListWorkflows_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} + filter_AdminService_ListTasks_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} ) -func request_AdminService_ListWorkflows_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { +func request_AdminService_ListTasks_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { var protoReq admin.ResourceListRequest var metadata runtime.ServerMetadata @@ -428,6 +419,17 @@ func request_AdminService_ListWorkflows_0(ctx context.Context, marshaler runtime _ = err ) + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + val, ok = 
pathParams["id.project"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") @@ -464,20 +466,20 @@ func request_AdminService_ListWorkflows_0(ctx context.Context, marshaler runtime if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListWorkflows_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTasks_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListWorkflows(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListTasks(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListWorkflows_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 1, 2, 0, 0}, Check: []int{0, 1, 2, 2, 3, 4}} + filter_AdminService_ListTasks_2 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 1, 2, 0, 0}, Check: []int{0, 1, 2, 2, 3, 4}} ) -func request_AdminService_ListWorkflows_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { +func request_AdminService_ListTasks_2(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { var protoReq admin.ResourceListRequest var metadata runtime.ServerMetadata @@ -513,38 +515,21 @@ func request_AdminService_ListWorkflows_1(ctx context.Context, marshaler runtime if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, 
"%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListWorkflows_1); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.ListWorkflows(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func request_AdminService_CreateLaunchPlan_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.LaunchPlanCreateRequest - var metadata runtime.ServerMetadata - - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTasks_2); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.CreateLaunchPlan(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListTasks(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_GetLaunchPlan_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3, "version": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} + filter_AdminService_ListTasks_3 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} ) -func request_AdminService_GetLaunchPlan_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, 
runtime.ServerMetadata, error) { - var protoReq admin.ObjectGetRequest +func request_AdminService_ListTasks_3(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ResourceListRequest var metadata runtime.ServerMetadata var ( @@ -554,6 +539,17 @@ func request_AdminService_GetLaunchPlan_0(ctx context.Context, marshaler runtime _ = err ) + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + val, ok = pathParams["id.project"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") @@ -576,9 +572,110 @@ func request_AdminService_GetLaunchPlan_0(ctx context.Context, marshaler runtime return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - val, ok = pathParams["id.name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTasks_3); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListTasks(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_CreateWorkflow_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, 
runtime.ServerMetadata, error) { + var protoReq admin.WorkflowCreateRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.CreateWorkflow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_CreateWorkflow_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowCreateRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + + msg, err := client.CreateWorkflow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetWorkflow_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3, "version": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: 
[]int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} +) + +func request_AdminService_GetWorkflow_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ObjectGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") } err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) @@ -601,21 +698,21 @@ func request_AdminService_GetLaunchPlan_0(ctx context.Context, marshaler runtime if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetLaunchPlan_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetWorkflow_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetLaunchPlan(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := 
client.GetWorkflow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_GetActiveLaunchPlan_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} + filter_AdminService_GetWorkflow_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3, "name": 4, "version": 5}, Base: []int{1, 1, 1, 2, 3, 4, 5, 0, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 2, 3, 4, 5, 6, 7}} ) -func request_AdminService_GetActiveLaunchPlan_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ActiveLaunchPlanRequest +func request_AdminService_GetWorkflow_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ObjectGetRequest var metadata runtime.ServerMetadata var ( @@ -625,6 +722,17 @@ func request_AdminService_GetActiveLaunchPlan_0(ctx context.Context, marshaler r _ = err ) + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + val, ok = pathParams["id.project"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") @@ -658,24 +766,35 @@ func request_AdminService_GetActiveLaunchPlan_0(ctx context.Context, marshaler r return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", 
"id.name", err) } + val, ok = pathParams["id.version"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.version", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.version", err) + } + if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetActiveLaunchPlan_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetWorkflow_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetActiveLaunchPlan(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.GetWorkflow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListActiveLaunchPlans_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} + filter_AdminService_ListWorkflowIds_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} ) -func request_AdminService_ListActiveLaunchPlans_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ActiveLaunchPlanListRequest +func request_AdminService_ListWorkflowIds_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NamedEntityIdentifierListRequest var 
metadata runtime.ServerMetadata var ( @@ -710,20 +829,20 @@ func request_AdminService_ListActiveLaunchPlans_0(ctx context.Context, marshaler if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListActiveLaunchPlans_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListWorkflowIds_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListActiveLaunchPlans(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListWorkflowIds(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListLaunchPlanIds_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} + filter_AdminService_ListWorkflowIds_1 = &utilities.DoubleArray{Encoding: map[string]int{"org": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}} ) -func request_AdminService_ListLaunchPlanIds_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { +func request_AdminService_ListWorkflowIds_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { var protoReq admin.NamedEntityIdentifierListRequest var metadata runtime.ServerMetadata @@ -734,6 +853,17 @@ func request_AdminService_ListLaunchPlanIds_0(ctx context.Context, marshaler run _ = err ) + val, ok = pathParams["org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") 
+ } + + protoReq.Org, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) + } + val, ok = pathParams["project"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") @@ -759,20 +889,20 @@ func request_AdminService_ListLaunchPlanIds_0(ctx context.Context, marshaler run if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListLaunchPlanIds_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListWorkflowIds_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListLaunchPlanIds(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListWorkflowIds(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListLaunchPlans_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} + filter_AdminService_ListWorkflows_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} ) -func request_AdminService_ListLaunchPlans_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { +func request_AdminService_ListWorkflows_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { var 
protoReq admin.ResourceListRequest var metadata runtime.ServerMetadata @@ -819,20 +949,20 @@ func request_AdminService_ListLaunchPlans_0(ctx context.Context, marshaler runti if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListLaunchPlans_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListWorkflows_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListLaunchPlans(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListWorkflows(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListLaunchPlans_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 1, 2, 0, 0}, Check: []int{0, 1, 2, 2, 3, 4}} + filter_AdminService_ListWorkflows_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} ) -func request_AdminService_ListLaunchPlans_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { +func request_AdminService_ListWorkflows_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { var protoReq admin.ResourceListRequest var metadata runtime.ServerMetadata @@ -843,6 +973,17 @@ func request_AdminService_ListLaunchPlans_1(ctx context.Context, marshaler runti _ = err ) + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + val, ok = pathParams["id.project"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") @@ -865,29 +1006,36 @@ func request_AdminService_ListLaunchPlans_1(ctx context.Context, marshaler runti return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListLaunchPlans_1); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListWorkflows_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListLaunchPlans(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListWorkflows(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_UpdateLaunchPlan_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.LaunchPlanUpdateRequest - var metadata 
runtime.ServerMetadata +var ( + filter_AdminService_ListWorkflows_2 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 1, 2, 0, 0}, Check: []int{0, 1, 2, 2, 3, 4}} +) - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } +func request_AdminService_ListWorkflows_2(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ResourceListRequest + var metadata runtime.ServerMetadata var ( val string @@ -918,35 +1066,80 @@ func request_AdminService_UpdateLaunchPlan_0(ctx context.Context, marshaler runt return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - val, ok = pathParams["id.name"] + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListWorkflows_2); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListWorkflows(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListWorkflows_3 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} +) + +func request_AdminService_ListWorkflows_3(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, 
runtime.ServerMetadata, error) { + var protoReq admin.ResourceListRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - val, ok = pathParams["id.version"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.version") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.version", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.version", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - msg, err := client.UpdateLaunchPlan(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + if err := req.ParseForm(); err 
!= nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListWorkflows_3); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListWorkflows(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_CreateExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ExecutionCreateRequest +func request_AdminService_CreateLaunchPlan_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.LaunchPlanCreateRequest var metadata runtime.ServerMetadata newReader, berr := utilities.IOReaderFactory(req.Body) @@ -957,13 +1150,13 @@ func request_AdminService_CreateExecution_0(ctx context.Context, marshaler runti return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.CreateExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.CreateLaunchPlan(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_RelaunchExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ExecutionRelaunchRequest +func request_AdminService_CreateLaunchPlan_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq 
admin.LaunchPlanCreateRequest var metadata runtime.ServerMetadata newReader, berr := utilities.IOReaderFactory(req.Body) @@ -974,34 +1167,35 @@ func request_AdminService_RelaunchExecution_0(ctx context.Context, marshaler run return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.RelaunchExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err + var ( + val string + ok bool + err error + _ = err + ) -} + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } -func request_AdminService_RecoverExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ExecutionRecoverRequest - var metadata runtime.ServerMetadata + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - msg, err := client.RecoverExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.CreateLaunchPlan(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_GetExecution_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} + 
filter_AdminService_GetLaunchPlan_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3, "version": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} ) -func request_AdminService_GetExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.WorkflowExecutionGetRequest +func request_AdminService_GetLaunchPlan_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ObjectGetRequest var metadata runtime.ServerMetadata var ( @@ -1044,29 +1238,36 @@ func request_AdminService_GetExecution_0(ctx context.Context, marshaler runtime. return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } + val, ok = pathParams["id.version"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.version", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.version", err) + } + if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetExecution_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetLaunchPlan_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.GetLaunchPlan(ctx, &protoReq, 
grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_UpdateExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ExecutionUpdateRequest - var metadata runtime.ServerMetadata +var ( + filter_AdminService_GetLaunchPlan_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3, "name": 4, "version": 5}, Base: []int{1, 1, 1, 2, 3, 4, 5, 0, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 2, 3, 4, 5, 6, 7}} +) - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } +func request_AdminService_GetLaunchPlan_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ObjectGetRequest + var metadata runtime.ServerMetadata var ( val string @@ -1075,6 +1276,17 @@ func request_AdminService_UpdateExecution_0(ctx context.Context, marshaler runti _ = err ) + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + val, ok = pathParams["id.project"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") @@ -1108,17 +1320,35 @@ func request_AdminService_UpdateExecution_0(ctx context.Context, 
marshaler runti return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } - msg, err := client.UpdateExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + val, ok = pathParams["id.version"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.version", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.version", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetLaunchPlan_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetLaunchPlan(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_GetExecutionData_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} + filter_AdminService_GetActiveLaunchPlan_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} ) -func request_AdminService_GetExecutionData_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.WorkflowExecutionGetDataRequest +func request_AdminService_GetActiveLaunchPlan_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, 
runtime.ServerMetadata, error) { + var protoReq admin.ActiveLaunchPlanRequest var metadata runtime.ServerMetadata var ( @@ -1164,21 +1394,21 @@ func request_AdminService_GetExecutionData_0(ctx context.Context, marshaler runt if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetExecutionData_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetActiveLaunchPlan_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetExecutionData(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.GetActiveLaunchPlan(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListExecutions_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 1, 2, 0, 0}, Check: []int{0, 1, 2, 2, 3, 4}} + filter_AdminService_GetActiveLaunchPlan_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} ) -func request_AdminService_ListExecutions_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ResourceListRequest +func request_AdminService_GetActiveLaunchPlan_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ActiveLaunchPlanRequest var metadata runtime.ServerMetadata var ( @@ -1188,6 +1418,17 @@ func 
request_AdminService_ListExecutions_0(ctx context.Context, marshaler runtim _ = err ) + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + val, ok = pathParams["id.project"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") @@ -1210,29 +1451,36 @@ func request_AdminService_ListExecutions_0(ctx context.Context, marshaler runtim return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListExecutions_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetActiveLaunchPlan_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListExecutions(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.GetActiveLaunchPlan(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_TerminateExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req 
*http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ExecutionTerminateRequest - var metadata runtime.ServerMetadata +var ( + filter_AdminService_ListActiveLaunchPlans_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} +) - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } +func request_AdminService_ListActiveLaunchPlans_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ActiveLaunchPlanListRequest + var metadata runtime.ServerMetadata var ( val string @@ -1241,50 +1489,46 @@ func request_AdminService_TerminateExecution_0(ctx context.Context, marshaler ru _ = err ) - val, ok = pathParams["id.project"] + val, ok = pathParams["project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + protoReq.Project, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) } - val, ok = pathParams["id.domain"] + val, ok = pathParams["domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + protoReq.Domain, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) } - val, ok = pathParams["id.name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - - err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListActiveLaunchPlans_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.TerminateExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListActiveLaunchPlans(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_GetNodeExecution_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "execution_id": 1, "project": 2, "domain": 3, "name": 4, "node_id": 5}, Base: []int{1, 6, 1, 1, 2, 2, 5, 0, 0, 4, 0, 6, 0}, Check: []int{0, 1, 2, 3, 2, 5, 2, 4, 6, 7, 10, 2, 12}} + filter_AdminService_ListActiveLaunchPlans_1 = &utilities.DoubleArray{Encoding: map[string]int{"org": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}} ) -func request_AdminService_GetNodeExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, 
req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.NodeExecutionGetRequest +func request_AdminService_ListActiveLaunchPlans_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ActiveLaunchPlanListRequest var metadata runtime.ServerMetadata var ( @@ -1294,68 +1538,57 @@ func request_AdminService_GetNodeExecution_0(ctx context.Context, marshaler runt _ = err ) - val, ok = pathParams["id.execution_id.project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.project") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.project", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.project", err) - } - - val, ok = pathParams["id.execution_id.domain"] + val, ok = pathParams["org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.domain", val) + protoReq.Org, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) } - val, ok = pathParams["id.execution_id.name"] + val, ok = pathParams["project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") } - err = 
runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.name", val) + protoReq.Project, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) } - val, ok = pathParams["id.node_id"] + val, ok = pathParams["domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_id") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.node_id", val) + protoReq.Domain, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_id", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetNodeExecution_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListActiveLaunchPlans_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetNodeExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListActiveLaunchPlans(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListNodeExecutions_0 = &utilities.DoubleArray{Encoding: map[string]int{"workflow_execution_id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} + 
filter_AdminService_ListLaunchPlanIds_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} ) -func request_AdminService_ListNodeExecutions_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.NodeExecutionListRequest +func request_AdminService_ListLaunchPlanIds_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NamedEntityIdentifierListRequest var metadata runtime.ServerMetadata var ( @@ -1365,57 +1598,46 @@ func request_AdminService_ListNodeExecutions_0(ctx context.Context, marshaler ru _ = err ) - val, ok = pathParams["workflow_execution_id.project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.project") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.project", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.project", err) - } - - val, ok = pathParams["workflow_execution_id.domain"] + val, ok = pathParams["project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") } - err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.domain", val) + protoReq.Project, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type 
mismatch, parameter: %s, error: %v", "project", err) } - val, ok = pathParams["workflow_execution_id.name"] + val, ok = pathParams["domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.name", val) + protoReq.Domain, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListNodeExecutions_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListLaunchPlanIds_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListNodeExecutions(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListLaunchPlanIds(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListNodeExecutionsForTask_0 = &utilities.DoubleArray{Encoding: map[string]int{"task_execution_id": 0, "node_execution_id": 1, "execution_id": 2, "project": 3, "domain": 4, "name": 5, "node_id": 6, "task_id": 7, "version": 8, "retry_attempt": 9}, Base: []int{1, 20, 1, 1, 1, 4, 3, 2, 7, 5, 3, 0, 0, 0, 9, 6, 0, 15, 9, 0, 17, 12, 0, 19, 15, 0, 19, 18, 0, 20, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 7, 2, 9, 10, 5, 8, 11, 2, 15, 16, 2, 18, 19, 2, 21, 22, 2, 24, 25, 2, 27, 28, 2, 30}} + 
filter_AdminService_ListLaunchPlanIds_1 = &utilities.DoubleArray{Encoding: map[string]int{"org": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}} ) -func request_AdminService_ListNodeExecutionsForTask_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.NodeExecutionForTaskListRequest +func request_AdminService_ListLaunchPlanIds_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NamedEntityIdentifierListRequest var metadata runtime.ServerMetadata var ( @@ -1425,123 +1647,117 @@ func request_AdminService_ListNodeExecutionsForTask_0(ctx context.Context, marsh _ = err ) - val, ok = pathParams["task_execution_id.node_execution_id.execution_id.project"] + val, ok = pathParams["org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.execution_id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") } - err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.execution_id.project", val) + protoReq.Org, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.execution_id.project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) } - val, ok = pathParams["task_execution_id.node_execution_id.execution_id.domain"] + val, ok = pathParams["project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", 
"task_execution_id.node_execution_id.execution_id.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") } - err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.execution_id.domain", val) + protoReq.Project, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.execution_id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) } - val, ok = pathParams["task_execution_id.node_execution_id.execution_id.name"] + val, ok = pathParams["domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.execution_id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.execution_id.name", val) + protoReq.Domain, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.execution_id.name", err) - } - - val, ok = pathParams["task_execution_id.node_execution_id.node_id"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.node_id") + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) } - err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.node_id", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.node_id", err) + if err := req.ParseForm(); err != nil { + return nil, 
metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - - val, ok = pathParams["task_execution_id.task_id.project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.project") + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListLaunchPlanIds_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.project", val) + msg, err := client.ListLaunchPlanIds(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.project", err) - } +} - val, ok = pathParams["task_execution_id.task_id.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.domain") - } +var ( + filter_AdminService_ListLaunchPlans_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} +) - err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.domain", val) +func request_AdminService_ListLaunchPlans_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ResourceListRequest + var metadata runtime.ServerMetadata - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.domain", err) - } + var ( + val string + ok bool + err error + _ = err + ) - val, ok = pathParams["task_execution_id.task_id.name"] + val, ok = pathParams["id.project"] if !ok { - 
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.name", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - val, ok = pathParams["task_execution_id.task_id.version"] + val, ok = pathParams["id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.version") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.version", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.version", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - val, ok = pathParams["task_execution_id.retry_attempt"] + val, ok = pathParams["id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.retry_attempt") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") } - err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.retry_attempt", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, 
parameter: %s, error: %v", "task_execution_id.retry_attempt", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListNodeExecutionsForTask_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListLaunchPlans_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListNodeExecutionsForTask(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListLaunchPlans(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_GetNodeExecutionData_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "execution_id": 1, "project": 2, "domain": 3, "name": 4, "node_id": 5}, Base: []int{1, 6, 1, 1, 2, 2, 5, 0, 0, 4, 0, 6, 0}, Check: []int{0, 1, 2, 3, 2, 5, 2, 4, 6, 7, 10, 2, 12}} + filter_AdminService_ListLaunchPlans_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} ) -func request_AdminService_GetNodeExecutionData_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.NodeExecutionGetDataRequest +func request_AdminService_ListLaunchPlans_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ResourceListRequest var metadata runtime.ServerMetadata var 
( @@ -1551,91 +1767,70 @@ func request_AdminService_GetNodeExecutionData_0(ctx context.Context, marshaler _ = err ) - val, ok = pathParams["id.execution_id.project"] + val, ok = pathParams["id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.project", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - val, ok = pathParams["id.execution_id.domain"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.domain", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - val, ok = pathParams["id.execution_id.name"] + val, ok = pathParams["id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.name", val) + err = runtime.PopulateFieldFromPath(&protoReq, 
"id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - val, ok = pathParams["id.node_id"] + val, ok = pathParams["id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_id") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.node_id", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_id", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetNodeExecutionData_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListLaunchPlans_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetNodeExecutionData(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListLaunchPlans(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_RegisterProject_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ProjectRegisterRequest - var metadata runtime.ServerMetadata - - newReader, berr := 
utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.RegisterProject(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} +var ( + filter_AdminService_ListLaunchPlans_2 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 1, 2, 0, 0}, Check: []int{0, 1, 2, 2, 3, 4}} +) -func request_AdminService_UpdateProject_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.Project +func request_AdminService_ListLaunchPlans_2(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ResourceListRequest var metadata runtime.ServerMetadata - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - var ( val string ok bool @@ -1643,78 +1838,102 @@ func request_AdminService_UpdateProject_0(ctx context.Context, marshaler runtime _ = err ) - val, ok = pathParams["id"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - protoReq.Id, err = runtime.String(val) + err = 
runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - msg, err := client.UpdateProject(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } -var ( - filter_AdminService_ListProjects_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) -func request_AdminService_ListProjects_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ProjectListRequest - var metadata runtime.ServerMetadata + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListProjects_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListLaunchPlans_2); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListProjects(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListLaunchPlans(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_CreateWorkflowEvent_0(ctx 
context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.WorkflowExecutionEventRequest +var ( + filter_AdminService_ListLaunchPlans_3 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} +) + +func request_AdminService_ListLaunchPlans_3(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ResourceListRequest var metadata runtime.ServerMetadata - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - msg, err := client.CreateWorkflowEvent(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } -} + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) -func request_AdminService_CreateNodeEvent_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams 
map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.NodeExecutionEventRequest - var metadata runtime.ServerMetadata + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListLaunchPlans_3); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.CreateNodeEvent(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListLaunchPlans(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_CreateTaskEvent_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.TaskExecutionEventRequest +func request_AdminService_UpdateLaunchPlan_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq 
admin.LaunchPlanUpdateRequest var metadata runtime.ServerMetadata newReader, berr := utilities.IOReaderFactory(req.Body) @@ -1725,19 +1944,6 @@ func request_AdminService_CreateTaskEvent_0(ctx context.Context, marshaler runti return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.CreateTaskEvent(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -var ( - filter_AdminService_GetTaskExecution_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "node_execution_id": 1, "execution_id": 2, "project": 3, "domain": 4, "name": 5, "node_id": 6, "task_id": 7, "version": 8, "retry_attempt": 9}, Base: []int{1, 20, 1, 1, 1, 4, 3, 2, 7, 5, 3, 0, 0, 0, 9, 6, 0, 15, 9, 0, 17, 12, 0, 19, 15, 0, 19, 18, 0, 20, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 7, 2, 9, 10, 5, 8, 11, 2, 15, 16, 2, 18, 19, 2, 21, 22, 2, 24, 25, 2, 27, 28, 2, 30}} -) - -func request_AdminService_GetTaskExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.TaskExecutionGetRequest - var metadata runtime.ServerMetadata - var ( val string ok bool @@ -1745,125 +1951,166 @@ func request_AdminService_GetTaskExecution_0(ctx context.Context, marshaler runt _ = err ) - val, ok = pathParams["id.node_execution_id.execution_id.project"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.project", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, 
parameter: %s, error: %v", "id.node_execution_id.execution_id.project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - val, ok = pathParams["id.node_execution_id.execution_id.domain"] + val, ok = pathParams["id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.domain", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - val, ok = pathParams["id.node_execution_id.execution_id.name"] + val, ok = pathParams["id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.name", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } - val, ok = pathParams["id.node_execution_id.node_id"] + val, ok = pathParams["id.version"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", 
"id.node_execution_id.node_id") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.version") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.node_id", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.version", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.node_id", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.version", err) } - val, ok = pathParams["id.task_id.project"] + msg, err := client.UpdateLaunchPlan(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_UpdateLaunchPlan_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3, "name": 4, "version": 5}, Base: []int{1, 1, 1, 2, 3, 4, 5, 0, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 2, 3, 4, 5, 6, 7}} +) + +func request_AdminService_UpdateLaunchPlan_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.LaunchPlanUpdateRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.project", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.project", err) + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - val, ok = pathParams["id.task_id.domain"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.domain", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - val, ok = pathParams["id.task_id.name"] + val, ok = pathParams["id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.name", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - val, ok = pathParams["id.task_id.version"] + val, ok = pathParams["id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.version") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.version", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) if err != nil { - return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.version", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } - val, ok = pathParams["id.retry_attempt"] + val, ok = pathParams["id.version"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.retry_attempt") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.version") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.retry_attempt", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.version", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.retry_attempt", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.version", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetTaskExecution_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_UpdateLaunchPlan_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetTaskExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.UpdateLaunchPlan(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -var ( - filter_AdminService_ListTaskExecutions_0 = &utilities.DoubleArray{Encoding: map[string]int{"node_execution_id": 0, "execution_id": 1, "project": 2, "domain": 3, "name": 4, "node_id": 5}, Base: []int{1, 6, 1, 1, 2, 2, 5, 0, 0, 4, 0, 6, 0}, Check: []int{0, 1, 2, 3, 2, 5, 2, 4, 6, 7, 10, 2, 12}} -) +func request_AdminService_CreateExecution_0(ctx 
context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ExecutionCreateRequest + var metadata runtime.ServerMetadata -func request_AdminService_ListTaskExecutions_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.TaskExecutionListRequest + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.CreateExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_CreateExecution_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ExecutionCreateRequest var metadata runtime.ServerMetadata + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + var ( val string ok bool @@ -1871,70 +2118,103 @@ func request_AdminService_ListTaskExecutions_0(ctx context.Context, marshaler ru _ = err ) - val, ok = pathParams["node_execution_id.execution_id.project"] + val, ok = pathParams["org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", 
"node_execution_id.execution_id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") } - err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.execution_id.project", val) + protoReq.Org, err = runtime.String(val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.execution_id.project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) } - val, ok = pathParams["node_execution_id.execution_id.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.execution_id.domain") - } + msg, err := client.CreateExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err - err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.execution_id.domain", val) +} - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.execution_id.domain", err) - } +func request_AdminService_RelaunchExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ExecutionRelaunchRequest + var metadata runtime.ServerMetadata - val, ok = pathParams["node_execution_id.execution_id.name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.execution_id.name") + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, 
"%v", err) } - err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.execution_id.name", val) + msg, err := client.RelaunchExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.execution_id.name", err) +} + +func request_AdminService_RelaunchExecution_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ExecutionRelaunchRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - val, ok = pathParams["node_execution_id.node_id"] + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.node_id") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") } - err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.node_id", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.node_id", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + msg, err := 
client.RelaunchExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_RecoverExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ExecutionRecoverRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTaskExecutions_0); err != nil { + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListTaskExecutions(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.RecoverExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -var ( - filter_AdminService_GetTaskExecutionData_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "node_execution_id": 1, "execution_id": 2, "project": 3, "domain": 4, "name": 5, "node_id": 6, "task_id": 7, "version": 8, "retry_attempt": 9}, Base: []int{1, 20, 1, 1, 1, 4, 3, 2, 7, 5, 3, 0, 0, 0, 9, 6, 0, 15, 9, 0, 17, 12, 0, 19, 15, 0, 19, 18, 0, 20, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 7, 2, 9, 10, 5, 8, 11, 2, 15, 16, 2, 18, 19, 2, 21, 22, 2, 24, 25, 2, 27, 28, 2, 30}} -) - -func request_AdminService_GetTaskExecutionData_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.TaskExecutionGetDataRequest +func request_AdminService_RecoverExecution_1(ctx context.Context, 
marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ExecutionRecoverRequest var metadata runtime.ServerMetadata + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + var ( val string ok bool @@ -1942,119 +2222,155 @@ func request_AdminService_GetTaskExecutionData_0(ctx context.Context, marshaler _ = err ) - val, ok = pathParams["id.node_execution_id.execution_id.project"] + val, ok = pathParams["id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.project", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - val, ok = pathParams["id.node_execution_id.execution_id.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.domain") - } + msg, err := client.RecoverExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err - err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.domain", val) +} - if err != nil { - return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.domain", err) - } +var ( + filter_AdminService_GetExecution_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} +) - val, ok = pathParams["id.node_execution_id.execution_id.name"] +func request_AdminService_GetExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowExecutionGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.name", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - val, ok = pathParams["id.node_execution_id.node_id"] + val, ok = pathParams["id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.node_id") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.node_id", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) if err != nil { 
- return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.node_id", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - val, ok = pathParams["id.task_id.project"] + val, ok = pathParams["id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.project", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } - val, ok = pathParams["id.task_id.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.domain") - } + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetExecution_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } - err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.domain", val) + msg, err := client.GetExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetExecution_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} +) + +func request_AdminService_GetExecution_1(ctx context.Context, marshaler 
runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowExecutionGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - val, ok = pathParams["id.task_id.name"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.name", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - val, ok = pathParams["id.task_id.version"] + val, ok = pathParams["id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.version") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.version", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type 
mismatch, parameter: %s, error: %v", "id.task_id.version", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - val, ok = pathParams["id.retry_attempt"] + val, ok = pathParams["id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.retry_attempt") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.retry_attempt", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.retry_attempt", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetTaskExecutionData_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetExecution_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetTaskExecutionData(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.GetExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_UpdateProjectDomainAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ProjectDomainAttributesUpdateRequest +func request_AdminService_UpdateExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req 
*http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ExecutionUpdateRequest var metadata runtime.ServerMetadata newReader, berr := utilities.IOReaderFactory(req.Body) @@ -2072,84 +2388,46 @@ func request_AdminService_UpdateProjectDomainAttributes_0(ctx context.Context, m _ = err ) - val, ok = pathParams["attributes.project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.project") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "attributes.project", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.project", err) - } - - val, ok = pathParams["attributes.domain"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - err = runtime.PopulateFieldFromPath(&protoReq, "attributes.domain", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - msg, err := client.UpdateProjectDomainAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -var ( - filter_AdminService_GetProjectDomainAttributes_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} -) - -func request_AdminService_GetProjectDomainAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams 
map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ProjectDomainAttributesGetRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["project"] + val, ok = pathParams["id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - protoReq.Project, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - val, ok = pathParams["domain"] + val, ok = pathParams["id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") } - protoReq.Domain, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) - } - - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetProjectDomainAttributes_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } - msg, err := client.GetProjectDomainAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := 
client.UpdateExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_DeleteProjectDomainAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ProjectDomainAttributesDeleteRequest +func request_AdminService_UpdateExecution_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ExecutionUpdateRequest var metadata runtime.ServerMetadata newReader, berr := utilities.IOReaderFactory(req.Body) @@ -2167,74 +2445,61 @@ func request_AdminService_DeleteProjectDomainAttributes_0(ctx context.Context, m _ = err ) - val, ok = pathParams["project"] + val, ok = pathParams["id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") } - protoReq.Project, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - val, ok = pathParams["domain"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - protoReq.Domain, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, 
"type mismatch, parameter: %s, error: %v", "domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - msg, err := client.DeleteProjectDomainAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } -func request_AdminService_UpdateProjectAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ProjectAttributesUpdateRequest - var metadata runtime.ServerMetadata + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["attributes.project"] + val, ok = pathParams["id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") } - err = runtime.PopulateFieldFromPath(&protoReq, "attributes.project", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.project", err) + return 
nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } - msg, err := client.UpdateProjectAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.UpdateExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_GetProjectAttributes_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} + filter_AdminService_GetExecutionData_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} ) -func request_AdminService_GetProjectAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ProjectAttributesGetRequest +func request_AdminService_GetExecutionData_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowExecutionGetDataRequest var metadata runtime.ServerMetadata var ( @@ -2244,75 +2509,58 @@ func request_AdminService_GetProjectAttributes_0(ctx context.Context, marshaler _ = err ) - val, ok = pathParams["project"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - protoReq.Project, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) 
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetProjectAttributes_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - msg, err := client.GetProjectAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func request_AdminService_DeleteProjectAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ProjectAttributesDeleteRequest - var metadata runtime.ServerMetadata + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["project"] + val, ok = pathParams["id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") } - protoReq.Project, err = runtime.String(val) + err = 
runtime.PopulateFieldFromPath(&protoReq, "id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } - msg, err := client.DeleteProjectAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetExecutionData_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetExecutionData(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_UpdateWorkflowAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.WorkflowAttributesUpdateRequest - var metadata runtime.ServerMetadata +var ( + filter_AdminService_GetExecutionData_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} +) - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } +func request_AdminService_GetExecutionData_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, 
runtime.ServerMetadata, error) { + var protoReq admin.WorkflowExecutionGetDataRequest + var metadata runtime.ServerMetadata var ( val string @@ -2321,115 +2569,69 @@ func request_AdminService_UpdateWorkflowAttributes_0(ctx context.Context, marsha _ = err ) - val, ok = pathParams["attributes.project"] + val, ok = pathParams["id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") } - err = runtime.PopulateFieldFromPath(&protoReq, "attributes.project", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.project", err) - } - - val, ok = pathParams["attributes.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.domain") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "attributes.domain", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.domain", err) - } - - val, ok = pathParams["attributes.workflow"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.workflow") - } - - err = runtime.PopulateFieldFromPath(&protoReq, "attributes.workflow", val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.workflow", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - msg, err := client.UpdateWorkflowAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -var ( - filter_AdminService_GetWorkflowAttributes_0 = 
&utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1, "workflow": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}} -) - -func request_AdminService_GetWorkflowAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.WorkflowAttributesGetRequest - var metadata runtime.ServerMetadata - - var ( - val string - ok bool - err error - _ = err - ) - - val, ok = pathParams["project"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - protoReq.Project, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - val, ok = pathParams["domain"] + val, ok = pathParams["id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - protoReq.Domain, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - val, ok = pathParams["workflow"] + val, ok = pathParams["id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", 
"workflow") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") } - protoReq.Workflow, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetWorkflowAttributes_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetExecutionData_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetWorkflowAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.GetExecutionData(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_DeleteWorkflowAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.WorkflowAttributesDeleteRequest - var metadata runtime.ServerMetadata +var ( + filter_AdminService_ListExecutions_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 1, 2, 0, 0}, Check: []int{0, 1, 2, 2, 3, 4}} +) - newReader, berr := utilities.IOReaderFactory(req.Body) - if berr != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) - } - if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { - return nil, metadata, 
status.Errorf(codes.InvalidArgument, "%v", err) - } +func request_AdminService_ListExecutions_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ResourceListRequest + var metadata runtime.ServerMetadata var ( val string @@ -2438,156 +2640,119 @@ func request_AdminService_DeleteWorkflowAttributes_0(ctx context.Context, marsha _ = err ) - val, ok = pathParams["project"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") - } - - protoReq.Project, err = runtime.String(val) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) - } - - val, ok = pathParams["domain"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - protoReq.Domain, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - val, ok = pathParams["workflow"] + val, ok = pathParams["id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - protoReq.Workflow, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow", err) 
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } - msg, err := client.DeleteWorkflowAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -var ( - filter_AdminService_ListMatchableAttributes_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} -) - -func request_AdminService_ListMatchableAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ListMatchableAttributesRequest - var metadata runtime.ServerMetadata - if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListMatchableAttributes_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListExecutions_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListMatchableAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListExecutions(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListNamedEntities_0 = &utilities.DoubleArray{Encoding: map[string]int{"resource_type": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}} + filter_AdminService_ListExecutions_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} ) -func request_AdminService_ListNamedEntities_0(ctx context.Context, marshaler runtime.Marshaler, client 
AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.NamedEntityListRequest +func request_AdminService_ListExecutions_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ResourceListRequest var metadata runtime.ServerMetadata var ( val string - e int32 ok bool err error _ = err ) - val, ok = pathParams["resource_type"] + val, ok = pathParams["id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") } - e, err = runtime.Enum(val, core.ResourceType_value) + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - protoReq.ResourceType = core.ResourceType(e) - - val, ok = pathParams["project"] + val, ok = pathParams["id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") } - protoReq.Project, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) } - val, ok = pathParams["domain"] + val, ok = pathParams["id.domain"] if !ok { - return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") } - protoReq.Domain, err = runtime.String(val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListNamedEntities_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListExecutions_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListNamedEntities(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.ListExecutions(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -var ( - filter_AdminService_GetNamedEntity_0 = &utilities.DoubleArray{Encoding: map[string]int{"resource_type": 0, "id": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 1, 3, 3, 3, 2, 4, 5, 6}} -) - -func request_AdminService_GetNamedEntity_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.NamedEntityGetRequest +func request_AdminService_TerminateExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq 
admin.ExecutionTerminateRequest var metadata runtime.ServerMetadata + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + var ( val string - e int32 ok bool err error _ = err ) - val, ok = pathParams["resource_type"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") - } - - e, err = runtime.Enum(val, core.ResourceType_value) - - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) - } - - protoReq.ResourceType = core.ResourceType(e) - val, ok = pathParams["id.project"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") @@ -2621,20 +2786,13 @@ func request_AdminService_GetNamedEntity_0(ctx context.Context, marshaler runtim return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetNamedEntity_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - - msg, err := client.GetNamedEntity(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.TerminateExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } -func request_AdminService_UpdateNamedEntity_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) 
(proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.NamedEntityUpdateRequest +func request_AdminService_TerminateExecution_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ExecutionTerminateRequest var metadata runtime.ServerMetadata newReader, berr := utilities.IOReaderFactory(req.Body) @@ -2647,25 +2805,22 @@ func request_AdminService_UpdateNamedEntity_0(ctx context.Context, marshaler run var ( val string - e int32 ok bool err error _ = err ) - val, ok = pathParams["resource_type"] + val, ok = pathParams["id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") } - e, err = runtime.Enum(val, core.ResourceType_value) + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) } - protoReq.ResourceType = core.ResourceType(e) - val, ok = pathParams["id.project"] if !ok { return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") @@ -2699,248 +2854,323 @@ func request_AdminService_UpdateNamedEntity_0(ctx context.Context, marshaler run return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) } - msg, err := client.UpdateNamedEntity(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func request_AdminService_GetVersion_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams 
map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.GetVersionRequest - var metadata runtime.ServerMetadata - - msg, err := client.GetVersion(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.TerminateExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_GetDescriptionEntity_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "resource_type": 1, "project": 2, "domain": 3, "name": 4, "version": 5}, Base: []int{1, 1, 1, 2, 3, 4, 5, 0, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 2, 3, 4, 5, 6, 7}} + filter_AdminService_GetNodeExecution_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "execution_id": 1, "project": 2, "domain": 3, "name": 4, "node_id": 5}, Base: []int{1, 6, 1, 1, 2, 2, 5, 0, 0, 4, 0, 6, 0}, Check: []int{0, 1, 2, 3, 2, 5, 2, 4, 6, 7, 10, 2, 12}} ) -func request_AdminService_GetDescriptionEntity_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.ObjectGetRequest +func request_AdminService_GetNodeExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NodeExecutionGetRequest var metadata runtime.ServerMetadata var ( val string - e int32 ok bool err error _ = err ) - val, ok = pathParams["id.resource_type"] + val, ok = pathParams["id.execution_id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.resource_type") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.project") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.resource_type", val) + err 
= runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.resource_type", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.project", err) } - protoReq.Id.ResourceType = core.ResourceType(e) - - val, ok = pathParams["id.project"] + val, ok = pathParams["id.execution_id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.domain", err) } - val, ok = pathParams["id.domain"] + val, ok = pathParams["id.execution_id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.name") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.name", err) } - val, ok = pathParams["id.name"] + val, ok = pathParams["id.node_id"] if !ok { - return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_id") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_id", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_id", err) } - val, ok = pathParams["id.version"] + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetNodeExecution_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetNodeExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetNodeExecution_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "execution_id": 1, "org": 2, "project": 3, "domain": 4, "name": 5, "node_id": 6}, Base: []int{1, 8, 1, 1, 2, 2, 3, 3, 0, 0, 0, 7, 6, 0, 8, 0}, Check: []int{0, 1, 2, 3, 2, 5, 2, 7, 4, 6, 8, 2, 12, 13, 2, 15}} +) + +func request_AdminService_GetNodeExecution_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NodeExecutionGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.execution_id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.version") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", 
"id.execution_id.org") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.version", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.version", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.org", err) + } + + val, ok = pathParams["id.execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.project", err) + } + + val, ok = pathParams["id.execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.domain", err) + } + + val, ok = pathParams["id.execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.name", err) + } + + val, ok = pathParams["id.node_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_id", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, 
"type mismatch, parameter: %s, error: %v", "id.node_id", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetDescriptionEntity_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetNodeExecution_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.GetDescriptionEntity(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.GetNodeExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListDescriptionEntities_0 = &utilities.DoubleArray{Encoding: map[string]int{"resource_type": 0, "id": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 1, 3, 3, 3, 2, 4, 5, 6}} + filter_AdminService_GetDynamicNodeWorkflow_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "execution_id": 1, "project": 2, "domain": 3, "name": 4, "node_id": 5}, Base: []int{1, 6, 1, 1, 2, 2, 5, 0, 0, 4, 0, 6, 0}, Check: []int{0, 1, 2, 3, 2, 5, 2, 4, 6, 7, 10, 2, 12}} ) -func request_AdminService_ListDescriptionEntities_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.DescriptionEntityListRequest +func request_AdminService_GetDynamicNodeWorkflow_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.GetDynamicNodeWorkflowRequest var metadata runtime.ServerMetadata var ( val string - e int32 ok bool err error _ = err ) - val, ok = pathParams["resource_type"] 
+ val, ok = pathParams["id.execution_id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.project") } - e, err = runtime.Enum(val, core.ResourceType_value) + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.project", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.project", err) } - protoReq.ResourceType = core.ResourceType(e) - - val, ok = pathParams["id.project"] + val, ok = pathParams["id.execution_id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.domain", err) } - val, ok = pathParams["id.domain"] + val, ok = pathParams["id.execution_id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.name") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type 
mismatch, parameter: %s, error: %v", "id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.name", err) } - val, ok = pathParams["id.name"] + val, ok = pathParams["id.node_id"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_id") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_id", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_id", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListDescriptionEntities_0); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetDynamicNodeWorkflow_0); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListDescriptionEntities(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.GetDynamicNodeWorkflow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_ListDescriptionEntities_1 = &utilities.DoubleArray{Encoding: map[string]int{"resource_type": 0, "id": 1, "project": 2, "domain": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 3, 3, 2, 4, 5}} + filter_AdminService_GetDynamicNodeWorkflow_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "execution_id": 1, "org": 2, "project": 3, "domain": 4, "name": 5, 
"node_id": 6}, Base: []int{1, 8, 1, 1, 2, 2, 3, 3, 0, 0, 0, 7, 6, 0, 8, 0}, Check: []int{0, 1, 2, 3, 2, 5, 2, 7, 4, 6, 8, 2, 12, 13, 2, 15}} ) -func request_AdminService_ListDescriptionEntities_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.DescriptionEntityListRequest +func request_AdminService_GetDynamicNodeWorkflow_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.GetDynamicNodeWorkflowRequest var metadata runtime.ServerMetadata var ( val string - e int32 ok bool err error _ = err ) - val, ok = pathParams["resource_type"] + val, ok = pathParams["id.execution_id.org"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.org") } - e, err = runtime.Enum(val, core.ResourceType_value) + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.org", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.org", err) } - protoReq.ResourceType = core.ResourceType(e) + val, ok = pathParams["id.execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.project") + } - val, ok = pathParams["id.project"] + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.project", 
err) + } + + val, ok = pathParams["id.execution_id.domain"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.domain", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.domain", err) } - val, ok = pathParams["id.domain"] + val, ok = pathParams["id.execution_id.name"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.name") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.name", val) if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.name", err) + } + + val, ok = pathParams["id.node_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_id", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_id", err) } if err := req.ParseForm(); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, 
filter_AdminService_ListDescriptionEntities_1); err != nil { + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetDynamicNodeWorkflow_1); err != nil { return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } - msg, err := client.ListDescriptionEntities(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + msg, err := client.GetDynamicNodeWorkflow(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err } var ( - filter_AdminService_GetExecutionMetrics_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} + filter_AdminService_ListNodeExecutions_0 = &utilities.DoubleArray{Encoding: map[string]int{"workflow_execution_id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} ) -func request_AdminService_GetExecutionMetrics_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq admin.WorkflowExecutionGetMetricsRequest +func request_AdminService_ListNodeExecutions_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NodeExecutionListRequest var metadata runtime.ServerMetadata var ( @@ -2950,90 +3180,4832 @@ func request_AdminService_GetExecutionMetrics_0(ctx context.Context, marshaler r _ = err ) - val, ok = pathParams["id.project"] + val, ok = pathParams["workflow_execution_id.project"] if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", 
"workflow_execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.project", err) + } + + val, ok = pathParams["workflow_execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.domain", err) + } + + val, ok = pathParams["workflow_execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.name", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListNodeExecutions_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListNodeExecutions(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListNodeExecutions_1 = &utilities.DoubleArray{Encoding: map[string]int{"workflow_execution_id": 0, "org": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} +) + +func request_AdminService_ListNodeExecutions_1(ctx context.Context, marshaler 
runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NodeExecutionListRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["workflow_execution_id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.org", err) + } + + val, ok = pathParams["workflow_execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.project", err) + } + + val, ok = pathParams["workflow_execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.domain") } - err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.domain", err) + } + + val, ok = pathParams["workflow_execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.name", val) + + if err != nil { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.name", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListNodeExecutions_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListNodeExecutions(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListNodeExecutionsForTask_0 = &utilities.DoubleArray{Encoding: map[string]int{"task_execution_id": 0, "node_execution_id": 1, "execution_id": 2, "project": 3, "domain": 4, "name": 5, "node_id": 6, "task_id": 7, "version": 8, "retry_attempt": 9}, Base: []int{1, 20, 1, 1, 1, 4, 3, 2, 7, 5, 3, 0, 0, 0, 9, 6, 0, 15, 9, 0, 17, 12, 0, 19, 15, 0, 19, 18, 0, 20, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 7, 2, 9, 10, 5, 8, 11, 2, 15, 16, 2, 18, 19, 2, 21, 22, 2, 24, 25, 2, 27, 28, 2, 30}} +) + +func request_AdminService_ListNodeExecutionsForTask_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NodeExecutionForTaskListRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["task_execution_id.node_execution_id.execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", 
"task_execution_id.node_execution_id.execution_id.project", err) + } + + val, ok = pathParams["task_execution_id.node_execution_id.execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.execution_id.domain", err) + } + + val, ok = pathParams["task_execution_id.node_execution_id.execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.execution_id.name", err) + } + + val, ok = pathParams["task_execution_id.node_execution_id.node_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.node_id", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.node_id", err) + } + + val, ok = pathParams["task_execution_id.task_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.project", val) + + if err != nil { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.project", err) + } + + val, ok = pathParams["task_execution_id.task_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.domain", err) + } + + val, ok = pathParams["task_execution_id.task_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.name", err) + } + + val, ok = pathParams["task_execution_id.task_id.version"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.version", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.version", err) + } + + val, ok = pathParams["task_execution_id.retry_attempt"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.retry_attempt") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.retry_attempt", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.retry_attempt", err) + } + + if err := req.ParseForm(); err != nil { + 
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListNodeExecutionsForTask_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListNodeExecutionsForTask(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListNodeExecutionsForTask_1 = &utilities.DoubleArray{Encoding: map[string]int{"task_execution_id": 0, "node_execution_id": 1, "execution_id": 2, "org": 3, "project": 4, "domain": 5, "name": 6, "node_id": 7, "task_id": 8, "version": 9, "retry_attempt": 10}, Base: []int{1, 23, 1, 1, 1, 4, 3, 2, 7, 5, 3, 13, 0, 0, 0, 10, 6, 0, 12, 9, 0, 18, 12, 0, 20, 15, 0, 22, 18, 0, 22, 21, 0, 23, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 7, 2, 9, 10, 2, 5, 8, 11, 12, 16, 17, 2, 19, 20, 2, 22, 23, 2, 25, 26, 2, 28, 29, 2, 31, 32, 2, 34}} +) + +func request_AdminService_ListNodeExecutionsForTask_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NodeExecutionForTaskListRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["task_execution_id.node_execution_id.execution_id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.execution_id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.execution_id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.execution_id.org", err) + } + + val, ok = 
pathParams["task_execution_id.node_execution_id.execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.execution_id.project", err) + } + + val, ok = pathParams["task_execution_id.node_execution_id.execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.execution_id.domain", err) + } + + val, ok = pathParams["task_execution_id.node_execution_id.execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.execution_id.name", err) + } + + val, ok = pathParams["task_execution_id.node_execution_id.node_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.node_execution_id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.node_execution_id.node_id", val) + + if err != nil { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.node_execution_id.node_id", err) + } + + val, ok = pathParams["task_execution_id.task_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.project", err) + } + + val, ok = pathParams["task_execution_id.task_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.domain", err) + } + + val, ok = pathParams["task_execution_id.task_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.name", err) + } + + val, ok = pathParams["task_execution_id.task_id.version"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.task_id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.task_id.version", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.task_id.version", err) + } + + val, ok = 
pathParams["task_execution_id.retry_attempt"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "task_execution_id.retry_attempt") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "task_execution_id.retry_attempt", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "task_execution_id.retry_attempt", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListNodeExecutionsForTask_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListNodeExecutionsForTask(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetNodeExecutionData_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "execution_id": 1, "project": 2, "domain": 3, "name": 4, "node_id": 5}, Base: []int{1, 6, 1, 1, 2, 2, 5, 0, 0, 4, 0, 6, 0}, Check: []int{0, 1, 2, 3, 2, 5, 2, 4, 6, 7, 10, 2, 12}} +) + +func request_AdminService_GetNodeExecutionData_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NodeExecutionGetDataRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", 
"id.execution_id.project", err) + } + + val, ok = pathParams["id.execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.domain", err) + } + + val, ok = pathParams["id.execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.name", err) + } + + val, ok = pathParams["id.node_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_id", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_id", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetNodeExecutionData_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetNodeExecutionData(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetNodeExecutionData_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "execution_id": 1, "org": 2, "project": 3, "domain": 4, "name": 5, "node_id": 6}, Base: []int{1, 8, 1, 1, 2, 2, 3, 3, 0, 0, 0, 7, 6, 0, 8, 0}, Check: []int{0, 1, 2, 3, 2, 
5, 2, 7, 4, 6, 8, 2, 12, 13, 2, 15}} +) + +func request_AdminService_GetNodeExecutionData_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NodeExecutionGetDataRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.execution_id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.org", err) + } + + val, ok = pathParams["id.execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.project", err) + } + + val, ok = pathParams["id.execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.domain", err) + } + + val, ok = pathParams["id.execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type 
mismatch, parameter: %s, error: %v", "id.execution_id.name", err) + } + + val, ok = pathParams["id.node_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_id", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_id", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetNodeExecutionData_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetNodeExecutionData(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_RegisterProject_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectRegisterRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.RegisterProject(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_RegisterProject_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectRegisterRequest + var metadata 
runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["project.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "project.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project.org", err) + } + + msg, err := client.RegisterProject(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_UpdateProject_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.Project + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + + protoReq.Id, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + + msg, err := client.UpdateProject(ctx, &protoReq, 
grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_UpdateProject_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.Project + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") + } + + protoReq.Org, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) + } + + val, ok = pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + + protoReq.Id, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + + msg, err := client.UpdateProject(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListProjects_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_AdminService_ListProjects_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectListRequest + var metadata 
runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListProjects_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListProjects(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListProjects_1 = &utilities.DoubleArray{Encoding: map[string]int{"org": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +) + +func request_AdminService_ListProjects_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectListRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") + } + + protoReq.Org, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListProjects_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListProjects(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_CreateWorkflowEvent_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, 
runtime.ServerMetadata, error) { + var protoReq admin.WorkflowExecutionEventRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.CreateWorkflowEvent(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_CreateWorkflowEvent_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowExecutionEventRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["event.execution_id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "event.execution_id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "event.execution_id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "event.execution_id.org", err) + } + + msg, err := client.CreateWorkflowEvent(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_CreateNodeEvent_0(ctx context.Context, marshaler runtime.Marshaler, client 
AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NodeExecutionEventRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.CreateNodeEvent(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_CreateNodeEvent_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NodeExecutionEventRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["event.id.execution_id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "event.id.execution_id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "event.id.execution_id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "event.id.execution_id.org", err) + } + + msg, err := client.CreateNodeEvent(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func 
request_AdminService_CreateTaskEvent_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.TaskExecutionEventRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.CreateTaskEvent(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_CreateTaskEvent_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.TaskExecutionEventRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["event.parent_node_execution_id.execution_id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "event.parent_node_execution_id.execution_id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "event.parent_node_execution_id.execution_id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "event.parent_node_execution_id.execution_id.org", err) + } + + msg, 
err := client.CreateTaskEvent(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetTaskExecution_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "node_execution_id": 1, "execution_id": 2, "project": 3, "domain": 4, "name": 5, "node_id": 6, "task_id": 7, "version": 8, "retry_attempt": 9}, Base: []int{1, 20, 1, 1, 1, 4, 3, 2, 7, 5, 3, 0, 0, 0, 9, 6, 0, 15, 9, 0, 17, 12, 0, 19, 15, 0, 19, 18, 0, 20, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 7, 2, 9, 10, 5, 8, 11, 2, 15, 16, 2, 18, 19, 2, 21, 22, 2, 24, 25, 2, 27, 28, 2, 30}} +) + +func request_AdminService_GetTaskExecution_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.TaskExecutionGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.node_execution_id.execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.project", err) + } + + val, ok = pathParams["id.node_execution_id.execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.domain", err) + } + + val, ok 
= pathParams["id.node_execution_id.execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.name", err) + } + + val, ok = pathParams["id.node_execution_id.node_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.node_id", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.node_id", err) + } + + val, ok = pathParams["id.task_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.project", err) + } + + val, ok = pathParams["id.task_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.domain", err) + } + + val, ok = pathParams["id.task_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.name", val) + + if err != nil { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.name", err) + } + + val, ok = pathParams["id.task_id.version"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.version", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.version", err) + } + + val, ok = pathParams["id.retry_attempt"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.retry_attempt") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.retry_attempt", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.retry_attempt", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetTaskExecution_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetTaskExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetTaskExecution_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "node_execution_id": 1, "execution_id": 2, "org": 3, "project": 4, "domain": 5, "name": 6, "node_id": 7, "task_id": 8, "version": 9, "retry_attempt": 10}, Base: []int{1, 23, 1, 1, 1, 4, 3, 2, 7, 5, 3, 13, 0, 0, 0, 10, 6, 0, 12, 9, 0, 18, 12, 0, 20, 15, 0, 22, 18, 0, 22, 21, 0, 23, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 7, 2, 9, 10, 2, 5, 8, 11, 12, 16, 17, 2, 19, 20, 2, 22, 23, 2, 25, 26, 2, 28, 29, 2, 31, 32, 2, 34}} +) + +func request_AdminService_GetTaskExecution_1(ctx context.Context, marshaler 
runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.TaskExecutionGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.node_execution_id.execution_id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.org", err) + } + + val, ok = pathParams["id.node_execution_id.execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.project", err) + } + + val, ok = pathParams["id.node_execution_id.execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.domain", err) + } + + val, ok = pathParams["id.node_execution_id.execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.name") + } + + err = 
runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.name", err) + } + + val, ok = pathParams["id.node_execution_id.node_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.node_id", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.node_id", err) + } + + val, ok = pathParams["id.task_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.project", err) + } + + val, ok = pathParams["id.task_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.domain", err) + } + + val, ok = pathParams["id.task_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.name", err) + } + + val, ok = pathParams["id.task_id.version"] + if !ok { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.version", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.version", err) + } + + val, ok = pathParams["id.retry_attempt"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.retry_attempt") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.retry_attempt", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.retry_attempt", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetTaskExecution_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetTaskExecution(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListTaskExecutions_0 = &utilities.DoubleArray{Encoding: map[string]int{"node_execution_id": 0, "execution_id": 1, "project": 2, "domain": 3, "name": 4, "node_id": 5}, Base: []int{1, 6, 1, 1, 2, 2, 5, 0, 0, 4, 0, 6, 0}, Check: []int{0, 1, 2, 3, 2, 5, 2, 4, 6, 7, 10, 2, 12}} +) + +func request_AdminService_ListTaskExecutions_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.TaskExecutionListRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["node_execution_id.execution_id.project"] + if !ok { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.execution_id.project", err) + } + + val, ok = pathParams["node_execution_id.execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.execution_id.domain", err) + } + + val, ok = pathParams["node_execution_id.execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.execution_id.name", err) + } + + val, ok = pathParams["node_execution_id.node_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.node_id", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.node_id", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTaskExecutions_0); err != nil { 
+ return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListTaskExecutions(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListTaskExecutions_1 = &utilities.DoubleArray{Encoding: map[string]int{"node_execution_id": 0, "execution_id": 1, "org": 2, "project": 3, "domain": 4, "name": 5, "node_id": 6}, Base: []int{1, 8, 1, 1, 2, 2, 3, 3, 0, 0, 0, 7, 6, 0, 8, 0}, Check: []int{0, 1, 2, 3, 2, 5, 2, 7, 4, 6, 8, 2, 12, 13, 2, 15}} +) + +func request_AdminService_ListTaskExecutions_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.TaskExecutionListRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["node_execution_id.execution_id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.execution_id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.execution_id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.execution_id.org", err) + } + + val, ok = pathParams["node_execution_id.execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.execution_id.project", err) + } + + val, ok = pathParams["node_execution_id.execution_id.domain"] + if !ok { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.execution_id.domain", err) + } + + val, ok = pathParams["node_execution_id.execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.execution_id.name", err) + } + + val, ok = pathParams["node_execution_id.node_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "node_execution_id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "node_execution_id.node_id", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "node_execution_id.node_id", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListTaskExecutions_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListTaskExecutions(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetTaskExecutionData_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "node_execution_id": 1, "execution_id": 2, "project": 3, "domain": 4, "name": 5, "node_id": 6, "task_id": 7, "version": 8, 
"retry_attempt": 9}, Base: []int{1, 20, 1, 1, 1, 4, 3, 2, 7, 5, 3, 0, 0, 0, 9, 6, 0, 15, 9, 0, 17, 12, 0, 19, 15, 0, 19, 18, 0, 20, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 7, 2, 9, 10, 5, 8, 11, 2, 15, 16, 2, 18, 19, 2, 21, 22, 2, 24, 25, 2, 27, 28, 2, 30}} +) + +func request_AdminService_GetTaskExecutionData_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.TaskExecutionGetDataRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.node_execution_id.execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.project", err) + } + + val, ok = pathParams["id.node_execution_id.execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.domain", err) + } + + val, ok = pathParams["id.node_execution_id.execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.name", val) + + if err != nil { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.name", err) + } + + val, ok = pathParams["id.node_execution_id.node_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.node_id", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.node_id", err) + } + + val, ok = pathParams["id.task_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.project", err) + } + + val, ok = pathParams["id.task_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.domain", err) + } + + val, ok = pathParams["id.task_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.name", err) + } + + val, ok = pathParams["id.task_id.version"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, 
"id.task_id.version", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.version", err) + } + + val, ok = pathParams["id.retry_attempt"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.retry_attempt") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.retry_attempt", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.retry_attempt", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetTaskExecutionData_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetTaskExecutionData(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetTaskExecutionData_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "node_execution_id": 1, "execution_id": 2, "org": 3, "project": 4, "domain": 5, "name": 6, "node_id": 7, "task_id": 8, "version": 9, "retry_attempt": 10}, Base: []int{1, 23, 1, 1, 1, 4, 3, 2, 7, 5, 3, 13, 0, 0, 0, 10, 6, 0, 12, 9, 0, 18, 12, 0, 20, 15, 0, 22, 18, 0, 22, 21, 0, 23, 0}, Check: []int{0, 1, 2, 3, 4, 2, 6, 7, 2, 9, 10, 2, 5, 8, 11, 12, 16, 17, 2, 19, 20, 2, 22, 23, 2, 25, 26, 2, 28, 29, 2, 31, 32, 2, 34}} +) + +func request_AdminService_GetTaskExecutionData_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.TaskExecutionGetDataRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = 
pathParams["id.node_execution_id.execution_id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.org", err) + } + + val, ok = pathParams["id.node_execution_id.execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.project", err) + } + + val, ok = pathParams["id.node_execution_id.execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.domain", err) + } + + val, ok = pathParams["id.node_execution_id.execution_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.execution_id.name", err) + } + + val, ok = pathParams["id.node_execution_id.node_id"] + if !ok { + 
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.node_execution_id.node_id") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.node_execution_id.node_id", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.node_execution_id.node_id", err) + } + + val, ok = pathParams["id.task_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.project", err) + } + + val, ok = pathParams["id.task_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.domain", err) + } + + val, ok = pathParams["id.task_id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.name", err) + } + + val, ok = pathParams["id.task_id.version"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.task_id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.task_id.version", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.task_id.version", err) + } + + val, ok = 
pathParams["id.retry_attempt"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.retry_attempt") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.retry_attempt", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.retry_attempt", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetTaskExecutionData_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetTaskExecutionData(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_UpdateProjectDomainAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectDomainAttributesUpdateRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["attributes.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.project", err) + } + + val, ok = 
pathParams["attributes.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.domain", err) + } + + msg, err := client.UpdateProjectDomainAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_UpdateProjectDomainAttributes_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectDomainAttributesUpdateRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["attributes.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.org", err) + } + + val, ok = pathParams["attributes.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type 
mismatch, parameter: %s, error: %v", "attributes.project", err) + } + + val, ok = pathParams["attributes.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.domain", err) + } + + msg, err := client.UpdateProjectDomainAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetProjectDomainAttributes_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} +) + +func request_AdminService_GetProjectDomainAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectDomainAttributesGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + val, ok = pathParams["domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + } + + protoReq.Domain, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, 
"%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetProjectDomainAttributes_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetProjectDomainAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetProjectDomainAttributes_1 = &utilities.DoubleArray{Encoding: map[string]int{"org": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}} +) + +func request_AdminService_GetProjectDomainAttributes_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectDomainAttributesGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") + } + + protoReq.Org, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) + } + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + val, ok = pathParams["domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + } + + protoReq.Domain, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + } + + if err 
:= req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetProjectDomainAttributes_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetProjectDomainAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_DeleteProjectDomainAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectDomainAttributesDeleteRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + val, ok = pathParams["domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + } + + protoReq.Domain, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + } + + msg, err := client.DeleteProjectDomainAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), 
grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_DeleteProjectDomainAttributes_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectDomainAttributesDeleteRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") + } + + protoReq.Org, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) + } + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + val, ok = pathParams["domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + } + + protoReq.Domain, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + } + + msg, err := client.DeleteProjectDomainAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func 
request_AdminService_UpdateProjectAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectAttributesUpdateRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["attributes.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.project", err) + } + + msg, err := client.UpdateProjectAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_UpdateProjectAttributes_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectAttributesUpdateRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = 
pathParams["attributes.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.org", err) + } + + val, ok = pathParams["attributes.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.project", err) + } + + msg, err := client.UpdateProjectAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetProjectAttributes_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +) + +func request_AdminService_GetProjectAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectAttributesGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, 
req.Form, filter_AdminService_GetProjectAttributes_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetProjectAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetProjectAttributes_1 = &utilities.DoubleArray{Encoding: map[string]int{"org": 0, "project": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}} +) + +func request_AdminService_GetProjectAttributes_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectAttributesGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") + } + + protoReq.Org, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) + } + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetProjectAttributes_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetProjectAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, 
err + +} + +func request_AdminService_DeleteProjectAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectAttributesDeleteRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + msg, err := client.DeleteProjectAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_DeleteProjectAttributes_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ProjectAttributesDeleteRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["org"] + if !ok { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") + } + + protoReq.Org, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) + } + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + msg, err := client.DeleteProjectAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_UpdateWorkflowAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowAttributesUpdateRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["attributes.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.project", err) + } + + val, ok = pathParams["attributes.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing 
parameter %s", "attributes.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.domain", err) + } + + val, ok = pathParams["attributes.workflow"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.workflow") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.workflow", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.workflow", err) + } + + msg, err := client.UpdateWorkflowAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_UpdateWorkflowAttributes_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowAttributesUpdateRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["attributes.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.org", err) + } + + val, ok = pathParams["attributes.project"] + if !ok { + 
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.project", err) + } + + val, ok = pathParams["attributes.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.domain", err) + } + + val, ok = pathParams["attributes.workflow"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "attributes.workflow") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "attributes.workflow", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "attributes.workflow", err) + } + + msg, err := client.UpdateWorkflowAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetWorkflowAttributes_0 = &utilities.DoubleArray{Encoding: map[string]int{"project": 0, "domain": 1, "workflow": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}} +) + +func request_AdminService_GetWorkflowAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowAttributesGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + val, ok = pathParams["domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + } + + protoReq.Domain, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + } + + val, ok = pathParams["workflow"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow") + } + + protoReq.Workflow, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetWorkflowAttributes_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetWorkflowAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetWorkflowAttributes_1 = &utilities.DoubleArray{Encoding: map[string]int{"org": 0, "project": 1, "domain": 2, "workflow": 3}, Base: []int{1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 1, 1, 1, 2, 3, 4, 5}} +) + +func request_AdminService_GetWorkflowAttributes_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowAttributesGetRequest + var metadata runtime.ServerMetadata + + var ( + 
val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") + } + + protoReq.Org, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) + } + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + val, ok = pathParams["domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + } + + protoReq.Domain, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + } + + val, ok = pathParams["workflow"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow") + } + + protoReq.Workflow, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetWorkflowAttributes_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetWorkflowAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_DeleteWorkflowAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client 
AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowAttributesDeleteRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + val, ok = pathParams["domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + } + + protoReq.Domain, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + } + + val, ok = pathParams["workflow"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow") + } + + protoReq.Workflow, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow", err) + } + + msg, err := client.DeleteWorkflowAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_DeleteWorkflowAttributes_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, 
error) { + var protoReq admin.WorkflowAttributesDeleteRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") + } + + protoReq.Org, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) + } + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + val, ok = pathParams["domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + } + + protoReq.Domain, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + } + + val, ok = pathParams["workflow"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow") + } + + protoReq.Workflow, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow", err) + } + + msg, err := client.DeleteWorkflowAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( 
+ filter_AdminService_ListMatchableAttributes_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_AdminService_ListMatchableAttributes_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ListMatchableAttributesRequest + var metadata runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListMatchableAttributes_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListMatchableAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListMatchableAttributes_1 = &utilities.DoubleArray{Encoding: map[string]int{"org": 0}, Base: []int{1, 1, 0}, Check: []int{0, 1, 2}} +) + +func request_AdminService_ListMatchableAttributes_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ListMatchableAttributesRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") + } + + protoReq.Org, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, 
filter_AdminService_ListMatchableAttributes_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListMatchableAttributes(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListNamedEntities_0 = &utilities.DoubleArray{Encoding: map[string]int{"resource_type": 0, "project": 1, "domain": 2}, Base: []int{1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 1, 2, 3, 4}} +) + +func request_AdminService_ListNamedEntities_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NamedEntityListRequest + var metadata runtime.ServerMetadata + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok = pathParams["resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + } + + e, err = runtime.Enum(val, core.ResourceType_value) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + } + + protoReq.ResourceType = core.ResourceType(e) + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + val, ok = pathParams["domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + } + + protoReq.Domain, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + } + + if err := 
req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListNamedEntities_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListNamedEntities(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListNamedEntities_1 = &utilities.DoubleArray{Encoding: map[string]int{"org": 0, "resource_type": 1, "project": 2, "domain": 3}, Base: []int{1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 1, 1, 1, 2, 3, 4, 5}} +) + +func request_AdminService_ListNamedEntities_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NamedEntityListRequest + var metadata runtime.ServerMetadata + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok = pathParams["org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "org") + } + + protoReq.Org, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "org", err) + } + + val, ok = pathParams["resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + } + + e, err = runtime.Enum(val, core.ResourceType_value) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + } + + protoReq.ResourceType = core.ResourceType(e) + + val, ok = pathParams["project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "project") + } + + protoReq.Project, err = 
runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "project", err) + } + + val, ok = pathParams["domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "domain") + } + + protoReq.Domain, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "domain", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListNamedEntities_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListNamedEntities(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetNamedEntity_0 = &utilities.DoubleArray{Encoding: map[string]int{"resource_type": 0, "id": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 1, 3, 3, 3, 2, 4, 5, 6}} +) + +func request_AdminService_GetNamedEntity_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NamedEntityGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok = pathParams["resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + } + + e, err = runtime.Enum(val, core.ResourceType_value) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + } + + protoReq.ResourceType = 
core.ResourceType(e) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetNamedEntity_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetNamedEntity(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetNamedEntity_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "resource_type": 2, "project": 3, "domain": 4, "name": 5}, Base: []int{1, 1, 1, 5, 2, 3, 4, 0, 0, 0, 0, 0}, Check: []int{0, 1, 2, 1, 2, 2, 2, 3, 5, 6, 7, 4}} +) + +func request_AdminService_GetNamedEntity_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req 
*http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NamedEntityGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + + val, ok = pathParams["resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + } + + e, err = runtime.Enum(val, core.ResourceType_value) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + } + + protoReq.ResourceType = core.ResourceType(e) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, 
metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetNamedEntity_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetNamedEntity(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_UpdateNamedEntity_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NamedEntityUpdateRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok = pathParams["resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + } + + e, err = runtime.Enum(val, core.ResourceType_value) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + } + + protoReq.ResourceType = core.ResourceType(e) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, 
"type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + + msg, err := client.UpdateNamedEntity(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_UpdateNamedEntity_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.NamedEntityUpdateRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", 
err) + } + + val, ok = pathParams["resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + } + + e, err = runtime.Enum(val, core.ResourceType_value) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + } + + protoReq.ResourceType = core.ResourceType(e) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + + msg, err := client.UpdateNamedEntity(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_AdminService_GetVersion_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.GetVersionRequest + var metadata runtime.ServerMetadata + + msg, err := 
client.GetVersion(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetDescriptionEntity_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "resource_type": 1, "project": 2, "domain": 3, "name": 4, "version": 5}, Base: []int{1, 1, 1, 2, 3, 4, 5, 0, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 2, 3, 4, 5, 6, 7}} +) + +func request_AdminService_GetDescriptionEntity_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ObjectGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.resource_type") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.resource_type", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.resource_type", err) + } + + protoReq.Id.ResourceType = core.ResourceType(e) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = 
pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + + val, ok = pathParams["id.version"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.version", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.version", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetDescriptionEntity_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetDescriptionEntity(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetDescriptionEntity_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "resource_type": 2, "project": 3, "domain": 4, "name": 5, "version": 6}, Base: []int{1, 1, 1, 2, 3, 4, 5, 6, 0, 0, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 2, 2, 3, 4, 5, 6, 7, 8}} +) + +func request_AdminService_GetDescriptionEntity_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.ObjectGetRequest + var metadata runtime.ServerMetadata + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing 
parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + + val, ok = pathParams["id.resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.resource_type") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.resource_type", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.resource_type", err) + } + + protoReq.Id.ResourceType = core.ResourceType(e) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + + val, ok = pathParams["id.version"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.version") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.version", 
val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.version", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetDescriptionEntity_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetDescriptionEntity(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListDescriptionEntities_0 = &utilities.DoubleArray{Encoding: map[string]int{"resource_type": 0, "id": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 1, 3, 3, 3, 2, 4, 5, 6}} +) + +func request_AdminService_ListDescriptionEntities_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.DescriptionEntityListRequest + var metadata runtime.ServerMetadata + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok = pathParams["resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + } + + e, err = runtime.Enum(val, core.ResourceType_value) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + } + + protoReq.ResourceType = core.ResourceType(e) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListDescriptionEntities_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListDescriptionEntities(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListDescriptionEntities_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "resource_type": 2, "project": 3, "domain": 4, "name": 5}, Base: []int{1, 1, 1, 5, 2, 3, 4, 0, 0, 0, 0, 0}, Check: []int{0, 1, 2, 1, 2, 2, 2, 3, 5, 6, 7, 4}} +) + +func request_AdminService_ListDescriptionEntities_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.DescriptionEntityListRequest + var metadata runtime.ServerMetadata + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok 
= pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + + val, ok = pathParams["resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + } + + e, err = runtime.Enum(val, core.ResourceType_value) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + } + + protoReq.ResourceType = core.ResourceType(e) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := 
runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListDescriptionEntities_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListDescriptionEntities(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListDescriptionEntities_2 = &utilities.DoubleArray{Encoding: map[string]int{"resource_type": 0, "id": 1, "project": 2, "domain": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 1, 3, 3, 2, 4, 5}} +) + +func request_AdminService_ListDescriptionEntities_2(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.DescriptionEntityListRequest + var metadata runtime.ServerMetadata + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok = pathParams["resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + } + + e, err = runtime.Enum(val, core.ResourceType_value) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + } + + protoReq.ResourceType = core.ResourceType(e) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err 
!= nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListDescriptionEntities_2); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListDescriptionEntities(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_ListDescriptionEntities_3 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "resource_type": 2, "project": 3, "domain": 4}, Base: []int{1, 1, 1, 4, 2, 3, 0, 0, 0, 0}, Check: []int{0, 1, 2, 1, 2, 2, 3, 5, 6, 4}} +) + +func request_AdminService_ListDescriptionEntities_3(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.DescriptionEntityListRequest + var metadata runtime.ServerMetadata + + var ( + val string + e int32 + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + + val, ok = pathParams["resource_type"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "resource_type") + } + + e, err = runtime.Enum(val, core.ResourceType_value) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "resource_type", err) + } + + 
protoReq.ResourceType = core.ResourceType(e) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_ListDescriptionEntities_3); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListDescriptionEntities(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetExecutionMetrics_0 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "project": 1, "domain": 2, "name": 3}, Base: []int{1, 1, 1, 2, 3, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 3, 4, 5}} +) + +func request_AdminService_GetExecutionMetrics_0(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowExecutionGetMetricsRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing 
parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetExecutionMetrics_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetExecutionMetrics(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_AdminService_GetExecutionMetrics_1 = &utilities.DoubleArray{Encoding: map[string]int{"id": 0, "org": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} +) + +func request_AdminService_GetExecutionMetrics_1(ctx context.Context, marshaler runtime.Marshaler, client AdminServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.WorkflowExecutionGetMetricsRequest + var metadata 
runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.org", err) + } + + val, ok = pathParams["id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) + } + + val, ok = pathParams["id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) + } + + val, ok = pathParams["id.name"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetExecutionMetrics_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.GetExecutionMetrics(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, 
err + +} + +// RegisterAdminServiceHandlerFromEndpoint is same as RegisterAdminServiceHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterAdminServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.Dial(endpoint, opts...) + if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + + return RegisterAdminServiceHandler(ctx, mux, conn) +} + +// RegisterAdminServiceHandler registers the http handlers for service AdminService to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterAdminServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterAdminServiceHandlerClient(ctx, mux, NewAdminServiceClient(conn)) +} + +// RegisterAdminServiceHandlerClient registers the http handlers for service AdminService +// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "AdminServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "AdminServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "AdminServiceClient" to call the correct interceptors. 
+func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client AdminServiceClient) error { + + mux.Handle("POST", pattern_AdminService_CreateTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_CreateTask_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_CreateTask_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("POST", pattern_AdminService_CreateTask_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_CreateTask_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_CreateTask_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_GetTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetTask_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetTask_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_GetTask_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetTask_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetTask_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListTaskIds_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListTaskIds_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListTaskIds_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_ListTaskIds_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListTaskIds_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListTaskIds_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListTasks_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListTasks_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListTasks_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_ListTasks_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListTasks_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListTasks_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListTasks_2, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListTasks_2(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListTasks_2(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_ListTasks_3, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListTasks_3(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListTasks_3(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_AdminService_CreateWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_CreateWorkflow_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_CreateWorkflow_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("POST", pattern_AdminService_CreateWorkflow_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_CreateWorkflow_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_CreateWorkflow_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_GetWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetWorkflow_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetWorkflow_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_GetWorkflow_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetWorkflow_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetWorkflow_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListWorkflowIds_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListWorkflowIds_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListWorkflowIds_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_ListWorkflowIds_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListWorkflowIds_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListWorkflowIds_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListWorkflows_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListWorkflows_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListWorkflows_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_ListWorkflows_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListWorkflows_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListWorkflows_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListWorkflows_2, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListWorkflows_2(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListWorkflows_2(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_ListWorkflows_3, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListWorkflows_3(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListWorkflows_3(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_AdminService_CreateLaunchPlan_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_CreateLaunchPlan_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_CreateLaunchPlan_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("POST", pattern_AdminService_CreateLaunchPlan_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_CreateLaunchPlan_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_CreateLaunchPlan_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_GetLaunchPlan_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetLaunchPlan_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetLaunchPlan_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_GetLaunchPlan_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetLaunchPlan_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetLaunchPlan_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_GetActiveLaunchPlan_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetActiveLaunchPlan_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetActiveLaunchPlan_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_GetActiveLaunchPlan_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetActiveLaunchPlan_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetActiveLaunchPlan_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListActiveLaunchPlans_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListActiveLaunchPlans_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListActiveLaunchPlans_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_ListActiveLaunchPlans_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListActiveLaunchPlans_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListActiveLaunchPlans_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListLaunchPlanIds_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListLaunchPlanIds_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListLaunchPlanIds_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_ListLaunchPlanIds_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListLaunchPlanIds_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListLaunchPlanIds_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListLaunchPlans_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListLaunchPlans_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListLaunchPlans_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_ListLaunchPlans_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListLaunchPlans_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListLaunchPlans_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListLaunchPlans_2, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListLaunchPlans_2(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListLaunchPlans_2(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_ListLaunchPlans_3, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListLaunchPlans_3(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListLaunchPlans_3(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("PUT", pattern_AdminService_UpdateLaunchPlan_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_UpdateLaunchPlan_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_UpdateLaunchPlan_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("PUT", pattern_AdminService_UpdateLaunchPlan_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_UpdateLaunchPlan_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_UpdateLaunchPlan_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_AdminService_CreateExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_CreateExecution_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_CreateExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("PUT", pattern_AdminService_CreateExecution_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_CreateExecution_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_CreateExecution_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_AdminService_RelaunchExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_RelaunchExecution_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_RelaunchExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("POST", pattern_AdminService_RelaunchExecution_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_RelaunchExecution_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_RelaunchExecution_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_AdminService_RecoverExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_RecoverExecution_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_RecoverExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("POST", pattern_AdminService_RecoverExecution_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_RecoverExecution_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_RecoverExecution_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_GetExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetExecution_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_GetExecution_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetExecution_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetExecution_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("PUT", pattern_AdminService_UpdateExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_UpdateExecution_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_UpdateExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("PUT", pattern_AdminService_UpdateExecution_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_UpdateExecution_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_UpdateExecution_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_GetExecutionData_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetExecutionData_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetExecutionData_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_GetExecutionData_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetExecutionData_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetExecutionData_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListExecutions_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListExecutions_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListExecutions_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_AdminService_ListExecutions_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListExecutions_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_ListExecutions_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("DELETE", pattern_AdminService_TerminateExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_TerminateExecution_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_TerminateExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("DELETE", pattern_AdminService_TerminateExecution_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_TerminateExecution_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_TerminateExecution_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_GetNodeExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetNodeExecution_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AdminService_GetNodeExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.project", err) - } + }) - val, ok = pathParams["id.domain"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.domain") - } + mux.Handle("GET", pattern_AdminService_GetNodeExecution_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetNodeExecution_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } - err = runtime.PopulateFieldFromPath(&protoReq, "id.domain", val) + forward_AdminService_GetNodeExecution_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.domain", err) - } + }) - val, ok = pathParams["id.name"] - if !ok { - return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.name") - } + mux.Handle("GET", pattern_AdminService_GetDynamicNodeWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetDynamicNodeWorkflow_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } - err = runtime.PopulateFieldFromPath(&protoReq, "id.name", val) + forward_AdminService_GetDynamicNodeWorkflow_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
- if err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.name", err) - } + }) - if err := req.ParseForm(); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } - if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_AdminService_GetExecutionMetrics_0); err != nil { - return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) - } + mux.Handle("GET", pattern_AdminService_GetDynamicNodeWorkflow_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_GetDynamicNodeWorkflow_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } - msg, err := client.GetExecutionMetrics(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err + forward_AdminService_GetDynamicNodeWorkflow_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) -} + }) -// RegisterAdminServiceHandlerFromEndpoint is same as RegisterAdminServiceHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterAdminServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) 
- if err != nil { - return err - } - defer func() { + mux.Handle("GET", pattern_AdminService_ListNodeExecutions_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListNodeExecutions_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - return RegisterAdminServiceHandler(ctx, mux, conn) -} + forward_AdminService_ListNodeExecutions_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) -// RegisterAdminServiceHandler registers the http handlers for service AdminService to "mux". -// The handlers forward requests to the grpc endpoint over "conn". -func RegisterAdminServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterAdminServiceHandlerClient(ctx, mux, NewAdminServiceClient(conn)) -} + }) -// RegisterAdminServiceHandlerClient registers the http handlers for service AdminService -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "AdminServiceClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "AdminServiceClient" -// doesn't go through the normal gRPC flow (creating a gRPC client etc.) 
then it will be up to the passed in -// "AdminServiceClient" to call the correct interceptors. -func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client AdminServiceClient) error { + mux.Handle("GET", pattern_AdminService_ListNodeExecutions_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AdminService_ListNodeExecutions_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } - mux.Handle("POST", pattern_AdminService_CreateTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + forward_AdminService_ListNodeExecutions_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("GET", pattern_AdminService_ListNodeExecutionsForTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3042,18 +8014,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_CreateTask_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListNodeExecutionsForTask_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_CreateTask_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListNodeExecutionsForTask_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListNodeExecutionsForTask_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3062,18 +8034,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetTask_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListNodeExecutionsForTask_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetTask_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListNodeExecutionsForTask_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListTaskIds_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetNodeExecutionData_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3082,18 +8054,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListTaskIds_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetNodeExecutionData_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListTaskIds_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetNodeExecutionData_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListTasks_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetNodeExecutionData_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3102,18 +8074,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListTasks_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetNodeExecutionData_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListTasks_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetNodeExecutionData_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListTasks_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("POST", pattern_AdminService_RegisterProject_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3122,18 +8094,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListTasks_1(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_RegisterProject_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListTasks_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_RegisterProject_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("POST", pattern_AdminService_CreateWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("POST", pattern_AdminService_RegisterProject_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3142,18 +8114,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_CreateWorkflow_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_RegisterProject_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_CreateWorkflow_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_RegisterProject_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetWorkflow_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("PUT", pattern_AdminService_UpdateProject_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3162,18 +8134,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetWorkflow_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_UpdateProject_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetWorkflow_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_UpdateProject_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListWorkflowIds_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("PUT", pattern_AdminService_UpdateProject_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3182,18 +8154,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListWorkflowIds_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_UpdateProject_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListWorkflowIds_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_UpdateProject_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListWorkflows_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListProjects_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3202,18 +8174,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListWorkflows_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListProjects_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListWorkflows_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListProjects_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListWorkflows_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListProjects_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3222,18 +8194,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListWorkflows_1(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListProjects_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListWorkflows_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListProjects_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("POST", pattern_AdminService_CreateLaunchPlan_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("POST", pattern_AdminService_CreateWorkflowEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3242,18 +8214,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_CreateLaunchPlan_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_CreateWorkflowEvent_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_CreateLaunchPlan_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_CreateWorkflowEvent_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetLaunchPlan_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("POST", pattern_AdminService_CreateWorkflowEvent_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3262,18 +8234,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetLaunchPlan_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_CreateWorkflowEvent_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetLaunchPlan_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_CreateWorkflowEvent_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetActiveLaunchPlan_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("POST", pattern_AdminService_CreateNodeEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3282,18 +8254,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetActiveLaunchPlan_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_CreateNodeEvent_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetActiveLaunchPlan_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_CreateNodeEvent_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListActiveLaunchPlans_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("POST", pattern_AdminService_CreateNodeEvent_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3302,18 +8274,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListActiveLaunchPlans_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_CreateNodeEvent_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListActiveLaunchPlans_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_CreateNodeEvent_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListLaunchPlanIds_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("POST", pattern_AdminService_CreateTaskEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3322,18 +8294,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListLaunchPlanIds_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_CreateTaskEvent_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListLaunchPlanIds_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_CreateTaskEvent_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListLaunchPlans_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("POST", pattern_AdminService_CreateTaskEvent_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3342,18 +8314,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListLaunchPlans_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_CreateTaskEvent_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListLaunchPlans_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_CreateTaskEvent_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListLaunchPlans_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetTaskExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3362,18 +8334,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListLaunchPlans_1(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetTaskExecution_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListLaunchPlans_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetTaskExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("PUT", pattern_AdminService_UpdateLaunchPlan_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetTaskExecution_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3382,18 +8354,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_UpdateLaunchPlan_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetTaskExecution_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_UpdateLaunchPlan_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetTaskExecution_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("POST", pattern_AdminService_CreateExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListTaskExecutions_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3402,18 +8374,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_CreateExecution_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListTaskExecutions_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_CreateExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListTaskExecutions_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("POST", pattern_AdminService_RelaunchExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListTaskExecutions_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3422,18 +8394,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_RelaunchExecution_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListTaskExecutions_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_RelaunchExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListTaskExecutions_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("POST", pattern_AdminService_RecoverExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetTaskExecutionData_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3442,18 +8414,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_RecoverExecution_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetTaskExecutionData_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_RecoverExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetTaskExecutionData_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetTaskExecutionData_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3462,18 +8434,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetExecution_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetTaskExecutionData_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetTaskExecutionData_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("PUT", pattern_AdminService_UpdateExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("PUT", pattern_AdminService_UpdateProjectDomainAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3482,18 +8454,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_UpdateExecution_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_UpdateProjectDomainAttributes_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_UpdateExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_UpdateProjectDomainAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetExecutionData_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("PUT", pattern_AdminService_UpdateProjectDomainAttributes_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3502,18 +8474,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetExecutionData_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_UpdateProjectDomainAttributes_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetExecutionData_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_UpdateProjectDomainAttributes_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListExecutions_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetProjectDomainAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3522,18 +8494,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListExecutions_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetProjectDomainAttributes_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListExecutions_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetProjectDomainAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("DELETE", pattern_AdminService_TerminateExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetProjectDomainAttributes_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3542,18 +8514,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_TerminateExecution_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetProjectDomainAttributes_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_TerminateExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetProjectDomainAttributes_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetNodeExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("DELETE", pattern_AdminService_DeleteProjectDomainAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3562,18 +8534,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetNodeExecution_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_DeleteProjectDomainAttributes_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetNodeExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_DeleteProjectDomainAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListNodeExecutions_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("DELETE", pattern_AdminService_DeleteProjectDomainAttributes_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3582,18 +8554,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListNodeExecutions_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_DeleteProjectDomainAttributes_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListNodeExecutions_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_DeleteProjectDomainAttributes_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListNodeExecutionsForTask_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("PUT", pattern_AdminService_UpdateProjectAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3602,18 +8574,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListNodeExecutionsForTask_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_UpdateProjectAttributes_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListNodeExecutionsForTask_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_UpdateProjectAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetNodeExecutionData_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("PUT", pattern_AdminService_UpdateProjectAttributes_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3622,18 +8594,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetNodeExecutionData_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_UpdateProjectAttributes_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetNodeExecutionData_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_UpdateProjectAttributes_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("POST", pattern_AdminService_RegisterProject_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetProjectAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3642,18 +8614,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_RegisterProject_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetProjectAttributes_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_RegisterProject_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetProjectAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("PUT", pattern_AdminService_UpdateProject_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetProjectAttributes_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3662,18 +8634,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_UpdateProject_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetProjectAttributes_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_UpdateProject_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetProjectAttributes_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListProjects_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("DELETE", pattern_AdminService_DeleteProjectAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3682,18 +8654,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListProjects_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_DeleteProjectAttributes_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListProjects_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_DeleteProjectAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("POST", pattern_AdminService_CreateWorkflowEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("DELETE", pattern_AdminService_DeleteProjectAttributes_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3702,18 +8674,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_CreateWorkflowEvent_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_DeleteProjectAttributes_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_CreateWorkflowEvent_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_DeleteProjectAttributes_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("POST", pattern_AdminService_CreateNodeEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("PUT", pattern_AdminService_UpdateWorkflowAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3722,18 +8694,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_CreateNodeEvent_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_UpdateWorkflowAttributes_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_CreateNodeEvent_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_UpdateWorkflowAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("POST", pattern_AdminService_CreateTaskEvent_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("PUT", pattern_AdminService_UpdateWorkflowAttributes_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3742,18 +8714,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_CreateTaskEvent_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_UpdateWorkflowAttributes_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_CreateTaskEvent_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_UpdateWorkflowAttributes_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetTaskExecution_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetWorkflowAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3762,18 +8734,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetTaskExecution_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetWorkflowAttributes_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetTaskExecution_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetWorkflowAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListTaskExecutions_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetWorkflowAttributes_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3782,18 +8754,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListTaskExecutions_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetWorkflowAttributes_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListTaskExecutions_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetWorkflowAttributes_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetTaskExecutionData_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("DELETE", pattern_AdminService_DeleteWorkflowAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3802,18 +8774,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetTaskExecutionData_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_DeleteWorkflowAttributes_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetTaskExecutionData_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_DeleteWorkflowAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("PUT", pattern_AdminService_UpdateProjectDomainAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("DELETE", pattern_AdminService_DeleteWorkflowAttributes_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3822,18 +8794,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_UpdateProjectDomainAttributes_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_DeleteWorkflowAttributes_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_UpdateProjectDomainAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_DeleteWorkflowAttributes_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetProjectDomainAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListMatchableAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3842,18 +8814,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetProjectDomainAttributes_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListMatchableAttributes_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetProjectDomainAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListMatchableAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("DELETE", pattern_AdminService_DeleteProjectDomainAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListMatchableAttributes_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3862,18 +8834,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_DeleteProjectDomainAttributes_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListMatchableAttributes_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_DeleteProjectDomainAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListMatchableAttributes_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("PUT", pattern_AdminService_UpdateProjectAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListNamedEntities_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3882,18 +8854,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_UpdateProjectAttributes_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListNamedEntities_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_UpdateProjectAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListNamedEntities_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetProjectAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListNamedEntities_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3902,18 +8874,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetProjectAttributes_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListNamedEntities_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetProjectAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListNamedEntities_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("DELETE", pattern_AdminService_DeleteProjectAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetNamedEntity_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3922,18 +8894,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_DeleteProjectAttributes_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetNamedEntity_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_DeleteProjectAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetNamedEntity_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("PUT", pattern_AdminService_UpdateWorkflowAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetNamedEntity_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3942,18 +8914,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_UpdateWorkflowAttributes_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetNamedEntity_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_UpdateWorkflowAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetNamedEntity_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetWorkflowAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("PUT", pattern_AdminService_UpdateNamedEntity_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3962,18 +8934,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetWorkflowAttributes_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_UpdateNamedEntity_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetWorkflowAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_UpdateNamedEntity_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("DELETE", pattern_AdminService_DeleteWorkflowAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("PUT", pattern_AdminService_UpdateNamedEntity_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -3982,18 +8954,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_DeleteWorkflowAttributes_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_UpdateNamedEntity_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_DeleteWorkflowAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_UpdateNamedEntity_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListMatchableAttributes_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -4002,18 +8974,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListMatchableAttributes_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetVersion_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListMatchableAttributes_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListNamedEntities_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetDescriptionEntity_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -4022,18 +8994,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListNamedEntities_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetDescriptionEntity_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListNamedEntities_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetDescriptionEntity_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetNamedEntity_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetDescriptionEntity_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -4042,18 +9014,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetNamedEntity_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetDescriptionEntity_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetNamedEntity_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetDescriptionEntity_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("PUT", pattern_AdminService_UpdateNamedEntity_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListDescriptionEntities_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -4062,18 +9034,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_UpdateNamedEntity_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListDescriptionEntities_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_UpdateNamedEntity_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListDescriptionEntities_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListDescriptionEntities_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -4082,18 +9054,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetVersion_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListDescriptionEntities_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListDescriptionEntities_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetDescriptionEntity_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListDescriptionEntities_2, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -4102,18 +9074,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetDescriptionEntity_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListDescriptionEntities_2(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetDescriptionEntity_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListDescriptionEntities_2(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListDescriptionEntities_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_ListDescriptionEntities_3, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -4122,18 +9094,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListDescriptionEntities_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_ListDescriptionEntities_3(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListDescriptionEntities_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_ListDescriptionEntities_3(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_ListDescriptionEntities_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetExecutionMetrics_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -4142,18 +9114,18 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_ListDescriptionEntities_1(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetExecutionMetrics_0(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_ListDescriptionEntities_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetExecutionMetrics_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) - mux.Handle("GET", pattern_AdminService_GetExecutionMetrics_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + mux.Handle("GET", pattern_AdminService_GetExecutionMetrics_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) @@ -4162,14 +9134,14 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - resp, md, err := request_AdminService_GetExecutionMetrics_0(rctx, inboundMarshaler, client, req, pathParams) + resp, md, err := request_AdminService_GetExecutionMetrics_1(rctx, inboundMarshaler, client, req, pathParams) ctx = runtime.NewServerMetadataContext(ctx, md) if err != nil { runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) return } - forward_AdminService_GetExecutionMetrics_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + forward_AdminService_GetExecutionMetrics_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
}) @@ -4179,231 +9151,463 @@ func RegisterAdminServiceHandlerClient(ctx context.Context, mux *runtime.ServeMu var ( pattern_AdminService_CreateTask_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "tasks"}, "")) + pattern_AdminService_CreateTask_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "tasks", "org", "id.org"}, "")) + pattern_AdminService_GetTask_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "tasks", "id.project", "id.domain", "id.name", "id.version"}, "")) + pattern_AdminService_GetTask_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"api", "v1", "tasks", "org", "id.org", "id.project", "id.domain", "id.name", "id.version"}, "")) + pattern_AdminService_ListTaskIds_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "task_ids", "project", "domain"}, "")) + pattern_AdminService_ListTaskIds_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "tasks", "org", "project", "domain"}, "")) + pattern_AdminService_ListTasks_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "tasks", "id.project", "id.domain", "id.name"}, "")) - pattern_AdminService_ListTasks_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "tasks", "id.project", "id.domain"}, "")) + pattern_AdminService_ListTasks_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 
5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "tasks", "org", "id.org", "id.project", "id.domain", "id.name"}, "")) + + pattern_AdminService_ListTasks_2 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "tasks", "id.project", "id.domain"}, "")) + + pattern_AdminService_ListTasks_3 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "tasks", "org", "id.org", "id.project", "id.domain"}, "")) pattern_AdminService_CreateWorkflow_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "workflows"}, "")) + pattern_AdminService_CreateWorkflow_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "workflows", "org", "id.org"}, "")) + pattern_AdminService_GetWorkflow_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "workflows", "id.project", "id.domain", "id.name", "id.version"}, "")) + pattern_AdminService_GetWorkflow_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"api", "v1", "workflows", "org", "id.org", "id.project", "id.domain", "id.name", "id.version"}, "")) + pattern_AdminService_ListWorkflowIds_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "workflow_ids", "project", "domain"}, "")) + pattern_AdminService_ListWorkflowIds_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "workflows", "org", "project", "domain"}, "")) + 
pattern_AdminService_ListWorkflows_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "workflows", "id.project", "id.domain", "id.name"}, "")) - pattern_AdminService_ListWorkflows_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "workflows", "id.project", "id.domain"}, "")) + pattern_AdminService_ListWorkflows_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "workflows", "org", "id.org", "id.project", "id.domain", "id.name"}, "")) + + pattern_AdminService_ListWorkflows_2 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "workflows", "id.project", "id.domain"}, "")) + + pattern_AdminService_ListWorkflows_3 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "workflows", "org", "id.org", "id.project", "id.domain"}, "")) pattern_AdminService_CreateLaunchPlan_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "launch_plans"}, "")) + pattern_AdminService_CreateLaunchPlan_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "launch_plans", "org", "id.org"}, "")) + pattern_AdminService_GetLaunchPlan_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "launch_plans", "id.project", "id.domain", "id.name", "id.version"}, "")) + pattern_AdminService_GetLaunchPlan_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 
4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"api", "v1", "launch_plans", "org", "id.org", "id.project", "id.domain", "id.name", "id.version"}, "")) + pattern_AdminService_GetActiveLaunchPlan_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "active_launch_plans", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_GetActiveLaunchPlan_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "active_launch_plans", "org", "id.org", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_ListActiveLaunchPlans_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "active_launch_plans", "project", "domain"}, "")) + pattern_AdminService_ListActiveLaunchPlans_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "active_launch_plans", "org", "project", "domain"}, "")) + pattern_AdminService_ListLaunchPlanIds_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "launch_plan_ids", "project", "domain"}, "")) + pattern_AdminService_ListLaunchPlanIds_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "launch_plan_ids", "org", "project", "domain"}, "")) + pattern_AdminService_ListLaunchPlans_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "launch_plans", "id.project", "id.domain", "id.name"}, "")) - pattern_AdminService_ListLaunchPlans_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 
0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "launch_plans", "id.project", "id.domain"}, "")) + pattern_AdminService_ListLaunchPlans_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "launch_plans", "org", "id.org", "id.project", "id.domain", "id.name"}, "")) + + pattern_AdminService_ListLaunchPlans_2 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "launch_plans", "id.project", "id.domain"}, "")) + + pattern_AdminService_ListLaunchPlans_3 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "launch_plans", "org", "id.org", "id.project", "id.domain"}, "")) pattern_AdminService_UpdateLaunchPlan_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "launch_plans", "id.project", "id.domain", "id.name", "id.version"}, "")) + pattern_AdminService_UpdateLaunchPlan_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"api", "v1", "launch_plans", "org", "id.org", "id.project", "id.domain", "id.name", "id.version"}, "")) + pattern_AdminService_CreateExecution_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "executions"}, "")) + pattern_AdminService_CreateExecution_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3}, []string{"api", "v1", "executions", "org"}, "")) + pattern_AdminService_RelaunchExecution_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"api", "v1", "executions", "relaunch"}, "")) + 
pattern_AdminService_RelaunchExecution_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"api", "v1", "executions", "org", "id.org", "relaunch"}, "")) + pattern_AdminService_RecoverExecution_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"api", "v1", "executions", "recover"}, "")) + pattern_AdminService_RecoverExecution_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"api", "v1", "executions", "org", "id.org", "recover"}, "")) + pattern_AdminService_GetExecution_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "executions", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_GetExecution_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "executions", "org", "id.org", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_UpdateExecution_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "executions", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_UpdateExecution_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "executions", "org", "id.org", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_GetExecutionData_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "data", "executions", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_GetExecutionData_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 
0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"api", "v1", "data", "executions", "org", "id.org", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_ListExecutions_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "executions", "id.project", "id.domain"}, "")) + pattern_AdminService_ListExecutions_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "executions", "org", "id.org", "id.project", "id.domain"}, "")) + pattern_AdminService_TerminateExecution_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "executions", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_TerminateExecution_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "executions", "org", "id.org", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_GetNodeExecution_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "node_executions", "id.execution_id.project", "id.execution_id.domain", "id.execution_id.name", "id.node_id"}, "")) + pattern_AdminService_GetNodeExecution_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"api", "v1", "node_executions", "org", "id.execution_id.org", "id.execution_id.project", "id.execution_id.domain", "id.execution_id.name", "id.node_id"}, "")) + + pattern_AdminService_GetDynamicNodeWorkflow_0 = runtime.MustPattern(runtime.NewPattern(1, 
[]int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 2, 7}, []string{"api", "v1", "node_executions", "id.execution_id.project", "id.execution_id.domain", "id.execution_id.name", "id.node_id", "dynamic_workflow"}, "")) + + pattern_AdminService_GetDynamicNodeWorkflow_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8, 2, 9}, []string{"api", "v1", "node_executions", "org", "id.execution_id.org", "id.execution_id.project", "id.execution_id.domain", "id.execution_id.name", "id.node_id", "dynamic_workflow"}, "")) + pattern_AdminService_ListNodeExecutions_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "node_executions", "workflow_execution_id.project", "workflow_execution_id.domain", "workflow_execution_id.name"}, "")) + pattern_AdminService_ListNodeExecutions_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "node_executions", "org", "workflow_execution_id.org", "workflow_execution_id.project", "workflow_execution_id.domain", "workflow_execution_id.name"}, "")) + pattern_AdminService_ListNodeExecutionsForTask_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8, 1, 0, 4, 1, 5, 9, 1, 0, 4, 1, 5, 10, 1, 0, 4, 1, 5, 11, 1, 0, 4, 1, 5, 12}, []string{"api", "v1", "children", "task_executions", "task_execution_id.node_execution_id.execution_id.project", "task_execution_id.node_execution_id.execution_id.domain", "task_execution_id.node_execution_id.execution_id.name", "task_execution_id.node_execution_id.node_id", "task_execution_id.task_id.project", "task_execution_id.task_id.domain", 
"task_execution_id.task_id.name", "task_execution_id.task_id.version", "task_execution_id.retry_attempt"}, "")) + pattern_AdminService_ListNodeExecutionsForTask_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8, 1, 0, 4, 1, 5, 9, 1, 0, 4, 1, 5, 10, 1, 0, 4, 1, 5, 11, 1, 0, 4, 1, 5, 12, 1, 0, 4, 1, 5, 13, 1, 0, 4, 1, 5, 14}, []string{"api", "v1", "children", "org", "task_execution_id.node_execution_id.execution_id.org", "task_executions", "task_execution_id.node_execution_id.execution_id.project", "task_execution_id.node_execution_id.execution_id.domain", "task_execution_id.node_execution_id.execution_id.name", "task_execution_id.node_execution_id.node_id", "task_execution_id.task_id.project", "task_execution_id.task_id.domain", "task_execution_id.task_id.name", "task_execution_id.task_id.version", "task_execution_id.retry_attempt"}, "")) + pattern_AdminService_GetNodeExecutionData_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "data", "node_executions", "id.execution_id.project", "id.execution_id.domain", "id.execution_id.name", "id.node_id"}, "")) + pattern_AdminService_GetNodeExecutionData_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8, 1, 0, 4, 1, 5, 9}, []string{"api", "v1", "data", "org", "id.execution_id.org", "node_executions", "id.execution_id.project", "id.execution_id.domain", "id.execution_id.name", "id.node_id"}, "")) + pattern_AdminService_RegisterProject_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "projects"}, "")) + pattern_AdminService_RegisterProject_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", 
"projects", "org", "project.org"}, "")) + pattern_AdminService_UpdateProject_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"api", "v1", "projects", "id"}, "")) + pattern_AdminService_UpdateProject_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "projects", "org", "id"}, "")) + pattern_AdminService_ListProjects_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "projects"}, "")) + pattern_AdminService_ListProjects_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3}, []string{"api", "v1", "projects", "org"}, "")) + pattern_AdminService_CreateWorkflowEvent_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"api", "v1", "events", "workflows"}, "")) + pattern_AdminService_CreateWorkflowEvent_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"api", "v1", "events", "org", "event.execution_id.org", "workflows"}, "")) + pattern_AdminService_CreateNodeEvent_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"api", "v1", "events", "nodes"}, "")) + pattern_AdminService_CreateNodeEvent_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"api", "v1", "events", "org", "event.id.execution_id.org", "nodes"}, "")) + pattern_AdminService_CreateTaskEvent_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"api", "v1", "events", "tasks"}, "")) + pattern_AdminService_CreateTaskEvent_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5}, []string{"api", "v1", "events", "org", "event.parent_node_execution_id.execution_id.org", "tasks"}, "")) + pattern_AdminService_GetTaskExecution_0 = 
runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8, 1, 0, 4, 1, 5, 9, 1, 0, 4, 1, 5, 10, 1, 0, 4, 1, 5, 11}, []string{"api", "v1", "task_executions", "id.node_execution_id.execution_id.project", "id.node_execution_id.execution_id.domain", "id.node_execution_id.execution_id.name", "id.node_execution_id.node_id", "id.task_id.project", "id.task_id.domain", "id.task_id.name", "id.task_id.version", "id.retry_attempt"}, "")) + pattern_AdminService_GetTaskExecution_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8, 1, 0, 4, 1, 5, 9, 1, 0, 4, 1, 5, 10, 1, 0, 4, 1, 5, 11, 1, 0, 4, 1, 5, 12, 1, 0, 4, 1, 5, 13}, []string{"api", "v1", "task_executions", "org", "id.node_execution_id.execution_id.org", "id.node_execution_id.execution_id.project", "id.node_execution_id.execution_id.domain", "id.node_execution_id.execution_id.name", "id.node_execution_id.node_id", "id.task_id.project", "id.task_id.domain", "id.task_id.name", "id.task_id.version", "id.retry_attempt"}, "")) + pattern_AdminService_ListTaskExecutions_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "task_executions", "node_execution_id.execution_id.project", "node_execution_id.execution_id.domain", "node_execution_id.execution_id.name", "node_execution_id.node_id"}, "")) + pattern_AdminService_ListTaskExecutions_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"api", "v1", "task_executions", "org", "node_execution_id.execution_id.org", "node_execution_id.execution_id.project", "node_execution_id.execution_id.domain", 
"node_execution_id.execution_id.name", "node_execution_id.node_id"}, "")) + pattern_AdminService_GetTaskExecutionData_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8, 1, 0, 4, 1, 5, 9, 1, 0, 4, 1, 5, 10, 1, 0, 4, 1, 5, 11, 1, 0, 4, 1, 5, 12}, []string{"api", "v1", "data", "task_executions", "id.node_execution_id.execution_id.project", "id.node_execution_id.execution_id.domain", "id.node_execution_id.execution_id.name", "id.node_execution_id.node_id", "id.task_id.project", "id.task_id.domain", "id.task_id.name", "id.task_id.version", "id.retry_attempt"}, "")) + pattern_AdminService_GetTaskExecutionData_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 2, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8, 1, 0, 4, 1, 5, 9, 1, 0, 4, 1, 5, 10, 1, 0, 4, 1, 5, 11, 1, 0, 4, 1, 5, 12, 1, 0, 4, 1, 5, 13, 1, 0, 4, 1, 5, 14}, []string{"api", "v1", "data", "org", "id.node_execution_id.execution_id.org", "task_executions", "id.node_execution_id.execution_id.project", "id.node_execution_id.execution_id.domain", "id.node_execution_id.execution_id.name", "id.node_execution_id.node_id", "id.task_id.project", "id.task_id.domain", "id.task_id.name", "id.task_id.version", "id.retry_attempt"}, "")) + pattern_AdminService_UpdateProjectDomainAttributes_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "project_domain_attributes", "attributes.project", "attributes.domain"}, "")) + pattern_AdminService_UpdateProjectDomainAttributes_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "project_domain_attributes", "org", "attributes.org", "attributes.project", "attributes.domain"}, "")) + pattern_AdminService_GetProjectDomainAttributes_0 = 
runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "project_domain_attributes", "project", "domain"}, "")) + pattern_AdminService_GetProjectDomainAttributes_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "project_domain_attributes", "org", "project", "domain"}, "")) + pattern_AdminService_DeleteProjectDomainAttributes_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "project_domain_attributes", "project", "domain"}, "")) + pattern_AdminService_DeleteProjectDomainAttributes_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "project_domain_attributes", "org", "project", "domain"}, "")) + pattern_AdminService_UpdateProjectAttributes_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"api", "v1", "project_attributes", "attributes.project"}, "")) + pattern_AdminService_UpdateProjectAttributes_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "project_domain_attributes", "org", "attributes.org", "attributes.project"}, "")) + pattern_AdminService_GetProjectAttributes_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"api", "v1", "project_attributes", "project"}, "")) + pattern_AdminService_GetProjectAttributes_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "project_domain_attributes", "org", "project"}, "")) + pattern_AdminService_DeleteProjectAttributes_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, 
[]string{"api", "v1", "project_attributes", "project"}, "")) + pattern_AdminService_DeleteProjectAttributes_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "project_domain_attributes", "org", "project"}, "")) + pattern_AdminService_UpdateWorkflowAttributes_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "workflow_attributes", "attributes.project", "attributes.domain", "attributes.workflow"}, "")) + pattern_AdminService_UpdateWorkflowAttributes_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "workflow_attributes", "org", "attributes.org", "attributes.project", "attributes.domain", "attributes.workflow"}, "")) + pattern_AdminService_GetWorkflowAttributes_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "workflow_attributes", "project", "domain", "workflow"}, "")) + pattern_AdminService_GetWorkflowAttributes_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "workflow_attributes", "org", "project", "domain", "workflow"}, "")) + pattern_AdminService_DeleteWorkflowAttributes_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "workflow_attributes", "project", "domain", "workflow"}, "")) + pattern_AdminService_DeleteWorkflowAttributes_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "workflow_attributes", "org", "project", "domain", 
"workflow"}, "")) + pattern_AdminService_ListMatchableAttributes_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "matchable_attributes"}, "")) + pattern_AdminService_ListMatchableAttributes_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3}, []string{"api", "v1", "matchable_attributes", "org"}, "")) + pattern_AdminService_ListNamedEntities_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "named_entities", "resource_type", "project", "domain"}, "")) + pattern_AdminService_ListNamedEntities_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "named_entities", "org", "resource_type", "project", "domain"}, "")) + pattern_AdminService_GetNamedEntity_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "named_entities", "resource_type", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_GetNamedEntity_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"api", "v1", "named_entities", "org", "id.org", "resource_type", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_UpdateNamedEntity_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "named_entities", "resource_type", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_UpdateNamedEntity_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 
1, 0, 4, 1, 5, 8}, []string{"api", "v1", "named_entities", "org", "id.org", "resource_type", "id.project", "id.domain", "id.name"}, "")) + pattern_AdminService_GetVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "version"}, "")) pattern_AdminService_GetDescriptionEntity_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "description_entities", "id.resource_type", "id.project", "id.domain", "id.name", "id.version"}, "")) + pattern_AdminService_GetDescriptionEntity_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8, 1, 0, 4, 1, 5, 9}, []string{"api", "v1", "description_entities", "org", "id.org", "id.resource_type", "id.project", "id.domain", "id.name", "id.version"}, "")) + pattern_AdminService_ListDescriptionEntities_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "description_entities", "resource_type", "id.project", "id.domain", "id.name"}, "")) - pattern_AdminService_ListDescriptionEntities_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "description_entities", "resource_type", "id.project", "id.domain"}, "")) + pattern_AdminService_ListDescriptionEntities_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"api", "v1", "description_entities", "org", "id.org", "resource_type", "id.project", "id.domain", "id.name"}, "")) + + pattern_AdminService_ListDescriptionEntities_2 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 
1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "description_entities", "resource_type", "id.project", "id.domain"}, "")) + + pattern_AdminService_ListDescriptionEntities_3 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "description_entities", "org", "id.org", "resource_type", "id.project", "id.domain"}, "")) pattern_AdminService_GetExecutionMetrics_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6}, []string{"api", "v1", "metrics", "executions", "id.project", "id.domain", "id.name"}, "")) + + pattern_AdminService_GetExecutionMetrics_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 2, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7, 1, 0, 4, 1, 5, 8}, []string{"api", "v1", "metrics", "executions", "org", "id.org", "id.project", "id.domain", "id.name"}, "")) ) var ( forward_AdminService_CreateTask_0 = runtime.ForwardResponseMessage + forward_AdminService_CreateTask_1 = runtime.ForwardResponseMessage + forward_AdminService_GetTask_0 = runtime.ForwardResponseMessage + forward_AdminService_GetTask_1 = runtime.ForwardResponseMessage + forward_AdminService_ListTaskIds_0 = runtime.ForwardResponseMessage + forward_AdminService_ListTaskIds_1 = runtime.ForwardResponseMessage + forward_AdminService_ListTasks_0 = runtime.ForwardResponseMessage forward_AdminService_ListTasks_1 = runtime.ForwardResponseMessage + forward_AdminService_ListTasks_2 = runtime.ForwardResponseMessage + + forward_AdminService_ListTasks_3 = runtime.ForwardResponseMessage + forward_AdminService_CreateWorkflow_0 = runtime.ForwardResponseMessage + forward_AdminService_CreateWorkflow_1 = runtime.ForwardResponseMessage + forward_AdminService_GetWorkflow_0 = runtime.ForwardResponseMessage + forward_AdminService_GetWorkflow_1 = 
runtime.ForwardResponseMessage + forward_AdminService_ListWorkflowIds_0 = runtime.ForwardResponseMessage + forward_AdminService_ListWorkflowIds_1 = runtime.ForwardResponseMessage + forward_AdminService_ListWorkflows_0 = runtime.ForwardResponseMessage forward_AdminService_ListWorkflows_1 = runtime.ForwardResponseMessage + forward_AdminService_ListWorkflows_2 = runtime.ForwardResponseMessage + + forward_AdminService_ListWorkflows_3 = runtime.ForwardResponseMessage + forward_AdminService_CreateLaunchPlan_0 = runtime.ForwardResponseMessage + forward_AdminService_CreateLaunchPlan_1 = runtime.ForwardResponseMessage + forward_AdminService_GetLaunchPlan_0 = runtime.ForwardResponseMessage + forward_AdminService_GetLaunchPlan_1 = runtime.ForwardResponseMessage + forward_AdminService_GetActiveLaunchPlan_0 = runtime.ForwardResponseMessage + forward_AdminService_GetActiveLaunchPlan_1 = runtime.ForwardResponseMessage + forward_AdminService_ListActiveLaunchPlans_0 = runtime.ForwardResponseMessage + forward_AdminService_ListActiveLaunchPlans_1 = runtime.ForwardResponseMessage + forward_AdminService_ListLaunchPlanIds_0 = runtime.ForwardResponseMessage + forward_AdminService_ListLaunchPlanIds_1 = runtime.ForwardResponseMessage + forward_AdminService_ListLaunchPlans_0 = runtime.ForwardResponseMessage forward_AdminService_ListLaunchPlans_1 = runtime.ForwardResponseMessage + forward_AdminService_ListLaunchPlans_2 = runtime.ForwardResponseMessage + + forward_AdminService_ListLaunchPlans_3 = runtime.ForwardResponseMessage + forward_AdminService_UpdateLaunchPlan_0 = runtime.ForwardResponseMessage + forward_AdminService_UpdateLaunchPlan_1 = runtime.ForwardResponseMessage + forward_AdminService_CreateExecution_0 = runtime.ForwardResponseMessage + forward_AdminService_CreateExecution_1 = runtime.ForwardResponseMessage + forward_AdminService_RelaunchExecution_0 = runtime.ForwardResponseMessage + forward_AdminService_RelaunchExecution_1 = runtime.ForwardResponseMessage + 
forward_AdminService_RecoverExecution_0 = runtime.ForwardResponseMessage + forward_AdminService_RecoverExecution_1 = runtime.ForwardResponseMessage + forward_AdminService_GetExecution_0 = runtime.ForwardResponseMessage + forward_AdminService_GetExecution_1 = runtime.ForwardResponseMessage + forward_AdminService_UpdateExecution_0 = runtime.ForwardResponseMessage + forward_AdminService_UpdateExecution_1 = runtime.ForwardResponseMessage + forward_AdminService_GetExecutionData_0 = runtime.ForwardResponseMessage + forward_AdminService_GetExecutionData_1 = runtime.ForwardResponseMessage + forward_AdminService_ListExecutions_0 = runtime.ForwardResponseMessage + forward_AdminService_ListExecutions_1 = runtime.ForwardResponseMessage + forward_AdminService_TerminateExecution_0 = runtime.ForwardResponseMessage + forward_AdminService_TerminateExecution_1 = runtime.ForwardResponseMessage + forward_AdminService_GetNodeExecution_0 = runtime.ForwardResponseMessage + forward_AdminService_GetNodeExecution_1 = runtime.ForwardResponseMessage + + forward_AdminService_GetDynamicNodeWorkflow_0 = runtime.ForwardResponseMessage + + forward_AdminService_GetDynamicNodeWorkflow_1 = runtime.ForwardResponseMessage + forward_AdminService_ListNodeExecutions_0 = runtime.ForwardResponseMessage + forward_AdminService_ListNodeExecutions_1 = runtime.ForwardResponseMessage + forward_AdminService_ListNodeExecutionsForTask_0 = runtime.ForwardResponseMessage + forward_AdminService_ListNodeExecutionsForTask_1 = runtime.ForwardResponseMessage + forward_AdminService_GetNodeExecutionData_0 = runtime.ForwardResponseMessage + forward_AdminService_GetNodeExecutionData_1 = runtime.ForwardResponseMessage + forward_AdminService_RegisterProject_0 = runtime.ForwardResponseMessage + forward_AdminService_RegisterProject_1 = runtime.ForwardResponseMessage + forward_AdminService_UpdateProject_0 = runtime.ForwardResponseMessage + forward_AdminService_UpdateProject_1 = runtime.ForwardResponseMessage + 
forward_AdminService_ListProjects_0 = runtime.ForwardResponseMessage + forward_AdminService_ListProjects_1 = runtime.ForwardResponseMessage + forward_AdminService_CreateWorkflowEvent_0 = runtime.ForwardResponseMessage + forward_AdminService_CreateWorkflowEvent_1 = runtime.ForwardResponseMessage + forward_AdminService_CreateNodeEvent_0 = runtime.ForwardResponseMessage + forward_AdminService_CreateNodeEvent_1 = runtime.ForwardResponseMessage + forward_AdminService_CreateTaskEvent_0 = runtime.ForwardResponseMessage + forward_AdminService_CreateTaskEvent_1 = runtime.ForwardResponseMessage + forward_AdminService_GetTaskExecution_0 = runtime.ForwardResponseMessage + forward_AdminService_GetTaskExecution_1 = runtime.ForwardResponseMessage + forward_AdminService_ListTaskExecutions_0 = runtime.ForwardResponseMessage + forward_AdminService_ListTaskExecutions_1 = runtime.ForwardResponseMessage + forward_AdminService_GetTaskExecutionData_0 = runtime.ForwardResponseMessage + forward_AdminService_GetTaskExecutionData_1 = runtime.ForwardResponseMessage + forward_AdminService_UpdateProjectDomainAttributes_0 = runtime.ForwardResponseMessage + forward_AdminService_UpdateProjectDomainAttributes_1 = runtime.ForwardResponseMessage + forward_AdminService_GetProjectDomainAttributes_0 = runtime.ForwardResponseMessage + forward_AdminService_GetProjectDomainAttributes_1 = runtime.ForwardResponseMessage + forward_AdminService_DeleteProjectDomainAttributes_0 = runtime.ForwardResponseMessage + forward_AdminService_DeleteProjectDomainAttributes_1 = runtime.ForwardResponseMessage + forward_AdminService_UpdateProjectAttributes_0 = runtime.ForwardResponseMessage + forward_AdminService_UpdateProjectAttributes_1 = runtime.ForwardResponseMessage + forward_AdminService_GetProjectAttributes_0 = runtime.ForwardResponseMessage + forward_AdminService_GetProjectAttributes_1 = runtime.ForwardResponseMessage + forward_AdminService_DeleteProjectAttributes_0 = runtime.ForwardResponseMessage + 
forward_AdminService_DeleteProjectAttributes_1 = runtime.ForwardResponseMessage + forward_AdminService_UpdateWorkflowAttributes_0 = runtime.ForwardResponseMessage + forward_AdminService_UpdateWorkflowAttributes_1 = runtime.ForwardResponseMessage + forward_AdminService_GetWorkflowAttributes_0 = runtime.ForwardResponseMessage + forward_AdminService_GetWorkflowAttributes_1 = runtime.ForwardResponseMessage + forward_AdminService_DeleteWorkflowAttributes_0 = runtime.ForwardResponseMessage + forward_AdminService_DeleteWorkflowAttributes_1 = runtime.ForwardResponseMessage + forward_AdminService_ListMatchableAttributes_0 = runtime.ForwardResponseMessage + forward_AdminService_ListMatchableAttributes_1 = runtime.ForwardResponseMessage + forward_AdminService_ListNamedEntities_0 = runtime.ForwardResponseMessage + forward_AdminService_ListNamedEntities_1 = runtime.ForwardResponseMessage + forward_AdminService_GetNamedEntity_0 = runtime.ForwardResponseMessage + forward_AdminService_GetNamedEntity_1 = runtime.ForwardResponseMessage + forward_AdminService_UpdateNamedEntity_0 = runtime.ForwardResponseMessage + forward_AdminService_UpdateNamedEntity_1 = runtime.ForwardResponseMessage + forward_AdminService_GetVersion_0 = runtime.ForwardResponseMessage forward_AdminService_GetDescriptionEntity_0 = runtime.ForwardResponseMessage + forward_AdminService_GetDescriptionEntity_1 = runtime.ForwardResponseMessage + forward_AdminService_ListDescriptionEntities_0 = runtime.ForwardResponseMessage forward_AdminService_ListDescriptionEntities_1 = runtime.ForwardResponseMessage + forward_AdminService_ListDescriptionEntities_2 = runtime.ForwardResponseMessage + + forward_AdminService_ListDescriptionEntities_3 = runtime.ForwardResponseMessage + forward_AdminService_GetExecutionMetrics_0 = runtime.ForwardResponseMessage + + forward_AdminService_GetExecutionMetrics_1 = runtime.ForwardResponseMessage ) diff --git a/flyteidl/gen/pb-go/flyteidl/service/admin.swagger.json 
b/flyteidl/gen/pb-go/flyteidl/service/admin.swagger.json index b2d41e5616..c1af15f895 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/admin.swagger.json +++ b/flyteidl/gen/pb-go/flyteidl/service/admin.swagger.json @@ -15,6 +15,127 @@ "application/json" ], "paths": { + "/api/v1/active_launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`.", + "operationId": "GetActiveLaunchPlan2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlan" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/active_launch_plans/org/{org}/{project}/{domain}": { + "get": { + "summary": "List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`.", + "operationId": "ListActiveLaunchPlans2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlanList" + } + } + }, + "parameters": [ + { + "name": "org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, 
+ { + "name": "project", + "description": "Name of the project that contains the identifiers.\n+required.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "domain", + "description": "Name of the domain the identifiers belongs to within the project.\n+required.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, "/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}": { "get": { "summary": "Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`.", @@ -48,6 +169,13 @@ "in": "path", "required": true, "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -115,6 +243,13 @@ "ASCENDING" ], "default": "DESCENDING" + }, + { + "name": "org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -122,10 +257,10 @@ ] } }, - 
"/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}": { + "/api/v1/children/org/{task_execution_id.node_execution_id.execution_id.org}/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}": { "get": { "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`.", - "operationId": "ListNodeExecutionsForTask", + "operationId": "ListNodeExecutionsForTask2", "responses": { "200": { "description": "A successful response.", @@ -135,6 +270,13 @@ } }, "parameters": [ + { + "name": "task_execution_id.node_execution_id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, { "name": "task_execution_id.node_execution_id.execution_id.project", "description": "Name of the project the resource belongs to.", @@ -212,6 +354,13 @@ ], "default": "UNSPECIFIED" }, + { + "name": "task_execution_id.task_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, { "name": "limit", "description": "Indicates the number of resources to be returned.\n+required.", @@ -259,169 +408,83 @@ ] } }, - 
"/api/v1/data/executions/{id.project}/{id.domain}/{id.name}": { - "get": { - "summary": "Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`.", - "operationId": "GetExecutionData", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/adminWorkflowExecutionGetDataResponse" - } - } - }, - "parameters": [ - { - "name": "id.project", - "description": "Name of the project the resource belongs to.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.name", - "description": "User or system provided value for the resource.", - "in": "path", - "required": true, - "type": "string" - } - ], - "tags": [ - "AdminService" - ] - } - }, - "/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}": { - "get": { - "summary": "Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`.", - "operationId": "GetNodeExecutionData", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/adminNodeExecutionGetDataResponse" - } - } - }, - "parameters": [ - { - "name": "id.execution_id.project", - "description": "Name of the project the resource belongs to.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.execution_id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.execution_id.name", - "description": "User or system provided value for the resource.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.node_id", - "in": 
"path", - "required": true, - "type": "string" - } - ], - "tags": [ - "AdminService" - ] - } - }, - "/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}": { + "/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}": { "get": { - "summary": "Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`.", - "operationId": "GetTaskExecutionData", + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`.", + "operationId": "ListNodeExecutionsForTask", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminTaskExecutionGetDataResponse" + "$ref": "#/definitions/adminNodeExecutionList" } } }, "parameters": [ { - "name": "id.node_execution_id.execution_id.project", + "name": "task_execution_id.node_execution_id.execution_id.project", "description": "Name of the project the resource belongs to.", "in": "path", "required": true, "type": "string" }, { - "name": "id.node_execution_id.execution_id.domain", + "name": "task_execution_id.node_execution_id.execution_id.domain", "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": true, "type": "string" }, { - "name": "id.node_execution_id.execution_id.name", + "name": 
"task_execution_id.node_execution_id.execution_id.name", "description": "User or system provided value for the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "id.node_execution_id.node_id", + "name": "task_execution_id.node_execution_id.node_id", "in": "path", "required": true, "type": "string" }, { - "name": "id.task_id.project", + "name": "task_execution_id.task_id.project", "description": "Name of the project the resource belongs to.", "in": "path", "required": true, "type": "string" }, { - "name": "id.task_id.domain", + "name": "task_execution_id.task_id.domain", "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": true, "type": "string" }, { - "name": "id.task_id.name", + "name": "task_execution_id.task_id.name", "description": "User provided value for the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "id.task_id.version", + "name": "task_execution_id.task_id.version", "description": "Specific version of the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "id.retry_attempt", + "name": "task_execution_id.retry_attempt", "in": "path", "required": true, "type": "integer", "format": "int64" }, { - "name": "id.task_id.resource_type", + "name": "task_execution_id.task_id.resource_type", "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", "in": "query", "required": false, @@ -434,39 +497,87 @@ "DATASET" ], "default": "UNSPECIFIED" - } - ], - "tags": [ - "AdminService" - ] - } - }, - "/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}": { + }, + { + "name": "task_execution_id.task_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "task_execution_id.node_execution_id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the, server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, + 
"/api/v1/data/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}": { "get": { - "summary": "Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object.", - "operationId": "GetDescriptionEntity", + "summary": "Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "GetExecutionData2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminDescriptionEntity" + "$ref": "#/definitions/adminWorkflowExecutionGetDataResponse" } } }, "parameters": [ { - "name": "id.resource_type", - "description": "Identifies the specific type of resource that this identifier corresponds to.", + "name": "id.org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, - "type": "string", - "enum": [ - "UNSPECIFIED", - "TASK", - "WORKFLOW", - "LAUNCH_PLAN", - "DATASET" - ] + "type": "string" }, { "name": "id.project", @@ -484,14 +595,7 @@ }, { "name": "id.name", - "description": "User provided value for the resource.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.version", - "description": "Specific version of the resource.", + "description": "User or system provided value for the resource.", "in": "path", "required": true, "type": "string" @@ -502,33 +606,19 @@ ] } }, - "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}": { + "/api/v1/data/executions/{id.project}/{id.domain}/{id.name}": { "get": { - "summary": "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions.", - "operationId": "ListDescriptionEntities2", + "summary": "Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "GetExecutionData", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminDescriptionEntityList" + "$ref": "#/definitions/adminWorkflowExecutionGetDataResponse" } } }, "parameters": [ - { - "name": "resource_type", - 
"description": "Identifies the specific type of resource that this identifier corresponds to.", - "in": "path", - "required": true, - "type": "string", - "enum": [ - "UNSPECIFIED", - "TASK", - "WORKFLOW", - "LAUNCH_PLAN", - "DATASET" - ] - }, { "name": "id.project", "description": "Name of the project the resource belongs to.", @@ -545,51 +635,70 @@ }, { "name": "id.name", - "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", - "in": "query", - "required": false, + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, "type": "string" }, { - "name": "limit", - "description": "Indicates the number of resources to be returned.\n+required.", + "name": "id.org", + "description": "Optional, org key applied to the resource.", "in": "query", "required": false, - "type": "integer", - "format": "int64" + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}": { + "get": { + "summary": "Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`.", + "operationId": "GetNodeExecutionData", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNodeExecutionGetDataResponse" + } + } + }, + "parameters": [ + { + "name": "id.execution_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" }, { - "name": "token", - "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", - "in": "query", - "required": false, + "name": "id.execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can 
be considered as a subset within a specific project.", + "in": "path", + "required": true, "type": "string" }, { - "name": "filters", - "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", - "in": "query", - "required": false, + "name": "id.execution_id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, "type": "string" }, { - "name": "sort_by.key", - "description": "Indicates an attribute to sort the response values.\n+required.", - "in": "query", - "required": false, + "name": "id.node_id", + "in": "path", + "required": true, "type": "string" }, { - "name": "sort_by.direction", - "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "name": "id.execution_id.org", + "description": "Optional, org key applied to the resource.", "in": "query", "required": false, - "type": "string", - "enum": [ - "DESCENDING", - "ASCENDING" - ], - "default": "DESCENDING" + "type": "string" } ], "tags": [ @@ -597,94 +706,52 @@ ] } }, - "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}": { + "/api/v1/data/org/{id.execution_id.org}/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}": { "get": { - "summary": "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions.", - "operationId": "ListDescriptionEntities", + "summary": "Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`.", + "operationId": "GetNodeExecutionData2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminDescriptionEntityList" + "$ref": "#/definitions/adminNodeExecutionGetDataResponse" } } }, "parameters": [ { - "name": "resource_type", - "description": "Identifies the specific type of resource that this 
identifier corresponds to.", + "name": "id.execution_id.org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, - "type": "string", - "enum": [ - "UNSPECIFIED", - "TASK", - "WORKFLOW", - "LAUNCH_PLAN", - "DATASET" - ] + "type": "string" }, { - "name": "id.project", + "name": "id.execution_id.project", "description": "Name of the project the resource belongs to.", "in": "path", "required": true, "type": "string" }, { - "name": "id.domain", + "name": "id.execution_id.domain", "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": true, "type": "string" }, { - "name": "id.name", - "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "name": "id.execution_id.name", + "description": "User or system provided value for the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "limit", - "description": "Indicates the number of resources to be returned.\n+required.", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "token", - "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "filters", - "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "sort_by.key", - "description": "Indicates an attribute to sort the response values.\n+required.", - "in": "query", - "required": false, + "name": "id.node_id", + "in": "path", + "required": true, "type": "string" - }, - { - "name": "sort_by.direction", - 
"description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", - "in": "query", - "required": false, - "type": "string", - "enum": [ - "DESCENDING", - "ASCENDING" - ], - "default": "DESCENDING" } ], "tags": [ @@ -692,52 +759,3485 @@ ] } }, - "/api/v1/events/nodes": { - "post": { - "summary": "Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred.", - "operationId": "CreateNodeEvent", + "/api/v1/data/org/{id.node_execution_id.execution_id.org}/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}": { + "get": { + "summary": "Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`.", + "operationId": "GetTaskExecutionData2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminNodeExecutionEventResponse" + "$ref": "#/definitions/adminTaskExecutionGetDataResponse" } } }, "parameters": [ { - "name": "body", - "in": "body", + "name": "id.node_execution_id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", "required": true, - "schema": { - "$ref": "#/definitions/adminNodeExecutionEventRequest" - } - } + "type": "string" + }, + { + "name": "id.node_execution_id.execution_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_execution_id.execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_execution_id.execution_id.name", + "description": 
"User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_execution_id.node_id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.name", + "description": "User provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.version", + "description": "Specific version of the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.retry_attempt", + "in": "path", + "required": true, + "type": "integer", + "format": "int64" + }, + { + "name": "id.task_id.resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ], + "default": "UNSPECIFIED" + }, + { + "name": "id.task_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}": { + "get": { + "summary": "Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`.", + "operationId": "GetTaskExecutionData", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminTaskExecutionGetDataResponse" + } + } + }, + "parameters": [ + { + "name": "id.node_execution_id.execution_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_execution_id.execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_execution_id.execution_id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_execution_id.node_id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": 
"string" + }, + { + "name": "id.task_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.name", + "description": "User provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.version", + "description": "Specific version of the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.retry_attempt", + "in": "path", + "required": true, + "type": "integer", + "format": "int64" + }, + { + "name": "id.task_id.resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ], + "default": "UNSPECIFIED" + }, + { + "name": "id.task_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "id.node_execution_id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/description_entities/org/{id.org}/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}": { + "get": { + "summary": "Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object.", + "operationId": "GetDescriptionEntity2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminDescriptionEntity" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.", + "in": "path", + 
"required": true, + "type": "string" + }, + { + "name": "id.version", + "description": "Specific version of the resource.", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions.", + "operationId": "ListDescriptionEntities4", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminDescriptionEntityList" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the 
server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions.", + "operationId": "ListDescriptionEntities2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminDescriptionEntityList" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs 
to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}": { + "get": { + "summary": "Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object.", + "operationId": "GetDescriptionEntity", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminDescriptionEntity" + } + } + }, + 
"parameters": [ + { + "name": "id.resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.version", + "description": "Specific version of the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions.", + "operationId": "ListDescriptionEntities3", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminDescriptionEntityList" + } + } + }, + "parameters": [ + { + "name": "resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + 
"description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` 
definitions.", + "operationId": "ListDescriptionEntities", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminDescriptionEntityList" + } + } + }, + "parameters": [ + { + "name": "resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": 
"Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/events/nodes": { + "post": { + "summary": "Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred.", + "operationId": "CreateNodeEvent", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNodeExecutionEventResponse" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminNodeExecutionEventRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/events/org/{event.execution_id.org}/workflows": { + "post": { + "summary": "Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred.", + "operationId": "CreateWorkflowEvent2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminWorkflowExecutionEventResponse" + } + } + }, + "parameters": [ + { + "name": "event.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminWorkflowExecutionEventRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/events/org/{event.id.execution_id.org}/nodes": { + "post": { + "summary": "Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred.", + "operationId": "CreateNodeEvent2", + "responses": { + "200": { + 
"description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNodeExecutionEventResponse" + } + } + }, + "parameters": [ + { + "name": "event.id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminNodeExecutionEventRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/events/org/{event.parent_node_execution_id.execution_id.org}/tasks": { + "post": { + "summary": "Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred.", + "operationId": "CreateTaskEvent2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminTaskExecutionEventResponse" + } + } + }, + "parameters": [ + { + "name": "event.parent_node_execution_id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminTaskExecutionEventRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/events/tasks": { + "post": { + "summary": "Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred.", + "operationId": "CreateTaskEvent", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminTaskExecutionEventResponse" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminTaskExecutionEventRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/events/workflows": { + "post": { + "summary": "Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred.", + "operationId": "CreateWorkflowEvent", + "responses": { + "200": { + "description": 
"A successful response.", + "schema": { + "$ref": "#/definitions/adminWorkflowExecutionEventResponse" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminWorkflowExecutionEventRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/executions": { + "post": { + "summary": "Triggers the creation of a :ref:`ref_flyteidl.admin.Execution`", + "operationId": "CreateExecution", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionCreateResponse" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminExecutionCreateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/executions/org/{id.org}/recover": { + "post": { + "summary": "Recreates a previously-run workflow execution that will only start executing from the last known failure point.\nIn Recover mode, users cannot change any input parameters or update the version of the execution.\nThis is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures,\ndownstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again.\nSee :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details.", + "operationId": "RecoverExecution2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionCreateResponse" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminExecutionRecoverRequest" + } + } + ], + 
"tags": [ + "AdminService" + ] + } + }, + "/api/v1/executions/org/{id.org}/relaunch": { + "post": { + "summary": "Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution`", + "operationId": "RelaunchExecution2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionCreateResponse" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminExecutionRelaunchRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/executions/org/{id.org}/{id.project}/{id.domain}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "ListExecutions2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionList" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": 
"query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Fetches a :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "GetExecution2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecution" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User or system 
provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + }, + "delete": { + "summary": "Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "TerminateExecution2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionTerminateResponse" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminExecutionTerminateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + }, + "put": { + "summary": "Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "UpdateExecution2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionUpdateResponse" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs 
to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminExecutionUpdateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/executions/org/{org}": { + "put": { + "summary": "Triggers the creation of a :ref:`ref_flyteidl.admin.Execution`", + "operationId": "CreateExecution2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionCreateResponse" + } + } + }, + "parameters": [ + { + "name": "org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminExecutionCreateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/executions/recover": { + "post": { + "summary": "Recreates a previously-run workflow execution that will only start executing from the last known failure point.\nIn Recover mode, users cannot change any input parameters or update the version of the execution.\nThis is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures,\ndownstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again.\nSee :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details.", + "operationId": "RecoverExecution", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionCreateResponse" + } + } + }, + "parameters": [ + { 
+ "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminExecutionRecoverRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/executions/relaunch": { + "post": { + "summary": "Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution`", + "operationId": "RelaunchExecution", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionCreateResponse" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminExecutionRelaunchRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/executions/{id.project}/{id.domain}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "ListExecutions", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionList" + } + } + }, + "parameters": [ + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": 
"integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/executions/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Fetches a :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "GetExecution", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecution" + } + } + }, + "parameters": [ + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + 
"required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + }, + "delete": { + "summary": "Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "TerminateExecution", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionTerminateResponse" + } + } + }, + "parameters": [ + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminExecutionTerminateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + }, + "put": { + "summary": "Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "UpdateExecution", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminExecutionUpdateResponse" + } + } + }, + "parameters": [ + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + 
"schema": { + "$ref": "#/definitions/adminExecutionUpdateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/launch_plan_ids/org/{org}/{project}/{domain}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects.", + "operationId": "ListLaunchPlanIds2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNamedEntityIdentifierList" + } + } + }, + "parameters": [ + { + "name": "org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "project", + "description": "Name of the project that contains the identifiers.\n+required", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "domain", + "description": "Name of the domain the identifiers belongs to within the project.\n+required", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + }, + { + "name": "filters", + "description": "Indicates a list of 
filters passed as string.\n+optional.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/launch_plan_ids/{project}/{domain}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects.", + "operationId": "ListLaunchPlanIds", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNamedEntityIdentifierList" + } + } + }, + "parameters": [ + { + "name": "project", + "description": "Name of the project that contains the identifiers.\n+required", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "domain", + "description": "Name of the domain the identifiers belongs to within the project.\n+required", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "org", + 
"description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/launch_plans": { + "post": { + "summary": "Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition", + "operationId": "CreateLaunchPlan", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlanCreateResponse" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminLaunchPlanCreateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/launch_plans/org/{id.org}": { + "post": { + "summary": "Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition", + "operationId": "CreateLaunchPlan2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlanCreateResponse" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminLaunchPlanCreateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions.", + "operationId": "ListLaunchPlans4", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlanList" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + 
"type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions.", + "operationId": "ListLaunchPlans2", + "responses": { + "200": { + "description": "A successful 
response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlanList" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + 
"DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}": { + "get": { + "summary": "Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition.", + "operationId": "GetLaunchPlan2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlan" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.version", + "description": "Specific version of the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ], + "default": "UNSPECIFIED" + } + ], + "tags": [ + "AdminService" + ] + }, + "put": { + "summary": "Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`.", + "operationId": "UpdateLaunchPlan2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlanUpdateResponse" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.version", + "description": "Specific version of the resource.", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/launch_plans/{id.project}/{id.domain}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions.", + "operationId": "ListLaunchPlans3", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlanList" + } + } + }, + "parameters": [ + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + 
}, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` 
definitions.", + "operationId": "ListLaunchPlans", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlanList" + } + } + }, + "parameters": [ + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, 
fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}": { + "get": { + "summary": "Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition.", + "operationId": "GetLaunchPlan", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlan" + } + } + }, + "parameters": [ + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.version", + "description": "Specific version of the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ], + "default": "UNSPECIFIED" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + }, + "put": { + "summary": "Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`.", + "operationId": "UpdateLaunchPlan", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminLaunchPlanUpdateResponse" + } + } + }, + "parameters": [ + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.version", + "description": "Specific version of the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminLaunchPlanUpdateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/matchable_attributes": { + "get": { + "summary": "Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type.", + "operationId": "ListMatchableAttributes", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminListMatchableAttributesResponse" 
+ } + } + }, + "parameters": [ + { + "name": "resource_type", + "description": "+required.\n\n - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "TASK_RESOURCE", + "CLUSTER_RESOURCE", + "EXECUTION_QUEUE", + "EXECUTION_CLUSTER_LABEL", + "QUALITY_OF_SERVICE_SPECIFICATION", + "PLUGIN_OVERRIDE", + "WORKFLOW_EXECUTION_CONFIG", + "CLUSTER_ASSIGNMENT" + ], + "default": "TASK_RESOURCE" + }, + { + "name": "org", + "description": "Optional, org filter applied to list project requests.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/matchable_attributes/org/{org}": { + "get": { + "summary": "Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type.", + "operationId": "ListMatchableAttributes2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminListMatchableAttributesResponse" + } + } + }, + "parameters": [ + { + "name": "org", + "description": "Optional, org filter applied to list project requests.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "resource_type", + "description": "+required.\n\n - 
TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "TASK_RESOURCE", + "CLUSTER_RESOURCE", + "EXECUTION_QUEUE", + "EXECUTION_CLUSTER_LABEL", + "QUALITY_OF_SERVICE_SPECIFICATION", + "PLUGIN_OVERRIDE", + "WORKFLOW_EXECUTION_CONFIG", + "CLUSTER_ASSIGNMENT" + ], + "default": "TASK_RESOURCE" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/metrics/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "GetExecutionMetrics2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminWorkflowExecutionGetMetricsResponse" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + 
"required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "depth", + "description": "depth defines the number of Flyte entity levels to traverse when breaking down execution details.", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`.", + "operationId": "GetExecutionMetrics", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminWorkflowExecutionGetMetricsResponse" + } + } + }, + "parameters": [ + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "depth", + "description": "depth defines the number of Flyte entity levels to traverse when breaking down execution details.", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Returns a :ref:`ref_flyteidl.admin.NamedEntity` object.", + "operationId": 
"GetNamedEntity2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNamedEntity" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "resource_type", + "description": "Resource type of the metadata to get. One of Task, Workflow or LaunchPlan.\n+required", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + }, + "put": { + "summary": "Updates a :ref:`ref_flyteidl.admin.NamedEntity` object.", + "operationId": "UpdateNamedEntity2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNamedEntityUpdateResponse" + } + } + }, + "parameters": [ + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "resource_type", + "description": "Resource type of the metadata to update\n+required", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + 
}, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminNamedEntityUpdateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/named_entities/org/{org}/{resource_type}/{project}/{domain}": { + "get": { + "summary": "Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects.", + "operationId": "ListNamedEntities2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNamedEntityList" + } + } + }, + "parameters": [ + { + "name": "org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "resource_type", + "description": "Resource type of the metadata to query. 
One of Task, Workflow or LaunchPlan.\n+required", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + }, + { + "name": "project", + "description": "Name of the project that contains the identifiers.\n+required", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "domain", + "description": "Name of the domain the identifiers belongs to within the project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\n+optional.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Returns a :ref:`ref_flyteidl.admin.NamedEntity` object.", + "operationId": "GetNamedEntity", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNamedEntity" + } + } 
+ }, + "parameters": [ + { + "name": "resource_type", + "description": "Resource type of the metadata to get. One of Task, Workflow or LaunchPlan.\n+required", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + }, + "put": { + "summary": "Updates a :ref:`ref_flyteidl.admin.NamedEntity` object.", + "operationId": "UpdateNamedEntity", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNamedEntityUpdateResponse" + } + } + }, + "parameters": [ + { + "name": "resource_type", + "description": "Resource type of the metadata to update\n+required", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + }, + { + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific 
project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminNamedEntityUpdateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/named_entities/{resource_type}/{project}/{domain}": { + "get": { + "summary": "Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects.", + "operationId": "ListNamedEntities", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNamedEntityList" + } + } + }, + "parameters": [ + { + "name": "resource_type", + "description": "Resource type of the metadata to query. One of Task, Workflow or LaunchPlan.\n+required", + "in": "path", + "required": true, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ] + }, + { + "name": "project", + "description": "Name of the project that contains the identifiers.\n+required", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "domain", + "description": "Name of the domain the identifiers belongs to within the project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an 
attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}": { + "get": { + "summary": "Fetches a :ref:`ref_flyteidl.admin.NodeExecution`.", + "operationId": "GetNodeExecution2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/flyteidladminNodeExecution" + } + } + }, + "parameters": [ + { + "name": "id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_id", + "in": 
"path", + "required": true, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow": { + "get": { + "summary": "Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`.", + "operationId": "GetDynamicNodeWorkflow2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminDynamicNodeWorkflowResponse" + } + } + }, + "parameters": [ + { + "name": "id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/node_executions/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`.", + "operationId": "ListNodeExecutions2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNodeExecutionList" + } + } + }, + "parameters": [ + { + "name": "workflow_execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + 
"required": true, + "type": "string" + }, + { + "name": "workflow_execution_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "workflow_execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "workflow_execution_id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + }, + { + "name": "unique_parent_id", + "description": "Unique identifier of the parent node in the execution\n+optional.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}": { + "get": { + "summary": 
"Fetches a :ref:`ref_flyteidl.admin.NodeExecution`.", + "operationId": "GetNodeExecution", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/flyteidladminNodeExecution" + } + } + }, + "parameters": [ + { + "name": "id.execution_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow": { + "get": { + "summary": "Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`.", + "operationId": "GetDynamicNodeWorkflow", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminDynamicNodeWorkflowResponse" + } + } + }, + "parameters": [ + { + "name": "id.execution_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.name", 
+ "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`.", + "operationId": "ListNodeExecutions", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminNodeExecutionList" + } + } + }, + "parameters": [ + { + "name": "workflow_execution_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "workflow_execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "workflow_execution_id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "workflow_execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : 
\u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + }, + { + "name": "unique_parent_id", + "description": "Unique identifier of the parent node in the execution\n+optional.", + "in": "query", + "required": false, + "type": "string" + } ], "tags": [ "AdminService" ] } }, - "/api/v1/events/tasks": { - "post": { - "summary": "Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred.", - "operationId": "CreateTaskEvent", + "/api/v1/project_attributes/{attributes.project}": { + "put": { + "summary": "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level", + "operationId": "UpdateProjectAttributes", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminTaskExecutionEventResponse" + "$ref": "#/definitions/adminProjectAttributesUpdateResponse" } } }, "parameters": [ + { + "name": "attributes.project", + "description": "Unique project id for which this set of attributes will be applied.", + "in": "path", + "required": true, + "type": "string" + }, { "name": "body", "in": "body", "required": true, "schema": { - "$ref": "#/definitions/adminTaskExecutionEventRequest" + "$ref": "#/definitions/adminProjectAttributesUpdateRequest" } } ], @@ -746,52 +4246,81 @@ ] } }, - "/api/v1/events/workflows": { - "post": { - "summary": "Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred.", - "operationId": 
"CreateWorkflowEvent", + "/api/v1/project_attributes/{project}": { + "get": { + "summary": "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", + "operationId": "GetProjectAttributes", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminWorkflowExecutionEventResponse" + "$ref": "#/definitions/adminProjectAttributesGetResponse" } } }, "parameters": [ { - "name": "body", - "in": "body", + "name": "project", + "description": "Unique project id which this set of attributes references.\n+required", + "in": "path", "required": true, - "schema": { - "$ref": "#/definitions/adminWorkflowExecutionEventRequest" - } + "type": "string" + }, + { + "name": "resource_type", + "description": "Which type of matchable attributes to return.\n+required.\n\n - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "TASK_RESOURCE", + "CLUSTER_RESOURCE", + "EXECUTION_QUEUE", + "EXECUTION_CLUSTER_LABEL", + "QUALITY_OF_SERVICE_SPECIFICATION", + "PLUGIN_OVERRIDE", + "WORKFLOW_EXECUTION_CONFIG", + "CLUSTER_ASSIGNMENT" + ], + "default": "TASK_RESOURCE" + }, + { + "name": "org", + "description": "Optional, org key applied to 
the project.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ "AdminService" ] - } - }, - "/api/v1/executions": { - "post": { - "summary": "Triggers the creation of a :ref:`ref_flyteidl.admin.Execution`", - "operationId": "CreateExecution", + }, + "delete": { + "summary": "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", + "operationId": "DeleteProjectAttributes", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminExecutionCreateResponse" + "$ref": "#/definitions/adminProjectAttributesDeleteResponse" } } }, "parameters": [ + { + "name": "project", + "description": "Unique project id which this set of attributes references.\n+required", + "in": "path", + "required": true, + "type": "string" + }, { "name": "body", "in": "body", "required": true, "schema": { - "$ref": "#/definitions/adminExecutionCreateRequest" + "$ref": "#/definitions/adminProjectAttributesDeleteRequest" } } ], @@ -800,25 +4329,39 @@ ] } }, - "/api/v1/executions/recover": { - "post": { - "summary": "Recreates a previously-run workflow execution that will only start executing from the last known failure point.\nIn Recover mode, users cannot change any input parameters or update the version of the execution.\nThis is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures,\ndownstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again.\nSee :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details.", - "operationId": "RecoverExecution", + "/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}": { + "put": { + "summary": "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level", + "operationId": 
"UpdateProjectAttributes2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminExecutionCreateResponse" + "$ref": "#/definitions/adminProjectAttributesUpdateResponse" } } }, "parameters": [ + { + "name": "attributes.org", + "description": "Optional, org key applied to the project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "attributes.project", + "description": "Unique project id for which this set of attributes will be applied.", + "in": "path", + "required": true, + "type": "string" + }, { "name": "body", "in": "body", "required": true, "schema": { - "$ref": "#/definitions/adminExecutionRecoverRequest" + "$ref": "#/definitions/adminProjectAttributesUpdateRequest" } } ], @@ -827,25 +4370,46 @@ ] } }, - "/api/v1/executions/relaunch": { - "post": { - "summary": "Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution`", - "operationId": "RelaunchExecution", + "/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}": { + "put": { + "summary": "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", + "operationId": "UpdateProjectDomainAttributes2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminExecutionCreateResponse" + "$ref": "#/definitions/adminProjectDomainAttributesUpdateResponse" } } }, "parameters": [ + { + "name": "attributes.org", + "description": "Optional, org key applied to the attributes.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "attributes.project", + "description": "Unique project id for which this set of attributes will be applied.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "attributes.domain", + "description": "Unique domain id for which this set of attributes will be applied.", + "in": "path", + "required": true, + "type": 
"string" + }, { "name": "body", "in": "body", "required": true, "schema": { - "$ref": "#/definitions/adminExecutionRelaunchRequest" + "$ref": "#/definitions/adminProjectDomainAttributesUpdateRequest" } } ], @@ -854,80 +4418,89 @@ ] } }, - "/api/v1/executions/{id.project}/{id.domain}": { + "/api/v1/project_domain_attributes/org/{org}/{project}": { "get": { - "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Execution`.", - "operationId": "ListExecutions", + "summary": "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", + "operationId": "GetProjectAttributes2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminProjectAttributesGetResponse" + } + } + }, + "parameters": [ + { + "name": "org", + "description": "Optional, org key applied to the project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "project", + "description": "Unique project id which this set of attributes references.\n+required", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "resource_type", + "description": "Which type of matchable attributes to return.\n+required.\n\n - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", + "in": 
"query", + "required": false, + "type": "string", + "enum": [ + "TASK_RESOURCE", + "CLUSTER_RESOURCE", + "EXECUTION_QUEUE", + "EXECUTION_CLUSTER_LABEL", + "QUALITY_OF_SERVICE_SPECIFICATION", + "PLUGIN_OVERRIDE", + "WORKFLOW_EXECUTION_CONFIG", + "CLUSTER_ASSIGNMENT" + ], + "default": "TASK_RESOURCE" + } + ], + "tags": [ + "AdminService" + ] + }, + "delete": { + "summary": "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", + "operationId": "DeleteProjectAttributes2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminExecutionList" + "$ref": "#/definitions/adminProjectAttributesDeleteResponse" } } }, "parameters": [ { - "name": "id.project", - "description": "Name of the project the resource belongs to.", + "name": "org", + "description": "Optional, org key applied to the project.", "in": "path", "required": true, "type": "string" }, { - "name": "id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "name": "project", + "description": "Unique project id which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" }, { - "name": "id.name", - "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "limit", - "description": "Indicates the number of resources to be returned.\n+required.", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "token", - "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": 
"filters", - "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "sort_by.key", - "description": "Indicates an attribute to sort the response values.\n+required.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "sort_by.direction", - "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", - "in": "query", - "required": false, - "type": "string", - "enum": [ - "DESCENDING", - "ASCENDING" - ], - "default": "DESCENDING" + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminProjectAttributesDeleteRequest" + } } ], "tags": [ @@ -935,39 +4508,57 @@ ] } }, - "/api/v1/executions/{id.project}/{id.domain}/{id.name}": { + "/api/v1/project_domain_attributes/org/{org}/{project}/{domain}": { "get": { - "summary": "Fetches a :ref:`ref_flyteidl.admin.Execution`.", - "operationId": "GetExecution", + "summary": "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", + "operationId": "GetProjectDomainAttributes2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminExecution" + "$ref": "#/definitions/adminProjectDomainAttributesGetResponse" } } }, "parameters": [ { - "name": "id.project", - "description": "Name of the project the resource belongs to.", + "name": "org", + "description": "Optional, org key applied to the attributes.", "in": "path", "required": true, "type": "string" }, { - "name": "id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "name": "project", + "description": "Unique project id which this set of attributes references.\n+required", "in": 
"path", "required": true, "type": "string" }, { - "name": "id.name", - "description": "User or system provided value for the resource.", + "name": "domain", + "description": "Unique domain id which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" + }, + { + "name": "resource_type", + "description": "Which type of matchable attributes to return.\n+required.\n\n - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "TASK_RESOURCE", + "CLUSTER_RESOURCE", + "EXECUTION_QUEUE", + "EXECUTION_CLUSTER_LABEL", + "QUALITY_OF_SERVICE_SPECIFICATION", + "PLUGIN_OVERRIDE", + "WORKFLOW_EXECUTION_CONFIG", + "CLUSTER_ASSIGNMENT" + ], + "default": "TASK_RESOURCE" } ], "tags": [ @@ -975,34 +4566,34 @@ ] }, "delete": { - "summary": "Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`.", - "operationId": "TerminateExecution", + "summary": "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", + "operationId": "DeleteProjectDomainAttributes2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminExecutionTerminateResponse" + "$ref": 
"#/definitions/adminProjectDomainAttributesDeleteResponse" } } }, "parameters": [ { - "name": "id.project", - "description": "Name of the project the resource belongs to.", + "name": "org", + "description": "Optional, org key applied to the attributes.", "in": "path", "required": true, "type": "string" }, { - "name": "id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "name": "project", + "description": "Unique project id which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" }, { - "name": "id.name", - "description": "User or system provided value for the resource.", + "name": "domain", + "description": "Unique domain id which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" @@ -1012,43 +4603,38 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/adminExecutionTerminateRequest" + "$ref": "#/definitions/adminProjectDomainAttributesDeleteRequest" } } ], "tags": [ "AdminService" ] - }, + } + }, + "/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}": { "put": { - "summary": "Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`.", - "operationId": "UpdateExecution", + "summary": "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", + "operationId": "UpdateProjectDomainAttributes", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminExecutionUpdateResponse" + "$ref": "#/definitions/adminProjectDomainAttributesUpdateResponse" } } }, "parameters": [ { - "name": "id.project", - "description": "Name of the project the resource belongs to.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be 
considered as a subset within a specific project.", + "name": "attributes.project", + "description": "Unique project id for which this set of attributes will be applied.", "in": "path", "required": true, "type": "string" }, { - "name": "id.name", - "description": "User or system provided value for the resource.", + "name": "attributes.domain", + "description": "Unique domain id for which this set of attributes will be applied.", "in": "path", "required": true, "type": "string" @@ -1058,7 +4644,7 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/adminExecutionUpdateRequest" + "$ref": "#/definitions/adminProjectDomainAttributesUpdateRequest" } } ], @@ -1067,70 +4653,54 @@ ] } }, - "/api/v1/launch_plan_ids/{project}/{domain}": { + "/api/v1/project_domain_attributes/{project}/{domain}": { "get": { - "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects.", - "operationId": "ListLaunchPlanIds", + "summary": "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", + "operationId": "GetProjectDomainAttributes", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminNamedEntityIdentifierList" + "$ref": "#/definitions/adminProjectDomainAttributesGetResponse" } } }, "parameters": [ { "name": "project", - "description": "Name of the project that contains the identifiers.\n+required", + "description": "Unique project id which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" }, { "name": "domain", - "description": "Name of the domain the identifiers belongs to within the project.\n+required", + "description": "Unique domain id which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" }, { - "name": "limit", - "description": "Indicates the number of resources to be returned.\n+required.", - "in": "query", - "required": false, 
- "type": "integer", - "format": "int64" - }, - { - "name": "token", - "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "sort_by.key", - "description": "Indicates an attribute to sort the response values.\n+required.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "sort_by.direction", - "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "name": "resource_type", + "description": "Which type of matchable attributes to return.\n+required.\n\n - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", "in": "query", "required": false, "type": "string", "enum": [ - "DESCENDING", - "ASCENDING" + "TASK_RESOURCE", + "CLUSTER_RESOURCE", + "EXECUTION_QUEUE", + "EXECUTION_CLUSTER_LABEL", + "QUALITY_OF_SERVICE_SPECIFICATION", + "PLUGIN_OVERRIDE", + "WORKFLOW_EXECUTION_CONFIG", + "CLUSTER_ASSIGNMENT" ], - "default": "DESCENDING" + "default": "TASK_RESOURCE" }, { - "name": "filters", - "description": "Indicates a list of filters passed as string.\n+optional.", + "name": 
"org", + "description": "Optional, org key applied to the attributes.", "in": "query", "required": false, "type": "string" @@ -1139,27 +4709,39 @@ "tags": [ "AdminService" ] - } - }, - "/api/v1/launch_plans": { - "post": { - "summary": "Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition", - "operationId": "CreateLaunchPlan", + }, + "delete": { + "summary": "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", + "operationId": "DeleteProjectDomainAttributes", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminLaunchPlanCreateResponse" + "$ref": "#/definitions/adminProjectDomainAttributesDeleteResponse" } } }, "parameters": [ + { + "name": "project", + "description": "Unique project id which this set of attributes references.\n+required", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "domain", + "description": "Unique domain id which this set of attributes references.\n+required", + "in": "path", + "required": true, + "type": "string" + }, { "name": "body", "in": "body", "required": true, "schema": { - "$ref": "#/definitions/adminLaunchPlanCreateRequest" + "$ref": "#/definitions/adminProjectDomainAttributesDeleteRequest" } } ], @@ -1168,43 +4750,22 @@ ] } }, - "/api/v1/launch_plans/{id.project}/{id.domain}": { + "/api/v1/projects": { "get": { - "summary": "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions.", - "operationId": "ListLaunchPlans2", + "summary": "Fetches a list of :ref:`ref_flyteidl.admin.Project`", + "operationId": "ListProjects", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminLaunchPlanList" + "$ref": "#/definitions/adminProjects" } - } - }, - "parameters": [ - { - "name": "id.project", - "description": "Name of the project the resource belongs to.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": 
"id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.name", - "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", - "in": "query", - "required": false, - "type": "string" - }, + } + }, + "parameters": [ { "name": "limit", - "description": "Indicates the number of resources to be returned.\n+required.", + "description": "Indicates the number of projects to be returned.\n+required.", "in": "query", "required": false, "type": "integer", @@ -1242,6 +4803,38 @@ "ASCENDING" ], "default": "DESCENDING" + }, + { + "name": "org", + "description": "Optional, org filter applied to list project requests.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + }, + "post": { + "summary": "Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment.", + "operationId": "RegisterProject", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminProjectRegisterResponse" + } + } + }, + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminProjectRegisterRequest" + } } ], "tags": [ @@ -1249,43 +4842,29 @@ ] } }, - "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}": { + "/api/v1/projects/org/{org}": { "get": { - "summary": "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions.", - "operationId": "ListLaunchPlans", + "summary": "Fetches a list of :ref:`ref_flyteidl.admin.Project`", + "operationId": "ListProjects2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminLaunchPlanList" + "$ref": "#/definitions/adminProjects" } 
} }, "parameters": [ { - "name": "id.project", - "description": "Name of the project the resource belongs to.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.name", - "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "name": "org", + "description": "Optional, org filter applied to list project requests.", "in": "path", "required": true, "type": "string" }, { "name": "limit", - "description": "Indicates the number of resources to be returned.\n+required.", + "description": "Indicates the number of projects to be returned.\n+required.", "in": "query", "required": false, "type": "integer", @@ -1330,151 +4909,218 @@ ] } }, - "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}": { - "get": { - "summary": "Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition.", - "operationId": "GetLaunchPlan", + "/api/v1/projects/org/{org}/{id}": { + "put": { + "summary": "Updates an existing :ref:`ref_flyteidl.admin.Project`\nflyteidl.admin.Project should be passed but the domains property should be empty;\nit will be ignored in the handler as domains cannot be updated via this API.", + "operationId": "UpdateProject2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminLaunchPlan" + "$ref": "#/definitions/adminProjectUpdateResponse" } } }, "parameters": [ { - "name": "id.project", - "description": "Name of the project the resource belongs to.", + "name": "org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "id.domain", - "description": 
"Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "name": "id", + "description": "Globally unique project name.", "in": "path", "required": true, "type": "string" }, { - "name": "id.name", - "description": "User provided value for the resource.", + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminProject" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/projects/org/{project.org}": { + "post": { + "summary": "Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment.", + "operationId": "RegisterProject2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminProjectRegisterResponse" + } + } + }, + "parameters": [ + { + "name": "project.org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "id.version", - "description": "Specific version of the resource.", + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminProjectRegisterRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/projects/{id}": { + "put": { + "summary": "Updates an existing :ref:`ref_flyteidl.admin.Project`\nflyteidl.admin.Project should be passed but the domains property should be empty;\nit will be ignored in the handler as domains cannot be updated via this API.", + "operationId": "UpdateProject", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminProjectUpdateResponse" + } + } + }, + "parameters": [ + { + "name": "id", + "description": "Globally unique project name.", "in": "path", "required": true, "type": "string" }, { - "name": "id.resource_type", - "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity 
modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", - "in": "query", - "required": false, - "type": "string", - "enum": [ - "UNSPECIFIED", - "TASK", - "WORKFLOW", - "LAUNCH_PLAN", - "DATASET" - ], - "default": "UNSPECIFIED" + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminProject" + } } ], "tags": [ "AdminService" ] - }, - "put": { - "summary": "Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`.", - "operationId": "UpdateLaunchPlan", + } + }, + "/api/v1/task_executions/org/{id.node_execution_id.execution_id.org}/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}": { + "get": { + "summary": "Fetches a :ref:`ref_flyteidl.admin.TaskExecution`.", + "operationId": "GetTaskExecution2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminLaunchPlanUpdateResponse" + "$ref": "#/definitions/flyteidladminTaskExecution" } } }, "parameters": [ { - "name": "id.project", + "name": "id.node_execution_id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_execution_id.execution_id.project", "description": "Name of the project the resource belongs to.", "in": "path", "required": true, "type": "string" }, { - "name": "id.domain", + "name": "id.node_execution_id.execution_id.domain", "description": "Name of the domain the resource belongs to.\nA domain can be considered as a 
subset within a specific project.", "in": "path", "required": true, "type": "string" }, { - "name": "id.name", + "name": "id.node_execution_id.execution_id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_execution_id.node_id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.name", "description": "User provided value for the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "id.version", + "name": "id.task_id.version", "description": "Specific version of the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "body", - "in": "body", + "name": "id.retry_attempt", + "in": "path", "required": true, - "schema": { - "$ref": "#/definitions/adminLaunchPlanUpdateRequest" - } - } - ], - "tags": [ - "AdminService" - ] - } - }, - "/api/v1/matchable_attributes": { - "get": { - "summary": "Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type.", - "operationId": "ListMatchableAttributes", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/adminListMatchableAttributesResponse" - } - } - }, - "parameters": [ + "type": "integer", + "format": "int64" + }, { - "name": "resource_type", - "description": "+required.\n\n - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures 
task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", + "name": "id.task_id.resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", "in": "query", "required": false, "type": "string", "enum": [ - "TASK_RESOURCE", - "CLUSTER_RESOURCE", - "EXECUTION_QUEUE", - "EXECUTION_CLUSTER_LABEL", - "QUALITY_OF_SERVICE_SPECIFICATION", - "PLUGIN_OVERRIDE", - "WORKFLOW_EXECUTION_CONFIG", - "CLUSTER_ASSIGNMENT" + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" ], - "default": "TASK_RESOURCE" + "default": "UNSPECIFIED" + }, + { + "name": "id.task_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -1482,47 +5128,93 @@ ] } }, - "/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}": { + "/api/v1/task_executions/org/{node_execution_id.execution_id.org}/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}": { "get": { - "summary": "Fetches 
runtime metrics for a :ref:`ref_flyteidl.admin.Execution`.", - "operationId": "GetExecutionMetrics", + "summary": "Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`.", + "operationId": "ListTaskExecutions2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminWorkflowExecutionGetMetricsResponse" + "$ref": "#/definitions/adminTaskExecutionList" } } }, "parameters": [ { - "name": "id.project", + "name": "node_execution_id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "node_execution_id.execution_id.project", "description": "Name of the project the resource belongs to.", "in": "path", "required": true, "type": "string" }, { - "name": "id.domain", + "name": "node_execution_id.execution_id.domain", "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": true, "type": "string" }, { - "name": "id.name", + "name": "node_execution_id.execution_id.name", "description": "User or system provided value for the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "depth", - "description": "depth defines the number of Flyte entity levels to traverse when breaking down execution details.", + "name": "node_execution_id.node_id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", "in": "query", "required": false, "type": "integer", - "format": "int32" + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed 
as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" } ], "tags": [ @@ -1530,113 +5222,203 @@ ] } }, - "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}": { + "/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}": { "get": { - "summary": "Returns a :ref:`ref_flyteidl.admin.NamedEntity` object.", - "operationId": "GetNamedEntity", + "summary": "Fetches a :ref:`ref_flyteidl.admin.TaskExecution`.", + "operationId": "GetTaskExecution", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminNamedEntity" + "$ref": "#/definitions/flyteidladminTaskExecution" } } }, "parameters": [ { - "name": "resource_type", - "description": "Resource type of the metadata to get. 
One of Task, Workflow or LaunchPlan.\n+required", + "name": "id.node_execution_id.execution_id.project", + "description": "Name of the project the resource belongs to.", "in": "path", "required": true, - "type": "string", - "enum": [ - "UNSPECIFIED", - "TASK", - "WORKFLOW", - "LAUNCH_PLAN", - "DATASET" - ] + "type": "string" }, { - "name": "id.project", + "name": "id.node_execution_id.execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_execution_id.execution_id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.node_execution_id.node_id", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.project", "description": "Name of the project the resource belongs to.", "in": "path", "required": true, "type": "string" }, { - "name": "id.domain", + "name": "id.task_id.domain", "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": true, "type": "string" }, { - "name": "id.name", - "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "name": "id.task_id.name", + "description": "User provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.task_id.version", + "description": "Specific version of the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.retry_attempt", "in": "path", "required": true, + "type": "integer", + "format": "int64" + }, + { + "name": "id.task_id.resource_type", + "description": "Identifies the specific 
type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ], + "default": "UNSPECIFIED" + }, + { + "name": "id.task_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "id.node_execution_id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, "type": "string" } ], "tags": [ "AdminService" ] - }, - "put": { - "summary": "Updates a :ref:`ref_flyteidl.admin.NamedEntity` object.", - "operationId": "UpdateNamedEntity", + } + }, + "/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}": { + "get": { + "summary": "Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`.", + "operationId": "ListTaskExecutions", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminNamedEntityUpdateResponse" + "$ref": "#/definitions/adminTaskExecutionList" } } }, "parameters": [ { - "name": "resource_type", - "description": "Resource type of the metadata to update\n+required", + "name": "node_execution_id.execution_id.project", + "description": "Name of the project the resource belongs to.", "in": "path", "required": true, - "type": "string", - "enum": [ - "UNSPECIFIED", - "TASK", - "WORKFLOW", - "LAUNCH_PLAN", - "DATASET" - ] + "type": "string" }, { - "name": 
"id.project", - "description": "Name of the project the resource belongs to.", + "name": "node_execution_id.execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": true, "type": "string" }, { - "name": "id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "name": "node_execution_id.execution_id.name", + "description": "User or system provided value for the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "id.name", - "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "name": "node_execution_id.node_id", "in": "path", "required": true, "type": "string" }, { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/adminNamedEntityUpdateRequest" - } + "name": "node_execution_id.execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response 
values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" } ], "tags": [ @@ -1644,33 +5426,19 @@ ] } }, - "/api/v1/named_entities/{resource_type}/{project}/{domain}": { + "/api/v1/task_ids/{project}/{domain}": { "get": { - "summary": "Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects.", - "operationId": "ListNamedEntities", + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects.", + "operationId": "ListTaskIds", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminNamedEntityList" + "$ref": "#/definitions/adminNamedEntityIdentifierList" } } - }, - "parameters": [ - { - "name": "resource_type", - "description": "Resource type of the metadata to query. 
One of Task, Workflow or LaunchPlan.\n+required", - "in": "path", - "required": true, - "type": "string", - "enum": [ - "UNSPECIFIED", - "TASK", - "WORKFLOW", - "LAUNCH_PLAN", - "DATASET" - ] - }, + }, + "parameters": [ { "name": "project", "description": "Name of the project that contains the identifiers.\n+required", @@ -1680,14 +5448,14 @@ }, { "name": "domain", - "description": "Name of the domain the identifiers belongs to within the project.", + "description": "Name of the domain the identifiers belongs to within the project.\n+required", "in": "path", "required": true, "type": "string" }, { "name": "limit", - "description": "Indicates the number of resources to be returned.", + "description": "Indicates the number of resources to be returned.\n+required.", "in": "query", "required": false, "type": "integer", @@ -1725,6 +5493,13 @@ "in": "query", "required": false, "type": "string" + }, + { + "name": "org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -1732,45 +5507,60 @@ ] } }, - "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}": { - "get": { - "summary": "Fetches a :ref:`ref_flyteidl.admin.NodeExecution`.", - "operationId": "GetNodeExecution", + "/api/v1/tasks": { + "post": { + "summary": "Create and upload a :ref:`ref_flyteidl.admin.Task` definition", + "operationId": "CreateTask", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/flyteidladminNodeExecution" + "$ref": "#/definitions/flyteidladminTaskCreateResponse" } } }, "parameters": [ { - "name": "id.execution_id.project", - "description": "Name of the project the resource belongs to.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.execution_id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a 
specific project.", - "in": "path", + "name": "body", + "in": "body", "required": true, - "type": "string" - }, + "schema": { + "$ref": "#/definitions/flyteidladminTaskCreateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/tasks/org/{id.org}": { + "post": { + "summary": "Create and upload a :ref:`ref_flyteidl.admin.Task` definition", + "operationId": "CreateTask2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/flyteidladminTaskCreateResponse" + } + } + }, + "parameters": [ { - "name": "id.execution_id.name", - "description": "User or system provided value for the resource.", + "name": "id.org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "id.node_id", - "in": "path", + "name": "body", + "in": "body", "required": true, - "type": "string" + "schema": { + "$ref": "#/definitions/flyteidladminTaskCreateRequest" + } } ], "tags": [ @@ -1778,40 +5568,47 @@ ] } }, - "/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}": { + "/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}": { "get": { - "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`.", - "operationId": "ListNodeExecutions", + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions.", + "operationId": "ListTasks4", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminNodeExecutionList" + "$ref": "#/definitions/adminTaskList" } } }, "parameters": [ { - "name": "workflow_execution_id.project", - "description": "Name of the project the resource belongs to.", + "name": "id.org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "workflow_execution_id.domain", - "description": "Name of the domain the resource belongs to.\nA 
domain can be considered as a subset within a specific project.", + "name": "id.project", + "description": "Name of the project the resource belongs to.", "in": "path", "required": true, "type": "string" }, { - "name": "workflow_execution_id.name", - "description": "User or system provided value for the resource.", + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": true, "type": "string" }, + { + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", + "in": "query", + "required": false, + "type": "string" + }, { "name": "limit", "description": "Indicates the number of resources to be returned.\n+required.", @@ -1822,6 +5619,7 @@ }, { "name": "token", + "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", "in": "query", "required": false, "type": "string" @@ -1851,13 +5649,6 @@ "ASCENDING" ], "default": "DESCENDING" - }, - { - "name": "unique_parent_id", - "description": "Unique identifier of the parent node in the execution\n+optional.", - "in": "query", - "required": false, - "type": "string" } ], "tags": [ @@ -1865,150 +5656,237 @@ ] } }, - "/api/v1/project_attributes/{attributes.project}": { - "put": { - "summary": "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level", - "operationId": "UpdateProjectAttributes", + "/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions.", + "operationId": "ListTasks2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": 
"#/definitions/adminProjectAttributesUpdateResponse" + "$ref": "#/definitions/adminTaskList" } } }, "parameters": [ { - "name": "attributes.project", - "description": "Unique project id for which this set of attributes will be applied.", + "name": "id.org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "body", - "in": "body", + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", "required": true, - "schema": { - "$ref": "#/definitions/adminProjectAttributesUpdateRequest" - } - } - ], - "tags": [ - "AdminService" - ] - } - }, - "/api/v1/project_attributes/{project}": { - "get": { - "summary": "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", - "operationId": "GetProjectAttributes", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/adminProjectAttributesGetResponse" - } - } - }, - "parameters": [ + "type": "string" + }, { - "name": "project", - "description": "Unique project id which this set of attributes references.\n+required", + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": true, "type": "string" }, { - "name": "resource_type", - "description": "Which type of matchable attributes to return.\n+required.\n\n - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable 
plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", "in": "query", "required": false, "type": "string", "enum": [ - "TASK_RESOURCE", - "CLUSTER_RESOURCE", - "EXECUTION_QUEUE", - "EXECUTION_CLUSTER_LABEL", - "QUALITY_OF_SERVICE_SPECIFICATION", - "PLUGIN_OVERRIDE", - "WORKFLOW_EXECUTION_CONFIG", - "CLUSTER_ASSIGNMENT" + "DESCENDING", + "ASCENDING" ], - "default": "TASK_RESOURCE" + "default": "DESCENDING" } ], "tags": [ "AdminService" ] - }, - "delete": { - "summary": "Deletes 
custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", - "operationId": "DeleteProjectAttributes", + } + }, + "/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}": { + "get": { + "summary": "Fetch a :ref:`ref_flyteidl.admin.Task` definition.", + "operationId": "GetTask2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminProjectAttributesDeleteResponse" + "$ref": "#/definitions/adminTask" } } }, "parameters": [ { - "name": "project", - "description": "Unique project id which this set of attributes references.\n+required", + "name": "id.org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "body", - "in": "body", + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", "required": true, - "schema": { - "$ref": "#/definitions/adminProjectAttributesDeleteRequest" - } + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.version", + "description": "Specific version of the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ], + "default": "UNSPECIFIED" } ], "tags": [ "AdminService" ] } - }, - "/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}": { - "put": { - "summary": "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", - "operationId": "UpdateProjectDomainAttributes", + }, + "/api/v1/tasks/org/{org}/{project}/{domain}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects.", + "operationId": "ListTaskIds2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminProjectDomainAttributesUpdateResponse" + "$ref": "#/definitions/adminNamedEntityIdentifierList" } } }, "parameters": [ { - "name": "attributes.project", - "description": "Unique project id for which this set of attributes will be applied.", + "name": "org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "attributes.domain", - "description": "Unique domain id for which this set of attributes will be applied.", + "name": "project", + "description": "Name of the project that contains the identifiers.\n+required", "in": "path", "required": true, "type": "string" }, { - "name": "body", - "in": "body", + "name": "domain", + "description": "Name of the domain the identifiers belongs to within the project.\n+required", + "in": "path", "required": true, - "schema": { - "$ref": "#/definitions/adminProjectDomainAttributesUpdateRequest" - } + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, 
+ "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\n+optional.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -2016,112 +5894,138 @@ ] } }, - "/api/v1/project_domain_attributes/{project}/{domain}": { + "/api/v1/tasks/{id.project}/{id.domain}": { "get": { - "summary": "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", - "operationId": "GetProjectDomainAttributes", + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions.", + "operationId": "ListTasks3", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminProjectDomainAttributesGetResponse" + "$ref": "#/definitions/adminTaskList" } } }, "parameters": [ { - "name": "project", - "description": "Unique project id which this set of attributes references.\n+required", + "name": "id.project", + "description": "Name of the project the resource belongs to.", "in": "path", "required": true, "type": "string" }, { - "name": "domain", - "description": "Unique domain id which this set of attributes references.\n+required", + "name": "id.domain", + "description": "Name 
of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": true, "type": "string" }, { - "name": "resource_type", - "description": "Which type of matchable attributes to return.\n+required.\n\n - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on 
constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", "in": "query", "required": false, "type": "string", "enum": [ - "TASK_RESOURCE", - "CLUSTER_RESOURCE", - "EXECUTION_QUEUE", - "EXECUTION_CLUSTER_LABEL", - "QUALITY_OF_SERVICE_SPECIFICATION", - "PLUGIN_OVERRIDE", - "WORKFLOW_EXECUTION_CONFIG", - "CLUSTER_ASSIGNMENT" + "DESCENDING", + "ASCENDING" ], - "default": "TASK_RESOURCE" + "default": "DESCENDING" } ], "tags": [ "AdminService" ] - }, - "delete": { - "summary": "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.", - "operationId": "DeleteProjectDomainAttributes", + } + }, + "/api/v1/tasks/{id.project}/{id.domain}/{id.name}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions.", + "operationId": "ListTasks", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminProjectDomainAttributesDeleteResponse" + "$ref": "#/definitions/adminTaskList" } } }, "parameters": [ { - "name": "project", - "description": "Unique project id which this set of attributes references.\n+required", + "name": "id.project", + "description": "Name of the project the resource belongs to.", "in": "path", "required": true, "type": "string" }, { - "name": "domain", - "description": "Unique domain id which this set of attributes references.\n+required", + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": 
true, "type": "string" }, { - "name": "body", - "in": "body", + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", "required": true, - "schema": { - "$ref": "#/definitions/adminProjectDomainAttributesDeleteRequest" - } - } - ], - "tags": [ - "AdminService" - ] - } - }, - "/api/v1/projects": { - "get": { - "summary": "Fetches a list of :ref:`ref_flyteidl.admin.Project`", - "operationId": "ListProjects", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/adminProjects" - } - } - }, - "parameters": [ + "type": "string" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, { "name": "limit", - "description": "Indicates the number of projects to be returned.\n+required.", + "description": "Indicates the number of resources to be returned.\n+required.", "in": "query", "required": false, "type": "integer", @@ -2164,49 +6068,130 @@ "tags": [ "AdminService" ] - }, - "post": { - "summary": "Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment.", - "operationId": "RegisterProject", + } + }, + "/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}": { + "get": { + "summary": "Fetch a :ref:`ref_flyteidl.admin.Task` definition.", + "operationId": "GetTask", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminProjectRegisterResponse" + "$ref": "#/definitions/adminTask" } } }, "parameters": [ { - "name": "body", - "in": "body", + "name": "id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.domain", + "description": "Name of the domain the resource belongs 
to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.name", + "description": "User provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.version", + "description": "Specific version of the resource.", + "in": "path", "required": true, + "type": "string" + }, + { + "name": "id.resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ], + "default": "UNSPECIFIED" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/version": { + "get": { + "operationId": "GetVersion", + "responses": { + "200": { + "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminProjectRegisterRequest" + "$ref": "#/definitions/adminGetVersionResponse" } } - ], + }, "tags": [ "AdminService" ] } }, - "/api/v1/projects/{id}": { + "/api/v1/workflow_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}/{attributes.workflow}": { "put": { - "summary": "Updates an existing :ref:`ref_flyteidl.admin.Project` \nflyteidl.admin.Project should be passed but the domains property should be empty;\nit will be ignored in the handler as domains cannot be updated via this API.", - "operationId": 
"UpdateProject", + "summary": "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow.", + "operationId": "UpdateWorkflowAttributes2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminProjectUpdateResponse" + "$ref": "#/definitions/adminWorkflowAttributesUpdateResponse" } } }, "parameters": [ { - "name": "id", - "description": "Globally unique project name.", + "name": "attributes.org", + "description": "Optional, org key applied to the attributes.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "attributes.project", + "description": "Unique project id for which this set of attributes will be applied.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "attributes.domain", + "description": "Unique domain id for which this set of attributes will be applied.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "attributes.workflow", + "description": "Workflow name for which this set of attributes will be applied.", "in": "path", "required": true, "type": "string" @@ -2216,7 +6201,7 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/adminProject" + "$ref": "#/definitions/adminWorkflowAttributesUpdateRequest" } } ], @@ -2225,95 +6210,117 @@ ] } }, - "/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}": { + "/api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}": { "get": { - "summary": "Fetches a :ref:`ref_flyteidl.admin.TaskExecution`.", - "operationId": "GetTaskExecution", + "summary": "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow.", + 
"operationId": "GetWorkflowAttributes2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/flyteidladminTaskExecution" + "$ref": "#/definitions/adminWorkflowAttributesGetResponse" } } }, "parameters": [ { - "name": "id.node_execution_id.execution_id.project", - "description": "Name of the project the resource belongs to.", + "name": "org", + "description": "Optional, org key applied to the attributes.", "in": "path", "required": true, "type": "string" }, { - "name": "id.node_execution_id.execution_id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "name": "project", + "description": "Unique project id which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" }, { - "name": "id.node_execution_id.execution_id.name", - "description": "User or system provided value for the resource.", + "name": "domain", + "description": "Unique domain id which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" }, { - "name": "id.node_execution_id.node_id", + "name": "workflow", + "description": "Workflow name which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" }, { - "name": "id.task_id.project", - "description": "Name of the project the resource belongs to.", + "name": "resource_type", + "description": "Which type of matchable attributes to return.\n+required.\n\n - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an 
execution spec.\n - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "TASK_RESOURCE", + "CLUSTER_RESOURCE", + "EXECUTION_QUEUE", + "EXECUTION_CLUSTER_LABEL", + "QUALITY_OF_SERVICE_SPECIFICATION", + "PLUGIN_OVERRIDE", + "WORKFLOW_EXECUTION_CONFIG", + "CLUSTER_ASSIGNMENT" + ], + "default": "TASK_RESOURCE" + } + ], + "tags": [ + "AdminService" + ] + }, + "delete": { + "summary": "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow.", + "operationId": "DeleteWorkflowAttributes2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminWorkflowAttributesDeleteResponse" + } + } + }, + "parameters": [ + { + "name": "org", + "description": "Optional, org key applied to the attributes.", "in": "path", "required": true, "type": "string" }, { - "name": "id.task_id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "name": "project", + "description": "Unique project id which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" }, { - "name": "id.task_id.name", - "description": "User provided value for the resource.", + "name": "domain", + "description": "Unique domain id which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" }, { - "name": "id.task_id.version", - "description": "Specific version of the resource.", + "name": "workflow", + "description": "Workflow name which this set of attributes references.\n+required", "in": "path", "required": true, 
"type": "string" }, { - "name": "id.retry_attempt", - "in": "path", + "name": "body", + "in": "body", "required": true, - "type": "integer", - "format": "int64" - }, - { - "name": "id.task_id.resource_type", - "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", - "in": "query", - "required": false, - "type": "string", - "enum": [ - "UNSPECIFIED", - "TASK", - "WORKFLOW", - "LAUNCH_PLAN", - "DATASET" - ], - "default": "UNSPECIFIED" + "schema": { + "$ref": "#/definitions/adminWorkflowAttributesDeleteRequest" + } } ], "tags": [ @@ -2321,86 +6328,158 @@ ] } }, - "/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}": { - "get": { - "summary": "Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`.", - "operationId": "ListTaskExecutions", + "/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}": { + "put": { + "summary": "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow.", + "operationId": "UpdateWorkflowAttributes", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminTaskExecutionList" + "$ref": "#/definitions/adminWorkflowAttributesUpdateResponse" } } }, "parameters": [ { - "name": "node_execution_id.execution_id.project", - "description": "Name of the project the resource belongs to.", + "name": "attributes.project", + "description": "Unique project id for which this set of 
attributes will be applied.", "in": "path", "required": true, "type": "string" }, { - "name": "node_execution_id.execution_id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "name": "attributes.domain", + "description": "Unique domain id for which this set of attributes will be applied.", "in": "path", "required": true, "type": "string" }, { - "name": "node_execution_id.execution_id.name", - "description": "User or system provided value for the resource.", + "name": "attributes.workflow", + "description": "Workflow name for which this set of attributes will be applied.", "in": "path", "required": true, "type": "string" }, { - "name": "node_execution_id.node_id", + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminWorkflowAttributesUpdateRequest" + } + } + ], + "tags": [ + "AdminService" + ] + } + }, + "/api/v1/workflow_attributes/{project}/{domain}/{workflow}": { + "get": { + "summary": "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow.", + "operationId": "GetWorkflowAttributes", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminWorkflowAttributesGetResponse" + } + } + }, + "parameters": [ + { + "name": "project", + "description": "Unique project id which this set of attributes references.\n+required", "in": "path", "required": true, "type": "string" }, { - "name": "limit", - "description": "Indicates the number of resources to be returned.\n+required.", + "name": "domain", + "description": "Unique domain id which this set of attributes references.\n+required", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "workflow", + "description": "Workflow name which this set of attributes references.\n+required", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": 
"resource_type", + "description": "Which type of matchable attributes to return.\n+required.\n\n - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", "in": "query", "required": false, - "type": "integer", - "format": "int64" + "type": "string", + "enum": [ + "TASK_RESOURCE", + "CLUSTER_RESOURCE", + "EXECUTION_QUEUE", + "EXECUTION_CLUSTER_LABEL", + "QUALITY_OF_SERVICE_SPECIFICATION", + "PLUGIN_OVERRIDE", + "WORKFLOW_EXECUTION_CONFIG", + "CLUSTER_ASSIGNMENT" + ], + "default": "TASK_RESOURCE" }, { - "name": "token", - "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "name": "org", + "description": "Optional, org key applied to the attributes.", "in": "query", "required": false, "type": "string" + } + ], + "tags": [ + "AdminService" + ] + }, + "delete": { + "summary": "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow.", + "operationId": "DeleteWorkflowAttributes", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminWorkflowAttributesDeleteResponse" + } + } + }, + "parameters": [ + { + "name": "project", + "description": "Unique project id which this 
set of attributes references.\n+required", + "in": "path", + "required": true, + "type": "string" }, { - "name": "filters", - "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", - "in": "query", - "required": false, + "name": "domain", + "description": "Unique domain id which this set of attributes references.\n+required", + "in": "path", + "required": true, "type": "string" }, { - "name": "sort_by.key", - "description": "Indicates an attribute to sort the response values.\n+required.", - "in": "query", - "required": false, + "name": "workflow", + "description": "Workflow name which this set of attributes references.\n+required", + "in": "path", + "required": true, "type": "string" }, { - "name": "sort_by.direction", - "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", - "in": "query", - "required": false, - "type": "string", - "enum": [ - "DESCENDING", - "ASCENDING" - ], - "default": "DESCENDING" + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminWorkflowAttributesDeleteRequest" + } } ], "tags": [ @@ -2408,10 +6487,10 @@ ] } }, - "/api/v1/task_ids/{project}/{domain}": { + "/api/v1/workflow_ids/{project}/{domain}": { "get": { - "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects.", - "operationId": "ListTaskIds", + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects.", + "operationId": "ListWorkflowIds", "responses": { "200": { "description": "A successful response.", @@ -2475,6 +6554,13 @@ "in": "query", "required": false, "type": "string" + }, + { + "name": "org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -2482,15 +6568,15 @@ ] } }, - "/api/v1/tasks": { + 
"/api/v1/workflows": { "post": { - "summary": "Create and upload a :ref:`ref_flyteidl.admin.Task` definition", - "operationId": "CreateTask", + "summary": "Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition", + "operationId": "CreateWorkflow", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/flyteidladminTaskCreateResponse" + "$ref": "#/definitions/adminWorkflowCreateResponse" } } }, @@ -2500,7 +6586,7 @@ "in": "body", "required": true, "schema": { - "$ref": "#/definitions/flyteidladminTaskCreateRequest" + "$ref": "#/definitions/adminWorkflowCreateRequest" } } ], @@ -2509,80 +6595,33 @@ ] } }, - "/api/v1/tasks/{id.project}/{id.domain}": { - "get": { - "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions.", - "operationId": "ListTasks2", + "/api/v1/workflows/org/{id.org}": { + "post": { + "summary": "Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition", + "operationId": "CreateWorkflow2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminTaskList" + "$ref": "#/definitions/adminWorkflowCreateResponse" } } }, "parameters": [ { - "name": "id.project", - "description": "Name of the project the resource belongs to.", + "name": "id.org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "id.domain", - "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", - "in": "path", + "name": "body", + "in": "body", "required": true, - "type": "string" - }, - { - "name": "id.name", - "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "limit", - "description": 
"Indicates the number of resources to be returned.\n+required.", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "token", - "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "filters", - "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "sort_by.key", - "description": "Indicates an attribute to sort the response values.\n+required.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "sort_by.direction", - "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", - "in": "query", - "required": false, - "type": "string", - "enum": [ - "DESCENDING", - "ASCENDING" - ], - "default": "DESCENDING" + "schema": { + "$ref": "#/definitions/adminWorkflowCreateRequest" + } } ], "tags": [ @@ -2590,100 +6629,26 @@ ] } }, - "/api/v1/tasks/{id.project}/{id.domain}/{id.name}": { + "/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}": { "get": { - "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions.", - "operationId": "ListTasks", + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions.", + "operationId": "ListWorkflows4", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminTaskList" + "$ref": "#/definitions/adminWorkflowList" } } }, "parameters": [ { - "name": "id.project", - "description": "Name of the project the resource belongs to.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.domain", - "description": "Name of the domain the resource belongs 
to.\nA domain can be considered as a subset within a specific project.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.name", - "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "name": "id.org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, "type": "string" }, - { - "name": "limit", - "description": "Indicates the number of resources to be returned.\n+required.", - "in": "query", - "required": false, - "type": "integer", - "format": "int64" - }, - { - "name": "token", - "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "filters", - "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "sort_by.key", - "description": "Indicates an attribute to sort the response values.\n+required.", - "in": "query", - "required": false, - "type": "string" - }, - { - "name": "sort_by.direction", - "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", - "in": "query", - "required": false, - "type": "string", - "enum": [ - "DESCENDING", - "ASCENDING" - ], - "default": "DESCENDING" - } - ], - "tags": [ - "AdminService" - ] - } - }, - "/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}": { - "get": { - "summary": "Fetch a :ref:`ref_flyteidl.admin.Task` definition.", - "operationId": "GetTask", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/adminTask" - } - } - 
}, - "parameters": [ { "name": "id.project", "description": "Name of the project the resource belongs to.", @@ -2700,96 +6665,51 @@ }, { "name": "id.name", - "description": "User provided value for the resource.", - "in": "path", - "required": true, - "type": "string" - }, - { - "name": "id.version", - "description": "Specific version of the resource.", - "in": "path", - "required": true, + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'.", + "in": "query", + "required": false, "type": "string" }, { - "name": "id.resource_type", - "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", "in": "query", "required": false, - "type": "string", - "enum": [ - "UNSPECIFIED", - "TASK", - "WORKFLOW", - "LAUNCH_PLAN", - "DATASET" - ], - "default": "UNSPECIFIED" - } - ], - "tags": [ - "AdminService" - ] - } - }, - "/api/v1/version": { - "get": { - "operationId": "GetVersion", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/adminGetVersionResponse" - } - } - }, - "tags": [ - "AdminService" - ] - } - }, - "/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}": { - "put": { - "summary": "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow.", - "operationId": "UpdateWorkflowAttributes", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/adminWorkflowAttributesUpdateResponse" - } - } - }, - "parameters": [ + "type": "integer", + "format": "int64" + }, { - "name": "attributes.project", - "description": "Unique project id for which this set of attributes will be applied.", - "in": "path", - "required": true, + "name": "token", + "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, "type": "string" }, { - "name": "attributes.domain", - "description": "Unique domain id for which this set of attributes will be applied.", - "in": "path", - "required": true, + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : \u003cLink\u003e\n+optional.", + "in": "query", + "required": false, "type": "string" }, { - "name": 
"attributes.workflow", - "description": "Workflow name for which this set of attributes will be applied.", - "in": "path", - "required": true, + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, "type": "string" }, { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/adminWorkflowAttributesUpdateRequest" - } + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" } ], "tags": [ @@ -2797,103 +6717,156 @@ ] } }, - "/api/v1/workflow_attributes/{project}/{domain}/{workflow}": { + "/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}": { "get": { - "summary": "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow.", - "operationId": "GetWorkflowAttributes", + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions.", + "operationId": "ListWorkflows2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminWorkflowAttributesGetResponse" + "$ref": "#/definitions/adminWorkflowList" } } }, "parameters": [ { - "name": "project", - "description": "Unique project id which this set of attributes references.\n+required", + "name": "id.org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "domain", - "description": "Unique domain id which this set of attributes references.\n+required", + "name": "id.project", + "description": "Name of the project the resource belongs to.", "in": "path", "required": true, "type": "string" }, { - "name": "workflow", - "description": 
"Workflow name which this set of attributes references.\n+required", + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": true, "type": "string" }, { - "name": "resource_type", - "description": "Which type of matchable attributes to return.\n+required.\n\n - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.", + "name": "id.name", + "description": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, this server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\nMore info on constructing filters : 
\u003cLink\u003e\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", "in": "query", "required": false, "type": "string", "enum": [ - "TASK_RESOURCE", - "CLUSTER_RESOURCE", - "EXECUTION_QUEUE", - "EXECUTION_CLUSTER_LABEL", - "QUALITY_OF_SERVICE_SPECIFICATION", - "PLUGIN_OVERRIDE", - "WORKFLOW_EXECUTION_CONFIG", - "CLUSTER_ASSIGNMENT" + "DESCENDING", + "ASCENDING" ], - "default": "TASK_RESOURCE" + "default": "DESCENDING" } ], "tags": [ "AdminService" ] - }, - "delete": { - "summary": "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow.", - "operationId": "DeleteWorkflowAttributes", + } + }, + "/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}": { + "get": { + "summary": "Fetch a :ref:`ref_flyteidl.admin.Workflow` definition.", + "operationId": "GetWorkflow2", "responses": { "200": { "description": "A successful response.", "schema": { - "$ref": "#/definitions/adminWorkflowAttributesDeleteResponse" + "$ref": "#/definitions/adminWorkflow" } } }, "parameters": [ { - "name": "project", - "description": "Unique project id which this set of attributes references.\n+required", + "name": "id.org", + "description": "Optional, org key applied to the resource.", "in": "path", "required": true, "type": "string" }, { - "name": "domain", - "description": "Unique domain id which this set of attributes references.\n+required", + "name": "id.project", + "description": "Name of the project the resource belongs to.", "in": "path", "required": true, "type": "string" }, { - "name": "workflow", - "description": 
"Workflow name which this set of attributes references.\n+required", + "name": "id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", "in": "path", "required": true, "type": "string" }, { - "name": "body", - "in": "body", + "name": "id.name", + "description": "User provided value for the resource.", + "in": "path", "required": true, - "schema": { - "$ref": "#/definitions/adminWorkflowAttributesDeleteRequest" - } + "type": "string" + }, + { + "name": "id.version", + "description": "Specific version of the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "id.resource_type", + "description": "Identifies the specific type of resource that this identifier corresponds to.\n\n - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects.\nEventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects \nin a similar manner to other Flyte objects", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNSPECIFIED", + "TASK", + "WORKFLOW", + "LAUNCH_PLAN", + "DATASET" + ], + "default": "UNSPECIFIED" } ], "tags": [ @@ -2901,10 +6874,10 @@ ] } }, - "/api/v1/workflow_ids/{project}/{domain}": { + "/api/v1/workflows/org/{org}/{project}/{domain}": { "get": { "summary": "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects.", - "operationId": "ListWorkflowIds", + "operationId": "ListWorkflowIds2", "responses": { "200": { "description": "A successful response.", @@ -2914,6 +6887,13 @@ } }, "parameters": [ + { + "name": "org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, { "name": "project", "description": "Name of the project that contains the identifiers.\n+required", @@ -2975,37 +6955,10 @@ ] } }, - "/api/v1/workflows": { - "post": { - "summary": "Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition", - "operationId": "CreateWorkflow", - "responses": { - "200": { - "description": "A successful response.", - "schema": { - "$ref": "#/definitions/adminWorkflowCreateResponse" - } - } - }, - "parameters": [ - { - "name": "body", - "in": "body", - "required": true, - "schema": { - "$ref": "#/definitions/adminWorkflowCreateRequest" - } - } - ], - "tags": [ - "AdminService" - ] - } - }, "/api/v1/workflows/{id.project}/{id.domain}": { "get": { "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions.", - "operationId": "ListWorkflows2", + "operationId": "ListWorkflows3", "responses": { "200": { "description": "A successful response.", @@ -3036,6 +6989,13 @@ "required": false, "type": "string" }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, { 
"name": "limit", "description": "Indicates the number of resources to be returned.\n+required.", @@ -3117,6 +7077,13 @@ "required": true, "type": "string" }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, { "name": "limit", "description": "Indicates the number of resources to be returned.\n+required.", @@ -3219,6 +7186,13 @@ "DATASET" ], "default": "UNSPECIFIED" + }, + { + "name": "id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -3696,6 +7670,14 @@ }, "description": "Namespace within a project commonly used to differentiate between different service instances.\ne.g. \"production\", \"development\", etc." }, + "adminDynamicNodeWorkflowResponse": { + "type": "object", + "properties": { + "compiled_workflow": { + "$ref": "#/definitions/coreCompiledWorkflowClosure" + } + } + }, "adminEmailNotification": { "type": "object", "properties": { @@ -3839,6 +7821,10 @@ "inputs": { "$ref": "#/definitions/coreLiteralMap", "title": "The inputs required to start the execution. All required inputs must be\nincluded in this map. If not required and not provided, defaults apply.\n+optional" + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the resource." } }, "description": "Request to launch an execution with the given project, domain and optionally-assigned name." @@ -4403,9 +8389,13 @@ }, "launch_plan": { "type": "string" + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the resource." } }, - "description": "Represents a custom set of attributes applied for either a domain; a domain and project; or\ndomain, project and workflow name.\nThese are used to override system level defaults for kubernetes cluster resource management,\ndefault execution values, and more all across different levels of specificity." 
+ "description": "Represents a custom set of attributes applied for either a domain (and optional org); a domain and project (and optional org);\nor domain, project and workflow name (and optional org).\nThese are used to override system level defaults for kubernetes cluster resource management,\ndefault execution values, and more all across different levels of specificity." }, "adminMatchableResource": { "type": "string", @@ -4483,6 +8473,10 @@ "name": { "type": "string", "title": "User provided value for the resource.\nThe combination of project + domain + name uniquely identifies the resource.\n+optional - in certain contexts - like 'List API', 'Launch plans'" + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the resource." } }, "description": "Encapsulation of fields that identifies a Flyte resource.\nA Flyte resource can be a task, workflow or launch plan.\nA resource can internally have multiple versions and is uniquely identified\nby project, domain, and name." @@ -4817,6 +8811,10 @@ }, "state": { "$ref": "#/definitions/ProjectProjectState" + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the resource." } }, "description": "Top-level namespace used to classify different entities like workflows and executions." @@ -4830,6 +8828,10 @@ }, "matching_attributes": { "$ref": "#/definitions/adminMatchingAttributes" + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the project." } }, "title": "Defines a set of custom matching attributes at the project level.\nFor more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" @@ -4844,6 +8846,10 @@ "resource_type": { "$ref": "#/definitions/adminMatchableResource", "title": "Which type of matchable attributes to delete.\n+required" + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the project." 
} }, "title": "Request to delete a set matchable project level attribute override.\nFor more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" @@ -4888,6 +8894,10 @@ }, "matching_attributes": { "$ref": "#/definitions/adminMatchingAttributes" + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the attributes." } }, "title": "Defines a set of custom matching attributes which defines resource defaults for a project and domain.\nFor more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" @@ -4906,6 +8916,10 @@ "resource_type": { "$ref": "#/definitions/adminMatchableResource", "title": "Which type of matchable attributes to delete.\n+required" + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the attributes." } }, "title": "Request to delete a set matchable project domain attribute override.\nFor more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" @@ -5366,6 +9380,10 @@ }, "matching_attributes": { "$ref": "#/definitions/adminMatchingAttributes" + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the attributes." } }, "title": "Defines a set of custom matching attributes which defines resource defaults for a project, domain and workflow.\nFor more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" @@ -5388,6 +9406,10 @@ "resource_type": { "$ref": "#/definitions/adminMatchableResource", "title": "Which type of matchable attributes to delete.\n+required" + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the attributes." 
} }, "title": "Request to delete a set matchable workflow attribute override.\nFor more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" @@ -5635,11 +9657,15 @@ "format": "int64" }, "partition_key": { - "type": "string", - "title": "These two fields are only relevant in the partition value case" + "type": "string" + }, + "bind_to_time_partition": { + "type": "boolean", + "format": "boolean" }, "transform": { - "type": "string" + "type": "string", + "title": "This is only relevant in the time partition case" } }, "title": "Only valid for triggers" @@ -5654,7 +9680,12 @@ "type": "string" }, "partitions": { - "$ref": "#/definitions/corePartitions" + "$ref": "#/definitions/corePartitions", + "description": "Think of a partition as a tag on an Artifact, except it's a key-value pair.\nDifferent partitions naturally have different versions (execution ids)." + }, + "time_partition": { + "$ref": "#/definitions/coreTimePartition", + "description": "There is no such thing as an empty time partition - if it's not set, then there is no time partition." 
} } }, @@ -5856,10 +9887,11 @@ "CACHE_POPULATED", "CACHE_LOOKUP_FAILURE", "CACHE_PUT_FAILURE", - "CACHE_SKIPPED" + "CACHE_SKIPPED", + "CACHE_EVICTED" ], "default": "CACHE_DISABLED", - "description": "- CACHE_DISABLED: Used to indicate that caching was disabled\n - CACHE_MISS: Used to indicate that the cache lookup resulted in no matches\n - CACHE_HIT: used to indicate that the associated artifact was a result of a previous execution\n - CACHE_POPULATED: used to indicate that the resultant artifact was added to the cache\n - CACHE_LOOKUP_FAILURE: Used to indicate that cache lookup failed because of an error\n - CACHE_PUT_FAILURE: Used to indicate that cache lookup failed because of an error\n - CACHE_SKIPPED: Used to indicate the cache lookup was skipped", + "description": "- CACHE_DISABLED: Used to indicate that caching was disabled\n - CACHE_MISS: Used to indicate that the cache lookup resulted in no matches\n - CACHE_HIT: used to indicate that the associated artifact was a result of a previous execution\n - CACHE_POPULATED: used to indicate that the resultant artifact was added to the cache\n - CACHE_LOOKUP_FAILURE: Used to indicate that cache lookup failed because of an error\n - CACHE_PUT_FAILURE: Used to indicate that cache lookup failed because of an error\n - CACHE_SKIPPED: Used to indicate the cache lookup was skipped\n - CACHE_EVICTED: Used to indicate that the cache was evicted", "title": "Indicates the status of CatalogCaching. The reason why this is not embedded in TaskNodeMetadata is, that we may use for other types of nodes as well in the future" }, "coreCatalogMetadata": { @@ -6200,6 +10232,10 @@ "version": { "type": "string", "description": "Specific version of the resource." + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the resource." } }, "description": "Encapsulation of fields that uniquely identifies a Flyte resource." 
@@ -6327,7 +10363,13 @@ "type": "object", "properties": { "static_value": { - "type": "string" + "type": "string", + "title": "The string static value is for use in the Partitions object" + }, + "time_value": { + "type": "string", + "format": "date-time", + "title": "The time value is for use in the TimePartition case" }, "triggered_binding": { "$ref": "#/definitions/coreArtifactBindingData" @@ -7246,6 +11288,14 @@ }, "description": "A Task structure that uniquely identifies a task in the system\nTasks are registered as a first step in the system." }, + "coreTimePartition": { + "type": "object", + "properties": { + "value": { + "$ref": "#/definitions/coreLabelValue" + } + } + }, "coreTypeAnnotation": { "type": "object", "properties": { @@ -7370,6 +11420,10 @@ "name": { "type": "string", "description": "User or system provided value for the resource." + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the resource." } }, "title": "Encapsulation of fields that uniquely identifies a Flyte workflow execution" diff --git a/flyteidl/gen/pb-go/flyteidl/service/agent.pb.go b/flyteidl/gen/pb-go/flyteidl/service/agent.pb.go index c816a1f3ac..478c62819c 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/agent.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/service/agent.pb.go @@ -29,28 +29,30 @@ const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package func init() { proto.RegisterFile("flyteidl/service/agent.proto", fileDescriptor_f7d1dfd1fb77d2ef) } var fileDescriptor_f7d1dfd1fb77d2ef = []byte{ - // 324 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xbd, 0x4e, 0xf3, 0x30, - 0x14, 0x86, 0xbf, 0xaf, 0x03, 0x20, 0x0f, 0xa8, 0x58, 0x55, 0x87, 0xa8, 0xe2, 0xa7, 0x3b, 0xb1, - 0x28, 0x33, 0x42, 0x05, 0xa4, 0x2e, 0x65, 0x01, 0xba, 0xb0, 0x9d, 0x26, 0x07, 0x63, 0x25, 0xb1, - 0x43, 0x7c, 0x5a, 0xa9, 0x42, 0x2c, 0xdc, 0x00, 0x03, 0x97, 0xc6, 0x2d, 0x70, 0x01, 0x5c, 0x02, - 0xc2, 
0xf9, 0xa5, 0x10, 0xb6, 0x28, 0xcf, 0xeb, 0xe7, 0xf5, 0xcf, 0x61, 0x83, 0xbb, 0x78, 0x45, - 0xa8, 0xc2, 0x58, 0x58, 0xcc, 0x96, 0x2a, 0x40, 0x01, 0x12, 0x35, 0xf9, 0x69, 0x66, 0xc8, 0xf0, - 0x6e, 0x49, 0xfd, 0x82, 0x7a, 0x03, 0x69, 0x8c, 0x8c, 0x51, 0x40, 0xaa, 0x04, 0x68, 0x6d, 0x08, - 0x48, 0x19, 0x6d, 0xf3, 0xbc, 0xe7, 0x55, 0x36, 0x08, 0x13, 0xa5, 0x9b, 0xae, 0xd1, 0x4b, 0x87, - 0xed, 0x8c, 0xed, 0x4a, 0x07, 0xe3, 0xaf, 0x9f, 0xd7, 0xb9, 0x8f, 0xcf, 0x18, 0x3b, 0xcf, 0x10, - 0x08, 0x6f, 0xc0, 0x46, 0xfc, 0xc0, 0xaf, 0x0a, 0x9d, 0xc0, 0xaf, 0xd9, 0x15, 0x3e, 0x2c, 0xd0, - 0x92, 0x37, 0xfc, 0x2b, 0x62, 0x53, 0xa3, 0x2d, 0x0e, 0xff, 0xf1, 0x29, 0xdb, 0x9c, 0x20, 0x39, - 0xe7, 0xee, 0xfa, 0x82, 0x02, 0x94, 0xc2, 0xbd, 0x56, 0x5e, 0xd9, 0x66, 0x8c, 0x5d, 0x60, 0x8c, - 0x6d, 0x9b, 0xac, 0x59, 0xeb, 0x26, 0x9b, 0x91, 0x52, 0x3b, 0xfa, 0xf8, 0xcf, 0x7a, 0xee, 0x32, - 0x2e, 0x91, 0x20, 0x04, 0x82, 0xf2, 0x52, 0x22, 0xb6, 0x35, 0x41, 0x72, 0x88, 0xff, 0xb6, 0x3d, - 0x47, 0xca, 0xae, 0xfd, 0xf6, 0x40, 0xd1, 0x34, 0x78, 0x7e, 0x7b, 0x7f, 0xed, 0xf4, 0x79, 0xcf, - 0xbd, 0xda, 0xf2, 0x28, 0x7f, 0x16, 0xf1, 0xa8, 0x21, 0xc1, 0x27, 0x1e, 0x31, 0x36, 0x55, 0x36, - 0x5f, 0x62, 0x7f, 0x1e, 0xae, 0x66, 0xad, 0x87, 0x6b, 0x46, 0x8a, 0xca, 0xbe, 0xab, 0xec, 0xf2, - 0xed, 0x6f, 0x95, 0xf6, 0xec, 0xf4, 0xf6, 0x44, 0x2a, 0xba, 0x5f, 0xcc, 0xfd, 0xc0, 0x24, 0xc2, - 0x79, 0x4c, 0x26, 0xf3, 0x0f, 0x51, 0x0d, 0x8f, 0x44, 0x2d, 0xd2, 0xf9, 0xa1, 0x34, 0x62, 0x7d, - 0x3a, 0xe7, 0x1b, 0x6e, 0x98, 0x8e, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xf5, 0x2d, 0xd1, 0xd9, - 0xb8, 0x02, 0x00, 0x00, + // 367 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x93, 0xcd, 0x4a, 0x33, 0x31, + 0x14, 0x86, 0xbf, 0x4f, 0x41, 0x25, 0x42, 0xa9, 0xa1, 0x74, 0x31, 0x14, 0x7f, 0x2a, 0xba, 0x73, + 0x82, 0x75, 0x2d, 0x52, 0x15, 0xba, 0x69, 0x37, 0x6a, 0x41, 0xdc, 0xa5, 0xd3, 0x63, 0x0c, 0x9d, + 0x26, 0xe3, 0xe4, 0xb4, 0x50, 0xc4, 0x8d, 0xb7, 0xe0, 0x6d, 0xb9, 
0xf3, 0x16, 0xbc, 0x00, 0x2f, + 0x41, 0x9a, 0x99, 0x4c, 0x6b, 0x35, 0x75, 0x57, 0xfa, 0xbc, 0x79, 0xde, 0x30, 0xe7, 0x84, 0xd4, + 0xee, 0xe3, 0x09, 0x82, 0xec, 0xc7, 0xcc, 0x40, 0x3a, 0x96, 0x11, 0x30, 0x2e, 0x40, 0x61, 0x98, + 0xa4, 0x1a, 0x35, 0x2d, 0x3b, 0x1a, 0xe6, 0x34, 0xa8, 0x09, 0xad, 0x45, 0x0c, 0x8c, 0x27, 0x92, + 0x71, 0xa5, 0x34, 0x72, 0x94, 0x5a, 0x99, 0x2c, 0x1f, 0x04, 0x85, 0x8d, 0xf7, 0x87, 0x52, 0xcd, + 0xbb, 0x1a, 0x6f, 0xab, 0x64, 0xab, 0x69, 0x26, 0x2a, 0x6a, 0x4e, 0xff, 0xbc, 0xce, 0x7c, 0xb4, + 0x4b, 0xc8, 0x45, 0x0a, 0x1c, 0xe1, 0x86, 0x9b, 0x01, 0xdd, 0x0b, 0x8b, 0x42, 0x2b, 0x08, 0x67, + 0xec, 0x0a, 0x1e, 0x47, 0x60, 0x30, 0xa8, 0x2f, 0x8b, 0x98, 0x44, 0x2b, 0x03, 0xf5, 0x7f, 0xb4, + 0x4d, 0xd6, 0x5b, 0x80, 0xd6, 0xb9, 0xbd, 0x78, 0x20, 0x07, 0x4e, 0xb8, 0xe3, 0xe5, 0x85, 0xad, + 0x4b, 0xc8, 0x25, 0xc4, 0xe0, 0xbb, 0xe4, 0x8c, 0x79, 0x2f, 0x39, 0x1f, 0x29, 0xb4, 0x9c, 0x94, + 0xf2, 0xae, 0x0e, 0x60, 0x2a, 0x23, 0x43, 0x0f, 0x3c, 0x77, 0xc9, 0xb9, 0xd3, 0x1f, 0xfe, 0x15, + 0x2b, 0x2a, 0x6e, 0xc9, 0x66, 0xce, 0xda, 0x5a, 0x18, 0x5a, 0xf7, 0x1c, 0x9c, 0x42, 0x27, 0xdf, + 0x5f, 0x9a, 0x71, 0xe6, 0xc6, 0xe7, 0x7f, 0x52, 0xb1, 0x93, 0xec, 0x00, 0xf2, 0x3e, 0x47, 0xee, + 0x26, 0x3a, 0x20, 0x1b, 0x2d, 0x40, 0x8b, 0xe8, 0x6f, 0xdf, 0xd6, 0x12, 0x57, 0xb6, 0xeb, 0x0f, + 0xe4, 0x4d, 0xb5, 0x97, 0xf7, 0x8f, 0xd7, 0x95, 0x2a, 0xad, 0xd8, 0x95, 0x1b, 0x1f, 0x67, 0x3b, + 0xc5, 0x9e, 0x14, 0x1f, 0xc2, 0x33, 0x1d, 0x10, 0xd2, 0x96, 0x26, 0x3b, 0x62, 0x7e, 0x4e, 0x66, + 0xc6, 0xbc, 0x93, 0x99, 0x8f, 0xe4, 0x95, 0x55, 0x5b, 0x59, 0xa6, 0xa5, 0x6f, 0x95, 0xe6, 0xfc, + 0xec, 0xee, 0x54, 0x48, 0x7c, 0x18, 0xf5, 0xc2, 0x48, 0x0f, 0x99, 0xf5, 0xe8, 0x54, 0x64, 0x3f, + 0x58, 0xb1, 0xf9, 0x02, 0x14, 0x4b, 0x7a, 0x47, 0x42, 0xb3, 0xc5, 0xa7, 0xd5, 0x5b, 0xb3, 0x2f, + 0xe1, 0xe4, 0x2b, 0x00, 0x00, 0xff, 0xff, 0x64, 0x33, 0x1f, 0x2d, 0x75, 0x03, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -71,6 +73,14 @@ type AsyncAgentServiceClient interface { GetTask(ctx context.Context, in *admin.GetTaskRequest, opts ...grpc.CallOption) (*admin.GetTaskResponse, error) // Delete the task resource. DeleteTask(ctx context.Context, in *admin.DeleteTaskRequest, opts ...grpc.CallOption) (*admin.DeleteTaskResponse, error) + // GetTaskMetrics returns one or more task execution metrics, if available. + // + // Errors include + // * OutOfRange if metrics are not available for the specified task time range + // * various other errors + GetTaskMetrics(ctx context.Context, in *admin.GetTaskMetricsRequest, opts ...grpc.CallOption) (*admin.GetTaskMetricsResponse, error) + // GetTaskLogs returns task execution logs, if available. + GetTaskLogs(ctx context.Context, in *admin.GetTaskLogsRequest, opts ...grpc.CallOption) (*admin.GetTaskLogsResponse, error) } type asyncAgentServiceClient struct { @@ -108,6 +118,24 @@ func (c *asyncAgentServiceClient) DeleteTask(ctx context.Context, in *admin.Dele return out, nil } +func (c *asyncAgentServiceClient) GetTaskMetrics(ctx context.Context, in *admin.GetTaskMetricsRequest, opts ...grpc.CallOption) (*admin.GetTaskMetricsResponse, error) { + out := new(admin.GetTaskMetricsResponse) + err := c.cc.Invoke(ctx, "/flyteidl.service.AsyncAgentService/GetTaskMetrics", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *asyncAgentServiceClient) GetTaskLogs(ctx context.Context, in *admin.GetTaskLogsRequest, opts ...grpc.CallOption) (*admin.GetTaskLogsResponse, error) { + out := new(admin.GetTaskLogsResponse) + err := c.cc.Invoke(ctx, "/flyteidl.service.AsyncAgentService/GetTaskLogs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // AsyncAgentServiceServer is the server API for AsyncAgentService service. type AsyncAgentServiceServer interface { // Send a task create request to the agent server. 
@@ -116,6 +144,14 @@ type AsyncAgentServiceServer interface { GetTask(context.Context, *admin.GetTaskRequest) (*admin.GetTaskResponse, error) // Delete the task resource. DeleteTask(context.Context, *admin.DeleteTaskRequest) (*admin.DeleteTaskResponse, error) + // GetTaskMetrics returns one or more task execution metrics, if available. + // + // Errors include + // * OutOfRange if metrics are not available for the specified task time range + // * various other errors + GetTaskMetrics(context.Context, *admin.GetTaskMetricsRequest) (*admin.GetTaskMetricsResponse, error) + // GetTaskLogs returns task execution logs, if available. + GetTaskLogs(context.Context, *admin.GetTaskLogsRequest) (*admin.GetTaskLogsResponse, error) } // UnimplementedAsyncAgentServiceServer can be embedded to have forward compatible implementations. @@ -131,6 +167,12 @@ func (*UnimplementedAsyncAgentServiceServer) GetTask(ctx context.Context, req *a func (*UnimplementedAsyncAgentServiceServer) DeleteTask(ctx context.Context, req *admin.DeleteTaskRequest) (*admin.DeleteTaskResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method DeleteTask not implemented") } +func (*UnimplementedAsyncAgentServiceServer) GetTaskMetrics(ctx context.Context, req *admin.GetTaskMetricsRequest) (*admin.GetTaskMetricsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetTaskMetrics not implemented") +} +func (*UnimplementedAsyncAgentServiceServer) GetTaskLogs(ctx context.Context, req *admin.GetTaskLogsRequest) (*admin.GetTaskLogsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetTaskLogs not implemented") +} func RegisterAsyncAgentServiceServer(s *grpc.Server, srv AsyncAgentServiceServer) { s.RegisterService(&_AsyncAgentService_serviceDesc, srv) @@ -190,6 +232,42 @@ func _AsyncAgentService_DeleteTask_Handler(srv interface{}, ctx context.Context, return interceptor(ctx, in, info, handler) } +func _AsyncAgentService_GetTaskMetrics_Handler(srv 
interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(admin.GetTaskMetricsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AsyncAgentServiceServer).GetTaskMetrics(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/flyteidl.service.AsyncAgentService/GetTaskMetrics", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AsyncAgentServiceServer).GetTaskMetrics(ctx, req.(*admin.GetTaskMetricsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AsyncAgentService_GetTaskLogs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(admin.GetTaskLogsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AsyncAgentServiceServer).GetTaskLogs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/flyteidl.service.AsyncAgentService/GetTaskLogs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AsyncAgentServiceServer).GetTaskLogs(ctx, req.(*admin.GetTaskLogsRequest)) + } + return interceptor(ctx, in, info, handler) +} + var _AsyncAgentService_serviceDesc = grpc.ServiceDesc{ ServiceName: "flyteidl.service.AsyncAgentService", HandlerType: (*AsyncAgentServiceServer)(nil), @@ -206,6 +284,14 @@ var _AsyncAgentService_serviceDesc = grpc.ServiceDesc{ MethodName: "DeleteTask", Handler: _AsyncAgentService_DeleteTask_Handler, }, + { + MethodName: "GetTaskMetrics", + Handler: _AsyncAgentService_GetTaskMetrics_Handler, + }, + { + MethodName: "GetTaskLogs", + Handler: _AsyncAgentService_GetTaskLogs_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "flyteidl/service/agent.proto", diff --git a/flyteidl/gen/pb-go/flyteidl/service/agent.swagger.json 
b/flyteidl/gen/pb-go/flyteidl/service/agent.swagger.json index b7eacff5f7..dace78dac7 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/agent.swagger.json +++ b/flyteidl/gen/pb-go/flyteidl/service/agent.swagger.json @@ -259,6 +259,36 @@ }, "description": "A response containing an agent." }, + "adminGetTaskLogsResponse": { + "type": "object", + "properties": { + "results": { + "type": "array", + "items": { + "type": "string" + }, + "description": "The execution log results." + }, + "token": { + "type": "string", + "description": "In the case of multiple pages of results, the server-provided token can be used to fetch the next page\nin a query. If there are no more results, this value will be empty." + } + }, + "description": "A response containing the logs for a task execution." + }, + "adminGetTaskMetricsResponse": { + "type": "object", + "properties": { + "results": { + "type": "array", + "items": { + "$ref": "#/definitions/coreExecutionMetricResult" + }, + "description": "The execution metric results." + } + }, + "description": "A response containing a list of metrics for a task execution." + }, "adminGetTaskResponse": { "type": "object", "properties": { @@ -292,7 +322,7 @@ "properties": { "state": { "$ref": "#/definitions/flyteidladminState", - "description": "The state of the execution is used to control its visibility in the UI/CLI." + "description": "DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI." }, "outputs": { "$ref": "#/definitions/coreLiteralMap", @@ -308,6 +338,10 @@ "$ref": "#/definitions/coreTaskLog" }, "description": "log information for the task execution." + }, + "phase": { + "$ref": "#/definitions/coreTaskExecutionPhase", + "description": "The phase of the execution is used to determine the phase of the plugin's execution." 
} } }, @@ -319,11 +353,15 @@ "format": "int64" }, "partition_key": { - "type": "string", - "title": "These two fields are only relevant in the partition value case" + "type": "string" + }, + "bind_to_time_partition": { + "type": "boolean", + "format": "boolean" }, "transform": { - "type": "string" + "type": "string", + "title": "This is only relevant in the time partition case" } }, "title": "Only valid for triggers" @@ -338,7 +376,12 @@ "type": "string" }, "partitions": { - "$ref": "#/definitions/corePartitions" + "$ref": "#/definitions/corePartitions", + "description": "Think of a partition as a tag on an Artifact, except it's a key-value pair.\nDifferent partitions naturally have different versions (execution ids)." + }, + "time_partition": { + "$ref": "#/definitions/coreTimePartition", + "description": "There is no such thing as an empty time partition - if it's not set, then there is no time partition." } } }, @@ -533,6 +576,20 @@ }, "description": "Represents an error thrown from a node." }, + "coreExecutionMetricResult": { + "type": "object", + "properties": { + "metric": { + "type": "string", + "description": "The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG." + }, + "data": { + "$ref": "#/definitions/protobufStruct", + "title": "The result data in prometheus range query result format\nhttps://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.\nThis may include multiple time series, differentiated by their metric labels.\nStart time is greater of (execution attempt start, 48h ago)\nEnd time is lesser of (execution attempt end, now)" + } + }, + "description": "ExecutionMetrics is a collection of metrics that are collected during the execution of a Flyte task." + }, "coreExtendedResources": { "type": "object", "properties": { @@ -597,6 +654,10 @@ "version": { "type": "string", "description": "Specific version of the resource." 
+ }, + "org": { + "type": "string", + "description": "Optional, org key applied to the resource." } }, "description": "Encapsulation of fields that uniquely identifies a Flyte resource." @@ -687,7 +748,13 @@ "type": "object", "properties": { "static_value": { - "type": "string" + "type": "string", + "title": "The string static value is for use in the Partitions object" + }, + "time_value": { + "type": "string", + "format": "date-time", + "title": "The time value is for use in the TimePartition case" }, "triggered_binding": { "$ref": "#/definitions/coreArtifactBindingData" @@ -1155,6 +1222,21 @@ }, "description": "Encapsulation of fields that identify a Flyte task execution entity." }, + "coreTaskExecutionPhase": { + "type": "string", + "enum": [ + "UNDEFINED", + "QUEUED", + "RUNNING", + "SUCCEEDED", + "ABORTED", + "FAILED", + "INITIALIZING", + "WAITING_FOR_RESOURCES" + ], + "default": "UNDEFINED", + "title": "- INITIALIZING: To indicate cases where task is initializing, like: ErrImagePull, ContainerCreating, PodInitializing\n - WAITING_FOR_RESOURCES: To address cases, where underlying resource is not available: Backoff error, Resource quota exceeded" + }, "coreTaskLog": { "type": "object", "properties": { @@ -1291,6 +1373,14 @@ }, "description": "A Task structure that uniquely identifies a task in the system\nTasks are registered as a first step in the system." }, + "coreTimePartition": { + "type": "object", + "properties": { + "value": { + "$ref": "#/definitions/coreLabelValue" + } + } + }, "coreTypeAnnotation": { "type": "object", "properties": { @@ -1407,6 +1497,10 @@ "name": { "type": "string", "description": "User or system provided value for the resource." + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the resource." 
} }, "title": "Encapsulation of fields that uniquely identifies a Flyte workflow execution" diff --git a/flyteidl/gen/pb-go/flyteidl/service/dataproxy.swagger.json b/flyteidl/gen/pb-go/flyteidl/service/dataproxy.swagger.json index 0fd08544b3..939f94f862 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/dataproxy.swagger.json +++ b/flyteidl/gen/pb-go/flyteidl/service/dataproxy.swagger.json @@ -579,6 +579,10 @@ "name": { "type": "string", "description": "User or system provided value for the resource." + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the resource." } }, "title": "Encapsulation of fields that uniquely identifies a Flyte workflow execution" diff --git a/flyteidl/gen/pb-go/flyteidl/service/external_plugin_service.pb.go b/flyteidl/gen/pb-go/flyteidl/service/external_plugin_service.pb.go index ab64516b19..ddd75fa855 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/external_plugin_service.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/service/external_plugin_service.pb.go @@ -371,41 +371,41 @@ func init() { } var fileDescriptor_74cbdb08eef5b1d1 = []byte{ - // 544 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x51, 0x6f, 0xd2, 0x50, - 0x14, 0xb6, 0x9d, 0x1b, 0xe3, 0xcc, 0x4d, 0x76, 0x13, 0x62, 0xc7, 0x34, 0x41, 0xe6, 0xc3, 0x62, - 0xb2, 0x36, 0xb2, 0x07, 0x13, 0x13, 0x63, 0x18, 0x54, 0x24, 0xb2, 0x86, 0x14, 0x78, 0xd0, 0x2c, - 0x69, 0x5a, 0x38, 0x60, 0xb7, 0xae, 0xbd, 0xb6, 0xb7, 0x66, 0xfc, 0x03, 0x7f, 0xca, 0x7e, 0xa6, - 0xb9, 0xf7, 0xb6, 0x1d, 0x30, 0xe5, 0xc1, 0x37, 0x7a, 0xce, 0x77, 0xbe, 0x73, 0xbe, 0xef, 0x9c, - 0x0b, 0xe8, 0xb3, 0x60, 0xc1, 0xd0, 0x9f, 0x06, 0x46, 0x82, 0xf1, 0x2f, 0x7f, 0x82, 0x06, 0xde, - 0x31, 0x8c, 0x43, 0x37, 0x70, 0x68, 0x90, 0xce, 0xfd, 0xd0, 0xc9, 0xe2, 0x3a, 0x8d, 0x23, 0x16, - 0x91, 0x4a, 0x8e, 0xd7, 0xb3, 0x78, 0xed, 0x65, 0xc1, 0x30, 0x89, 0x62, 0x34, 0x02, 0x9f, 0x61, - 0xec, 0x06, 0x89, 0xc4, 0xd7, 0x8e, 0x56, 0xb3, 0xcc, 0x4d, 
0x6e, 0xf2, 0xd4, 0xab, 0xd5, 0x94, - 0x1f, 0x32, 0x8c, 0x67, 0x6e, 0xde, 0xa9, 0x71, 0xaf, 0xc0, 0xe1, 0xc8, 0x4d, 0x6e, 0xda, 0x31, - 0xba, 0x0c, 0x6d, 0xfc, 0x99, 0x62, 0xc2, 0xc8, 0x3b, 0xd8, 0xf1, 0x43, 0x9a, 0xb2, 0x44, 0x53, - 0xea, 0xca, 0xe9, 0x5e, 0xf3, 0xa8, 0x10, 0xa0, 0x73, 0x16, 0xbd, 0x2f, 0xdb, 0x5f, 0xba, 0xd4, - 0xce, 0x80, 0xe4, 0x3d, 0xec, 0x32, 0xbc, 0xa5, 0x81, 0xcb, 0x50, 0x53, 0x45, 0xd1, 0xf1, 0x5a, - 0x11, 0x6f, 0x33, 0xca, 0x20, 0x76, 0x01, 0x26, 0x27, 0xb0, 0x1f, 0xa5, 0x8c, 0xa6, 0xcc, 0xa1, - 0x31, 0xce, 0xfc, 0x3b, 0x6d, 0xab, 0xae, 0x9c, 0x96, 0xed, 0x67, 0x32, 0x38, 0x10, 0xb1, 0x0f, - 0xaa, 0xa6, 0x34, 0x0c, 0x20, 0xcb, 0x93, 0x26, 0x34, 0x0a, 0x13, 0x24, 0x55, 0xd8, 0xb9, 0x8e, - 0x3c, 0xc7, 0x9f, 0x8a, 0x51, 0xcb, 0xf6, 0xf6, 0x75, 0xe4, 0xf5, 0xa6, 0xa2, 0xe0, 0x0b, 0x1c, - 0xf0, 0x82, 0x2e, 0xb2, 0x5c, 0xd7, 0x31, 0x94, 0xb9, 0x37, 0x0e, 0x5b, 0x50, 0xcc, 0xf0, 0xbb, - 0x3c, 0x30, 0x5a, 0xd0, 0x65, 0x26, 0x75, 0x9d, 0x69, 0x01, 0xcf, 0x0b, 0xa6, 0xac, 0xef, 0x19, - 0x6c, 0x27, 0x8c, 0x8b, 0xe5, 0x34, 0x07, 0xcd, 0x17, 0xfa, 0xfa, 0xca, 0xf4, 0x21, 0x4f, 0xdb, - 0x12, 0x45, 0xce, 0xa1, 0x24, 0x05, 0x25, 0x99, 0x3b, 0x1b, 0x2c, 0xcd, 0x91, 0xa2, 0xf5, 0x57, - 0xb9, 0x9f, 0x0e, 0x06, 0xf8, 0xb0, 0x9f, 0xff, 0xd5, 0xa1, 0x49, 0x0b, 0x73, 0x32, 0x29, 0x85, - 0x67, 0xde, 0x7a, 0xb0, 0x2d, 0xe6, 0x25, 0x55, 0x38, 0xb4, 0xcd, 0x91, 0xfd, 0xad, 0x75, 0xd1, - 0x37, 0x9d, 0xcf, 0xad, 0x5e, 0x7f, 0x6c, 0x9b, 0x95, 0x27, 0x3c, 0x3c, 0x30, 0xed, 0xcb, 0x96, - 0x65, 0x5a, 0xa3, 0x22, 0xac, 0x90, 0x3d, 0x28, 0x0d, 0x4c, 0xab, 0xd3, 0xb3, 0xba, 0x15, 0x95, - 0x7f, 0xd8, 0x63, 0xcb, 0xe2, 0x1f, 0x5b, 0x64, 0x1f, 0xca, 0xc3, 0x71, 0xbb, 0x6d, 0x9a, 0x1d, - 0xb3, 0x53, 0x79, 0x5a, 0x53, 0x35, 0xa5, 0x79, 0xaf, 0x42, 0xd5, 0xcc, 0xee, 0x7e, 0x20, 0xce, - 0x7e, 0x28, 0xad, 0x22, 0x57, 0x00, 0x72, 0xad, 0x7c, 0x3a, 0x72, 0xf2, 0xd8, 0xcb, 0x47, 0x27, - 0x5a, 0x7b, 0xb3, 0x19, 0x24, 0xa5, 0x35, 0xb6, 0x7e, 0xab, 0x0a, 0x19, 0x42, 0xa9, 0x8b, 0x4c, - 
0x50, 0xd7, 0xff, 0x5e, 0xf5, 0x70, 0x22, 0xb5, 0xd7, 0x1b, 0x10, 0xcb, 0xa4, 0x57, 0x00, 0xd2, - 0xc6, 0x4d, 0x23, 0xaf, 0x6c, 0xed, 0x5f, 0x23, 0xaf, 0x6e, 0x43, 0xb0, 0x5f, 0x7c, 0xfa, 0xfe, - 0x71, 0xee, 0xb3, 0x1f, 0xa9, 0xa7, 0x4f, 0xa2, 0x5b, 0x43, 0x94, 0x45, 0xf1, 0x5c, 0xfe, 0x30, - 0x8a, 0x17, 0x3d, 0xc7, 0xd0, 0xa0, 0xde, 0xd9, 0x3c, 0x32, 0xd6, 0xff, 0x5f, 0xbc, 0x1d, 0xf1, - 0xbc, 0xcf, 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0xda, 0xdb, 0x38, 0x09, 0x7a, 0x04, 0x00, 0x00, + // 534 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x41, 0x6f, 0xda, 0x4c, + 0x10, 0xfd, 0xec, 0x7c, 0x40, 0x98, 0x34, 0x29, 0x59, 0x09, 0xd5, 0x21, 0x3d, 0x50, 0xd2, 0x43, + 0x54, 0x29, 0xb6, 0x4a, 0x0e, 0x95, 0x2a, 0x55, 0x15, 0x01, 0x97, 0xa2, 0x12, 0x0b, 0x2d, 0x70, + 0x68, 0x15, 0xc9, 0xb2, 0x61, 0x42, 0x9d, 0x38, 0x78, 0x6b, 0xaf, 0xab, 0xf0, 0x0f, 0xfa, 0x53, + 0xf2, 0x33, 0xab, 0xdd, 0x35, 0x0e, 0x90, 0x96, 0x43, 0x6f, 0xde, 0x99, 0x37, 0x6f, 0xdf, 0x9b, + 0x99, 0x35, 0x98, 0xd7, 0xe1, 0x82, 0x63, 0x30, 0x0d, 0xad, 0x04, 0xe3, 0x9f, 0xc1, 0x04, 0x2d, + 0xbc, 0xe7, 0x18, 0xcf, 0xbd, 0xd0, 0x65, 0x61, 0x3a, 0x0b, 0xe6, 0x6e, 0x16, 0x37, 0x59, 0x1c, + 0xf1, 0x88, 0x54, 0x96, 0x78, 0x33, 0x8b, 0xd7, 0x5e, 0xe6, 0x0c, 0x93, 0x28, 0x46, 0x2b, 0x0c, + 0x38, 0xc6, 0x5e, 0x98, 0x28, 0x7c, 0xed, 0x68, 0x3d, 0xcb, 0xbd, 0xe4, 0x36, 0x4b, 0x35, 0x1e, + 0x34, 0x38, 0x1c, 0x79, 0xc9, 0x6d, 0x3b, 0x46, 0x8f, 0x23, 0xc5, 0x1f, 0x29, 0x26, 0x9c, 0xbc, + 0x85, 0x62, 0x30, 0x67, 0x29, 0x4f, 0x0c, 0xad, 0xae, 0x9d, 0xee, 0x35, 0x8f, 0x72, 0x85, 0xa6, + 0x60, 0x30, 0xfb, 0x8a, 0xff, 0xd2, 0x63, 0x34, 0x03, 0x92, 0x77, 0xb0, 0xcb, 0xf1, 0x8e, 0x85, + 0x1e, 0x47, 0x43, 0x97, 0x45, 0xc7, 0x1b, 0x45, 0xe2, 0x9a, 0x51, 0x06, 0xa1, 0x39, 0x98, 0x9c, + 0xc0, 0x7e, 0x94, 0x72, 0x96, 0x72, 0x97, 0xc5, 0x78, 0x1d, 0xdc, 0x1b, 0x3b, 0x75, 0xed, 0xb4, + 0x4c, 0x9f, 0xa9, 0xe0, 0x40, 0xc6, 0xde, 0xeb, 0x86, 0xd6, 0xb0, 0x80, 0xac, 0x2a, 0x4d, 
0x58, + 0x34, 0x4f, 0x90, 0x54, 0xa1, 0x78, 0x13, 0xf9, 0x6e, 0x30, 0x95, 0x52, 0xcb, 0xb4, 0x70, 0x13, + 0xf9, 0xbd, 0xa9, 0x2c, 0xf8, 0x0c, 0x07, 0xa2, 0xa0, 0x8b, 0x7c, 0xe9, 0xeb, 0x18, 0xca, 0xc2, + 0xbc, 0xcb, 0x17, 0x0c, 0x33, 0xfc, 0xae, 0x08, 0x8c, 0x16, 0x6c, 0x95, 0x49, 0xdf, 0x64, 0x5a, + 0xc0, 0xf3, 0x9c, 0x29, 0xbb, 0xf7, 0x0c, 0x0a, 0x09, 0x17, 0x66, 0x05, 0xcd, 0x41, 0xf3, 0x85, + 0xb9, 0x39, 0x13, 0x73, 0x28, 0xd2, 0x54, 0xa1, 0xc8, 0x39, 0x94, 0x94, 0xa1, 0x24, 0xeb, 0xce, + 0x96, 0x96, 0x2e, 0x91, 0xf2, 0xea, 0x2f, 0x6a, 0x3e, 0x1d, 0x0c, 0xf1, 0x71, 0x3e, 0xff, 0xea, + 0xc3, 0x50, 0x2d, 0x5c, 0x92, 0x29, 0x2b, 0x22, 0xf3, 0xc6, 0x87, 0x82, 0xd4, 0x4b, 0xaa, 0x70, + 0x48, 0xed, 0x11, 0xfd, 0xda, 0xba, 0xe8, 0xdb, 0xee, 0xa7, 0x56, 0xaf, 0x3f, 0xa6, 0x76, 0xe5, + 0x3f, 0x11, 0x1e, 0xd8, 0xf4, 0xb2, 0xe5, 0xd8, 0xce, 0x28, 0x0f, 0x6b, 0x64, 0x0f, 0x4a, 0x03, + 0xdb, 0xe9, 0xf4, 0x9c, 0x6e, 0x45, 0x17, 0x07, 0x3a, 0x76, 0x1c, 0x71, 0xd8, 0x21, 0xfb, 0x50, + 0x1e, 0x8e, 0xdb, 0x6d, 0xdb, 0xee, 0xd8, 0x9d, 0xca, 0xff, 0x35, 0xdd, 0xd0, 0x9a, 0x0f, 0x3a, + 0x54, 0xed, 0x6c, 0xb1, 0x07, 0x72, 0xaf, 0x87, 0xaa, 0x55, 0xe4, 0x0a, 0x40, 0x8d, 0x55, 0xa8, + 0x23, 0x27, 0x4f, 0x7b, 0xf9, 0x64, 0x45, 0x6b, 0xaf, 0xb7, 0x83, 0x94, 0xb5, 0xc6, 0xce, 0x2f, + 0x5d, 0x23, 0x43, 0x28, 0x75, 0x91, 0x4b, 0xea, 0xfa, 0x9f, 0xab, 0x1e, 0x57, 0xa4, 0xf6, 0x6a, + 0x0b, 0x62, 0x95, 0xf4, 0x0a, 0x40, 0xb5, 0x71, 0x9b, 0xe4, 0xb5, 0xa9, 0xfd, 0x4d, 0xf2, 0xfa, + 0x34, 0x24, 0xfb, 0xc5, 0xc7, 0x6f, 0x1f, 0x66, 0x01, 0xff, 0x9e, 0xfa, 0xe6, 0x24, 0xba, 0xb3, + 0x64, 0x59, 0x14, 0xcf, 0xd4, 0x87, 0x95, 0xbf, 0xe6, 0x19, 0xce, 0x2d, 0xe6, 0x9f, 0xcd, 0x22, + 0x6b, 0xf3, 0x07, 0xe2, 0x17, 0xe5, 0xf3, 0x3e, 0xff, 0x1d, 0x00, 0x00, 0xff, 0xff, 0xd9, 0xdd, + 0xd7, 0x35, 0x5b, 0x04, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
diff --git a/flyteidl/gen/pb-go/flyteidl/service/external_plugin_service.swagger.json b/flyteidl/gen/pb-go/flyteidl/service/external_plugin_service.swagger.json index adf61a5c1f..c83aad0fa0 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/external_plugin_service.swagger.json +++ b/flyteidl/gen/pb-go/flyteidl/service/external_plugin_service.swagger.json @@ -171,11 +171,15 @@ "format": "int64" }, "partition_key": { - "type": "string", - "title": "These two fields are only relevant in the partition value case" + "type": "string" + }, + "bind_to_time_partition": { + "type": "boolean", + "format": "boolean" }, "transform": { - "type": "string" + "type": "string", + "title": "This is only relevant in the time partition case" } }, "title": "Only valid for triggers" @@ -190,7 +194,12 @@ "type": "string" }, "partitions": { - "$ref": "#/definitions/corePartitions" + "$ref": "#/definitions/corePartitions", + "description": "Think of a partition as a tag on an Artifact, except it's a key-value pair.\nDifferent partitions naturally have different versions (execution ids)." + }, + "time_partition": { + "$ref": "#/definitions/coreTimePartition", + "description": "There is no such thing as an empty time partition - if it's not set, then there is no time partition." } } }, @@ -449,6 +458,10 @@ "version": { "type": "string", "description": "Specific version of the resource." + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the resource." } }, "description": "Encapsulation of fields that uniquely identifies a Flyte resource." 
@@ -539,7 +552,13 @@ "type": "object", "properties": { "static_value": { - "type": "string" + "type": "string", + "title": "The string static value is for use in the Partitions object" + }, + "time_value": { + "type": "string", + "format": "date-time", + "title": "The time value is for use in the TimePartition case" }, "triggered_binding": { "$ref": "#/definitions/coreArtifactBindingData" @@ -1097,6 +1116,14 @@ }, "description": "A Task structure that uniquely identifies a task in the system\nTasks are registered as a first step in the system." }, + "coreTimePartition": { + "type": "object", + "properties": { + "value": { + "$ref": "#/definitions/coreLabelValue" + } + } + }, "coreTypeAnnotation": { "type": "object", "properties": { diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/README.md b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/README.md index 5a4f66f46a..9ce6fd8121 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/README.md +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/README.md @@ -22,62 +22,120 @@ All URIs are relative to *http://localhost* Class | Method | HTTP request | Description ------------ | ------------- | ------------- | ------------- *AdminServiceApi* | [**CreateExecution**](docs/AdminServiceApi.md#createexecution) | **Post** /api/v1/executions | Triggers the creation of a :ref:`ref_flyteidl.admin.Execution` +*AdminServiceApi* | [**CreateExecution2**](docs/AdminServiceApi.md#createexecution2) | **Put** /api/v1/executions/org/{org} | Triggers the creation of a :ref:`ref_flyteidl.admin.Execution` *AdminServiceApi* | [**CreateLaunchPlan**](docs/AdminServiceApi.md#createlaunchplan) | **Post** /api/v1/launch_plans | Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition +*AdminServiceApi* | [**CreateLaunchPlan2**](docs/AdminServiceApi.md#createlaunchplan2) | **Post** /api/v1/launch_plans/org/{id.org} | Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition *AdminServiceApi* | 
[**CreateNodeEvent**](docs/AdminServiceApi.md#createnodeevent) | **Post** /api/v1/events/nodes | Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. +*AdminServiceApi* | [**CreateNodeEvent2**](docs/AdminServiceApi.md#createnodeevent2) | **Post** /api/v1/events/org/{event.id.execution_id.org}/nodes | Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. *AdminServiceApi* | [**CreateTask**](docs/AdminServiceApi.md#createtask) | **Post** /api/v1/tasks | Create and upload a :ref:`ref_flyteidl.admin.Task` definition +*AdminServiceApi* | [**CreateTask2**](docs/AdminServiceApi.md#createtask2) | **Post** /api/v1/tasks/org/{id.org} | Create and upload a :ref:`ref_flyteidl.admin.Task` definition *AdminServiceApi* | [**CreateTaskEvent**](docs/AdminServiceApi.md#createtaskevent) | **Post** /api/v1/events/tasks | Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. +*AdminServiceApi* | [**CreateTaskEvent2**](docs/AdminServiceApi.md#createtaskevent2) | **Post** /api/v1/events/org/{event.parent_node_execution_id.execution_id.org}/tasks | Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. *AdminServiceApi* | [**CreateWorkflow**](docs/AdminServiceApi.md#createworkflow) | **Post** /api/v1/workflows | Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition +*AdminServiceApi* | [**CreateWorkflow2**](docs/AdminServiceApi.md#createworkflow2) | **Post** /api/v1/workflows/org/{id.org} | Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition *AdminServiceApi* | [**CreateWorkflowEvent**](docs/AdminServiceApi.md#createworkflowevent) | **Post** /api/v1/events/workflows | Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. +*AdminServiceApi* | [**CreateWorkflowEvent2**](docs/AdminServiceApi.md#createworkflowevent2) | **Post** /api/v1/events/org/{event.execution_id.org}/workflows | Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. 
*AdminServiceApi* | [**DeleteProjectAttributes**](docs/AdminServiceApi.md#deleteprojectattributes) | **Delete** /api/v1/project_attributes/{project} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +*AdminServiceApi* | [**DeleteProjectAttributes2**](docs/AdminServiceApi.md#deleteprojectattributes2) | **Delete** /api/v1/project_domain_attributes/org/{org}/{project} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. *AdminServiceApi* | [**DeleteProjectDomainAttributes**](docs/AdminServiceApi.md#deleteprojectdomainattributes) | **Delete** /api/v1/project_domain_attributes/{project}/{domain} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +*AdminServiceApi* | [**DeleteProjectDomainAttributes2**](docs/AdminServiceApi.md#deleteprojectdomainattributes2) | **Delete** /api/v1/project_domain_attributes/org/{org}/{project}/{domain} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. *AdminServiceApi* | [**DeleteWorkflowAttributes**](docs/AdminServiceApi.md#deleteworkflowattributes) | **Delete** /api/v1/workflow_attributes/{project}/{domain}/{workflow} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. +*AdminServiceApi* | [**DeleteWorkflowAttributes2**](docs/AdminServiceApi.md#deleteworkflowattributes2) | **Delete** /api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. *AdminServiceApi* | [**GetActiveLaunchPlan**](docs/AdminServiceApi.md#getactivelaunchplan) | **Get** /api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name} | Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. 
+*AdminServiceApi* | [**GetActiveLaunchPlan2**](docs/AdminServiceApi.md#getactivelaunchplan2) | **Get** /api/v1/active_launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. *AdminServiceApi* | [**GetDescriptionEntity**](docs/AdminServiceApi.md#getdescriptionentity) | **Get** /api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. +*AdminServiceApi* | [**GetDescriptionEntity2**](docs/AdminServiceApi.md#getdescriptionentity2) | **Get** /api/v1/description_entities/org/{id.org}/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. +*AdminServiceApi* | [**GetDynamicNodeWorkflow**](docs/AdminServiceApi.md#getdynamicnodeworkflow) | **Get** /api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow | Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. +*AdminServiceApi* | [**GetDynamicNodeWorkflow2**](docs/AdminServiceApi.md#getdynamicnodeworkflow2) | **Get** /api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow | Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. *AdminServiceApi* | [**GetExecution**](docs/AdminServiceApi.md#getexecution) | **Get** /api/v1/executions/{id.project}/{id.domain}/{id.name} | Fetches a :ref:`ref_flyteidl.admin.Execution`. +*AdminServiceApi* | [**GetExecution2**](docs/AdminServiceApi.md#getexecution2) | **Get** /api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetches a :ref:`ref_flyteidl.admin.Execution`. 
*AdminServiceApi* | [**GetExecutionData**](docs/AdminServiceApi.md#getexecutiondata) | **Get** /api/v1/data/executions/{id.project}/{id.domain}/{id.name} | Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. +*AdminServiceApi* | [**GetExecutionData2**](docs/AdminServiceApi.md#getexecutiondata2) | **Get** /api/v1/data/executions/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. *AdminServiceApi* | [**GetExecutionMetrics**](docs/AdminServiceApi.md#getexecutionmetrics) | **Get** /api/v1/metrics/executions/{id.project}/{id.domain}/{id.name} | Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. +*AdminServiceApi* | [**GetExecutionMetrics2**](docs/AdminServiceApi.md#getexecutionmetrics2) | **Get** /api/v1/metrics/executions/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. *AdminServiceApi* | [**GetLaunchPlan**](docs/AdminServiceApi.md#getlaunchplan) | **Get** /api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. +*AdminServiceApi* | [**GetLaunchPlan2**](docs/AdminServiceApi.md#getlaunchplan2) | **Get** /api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. *AdminServiceApi* | [**GetNamedEntity**](docs/AdminServiceApi.md#getnamedentity) | **Get** /api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name} | Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. +*AdminServiceApi* | [**GetNamedEntity2**](docs/AdminServiceApi.md#getnamedentity2) | **Get** /api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name} | Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. 
*AdminServiceApi* | [**GetNodeExecution**](docs/AdminServiceApi.md#getnodeexecution) | **Get** /api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id} | Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. +*AdminServiceApi* | [**GetNodeExecution2**](docs/AdminServiceApi.md#getnodeexecution2) | **Get** /api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id} | Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. *AdminServiceApi* | [**GetNodeExecutionData**](docs/AdminServiceApi.md#getnodeexecutiondata) | **Get** /api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id} | Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. +*AdminServiceApi* | [**GetNodeExecutionData2**](docs/AdminServiceApi.md#getnodeexecutiondata2) | **Get** /api/v1/data/org/{id.execution_id.org}/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id} | Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. *AdminServiceApi* | [**GetProjectAttributes**](docs/AdminServiceApi.md#getprojectattributes) | **Get** /api/v1/project_attributes/{project} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +*AdminServiceApi* | [**GetProjectAttributes2**](docs/AdminServiceApi.md#getprojectattributes2) | **Get** /api/v1/project_domain_attributes/org/{org}/{project} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. *AdminServiceApi* | [**GetProjectDomainAttributes**](docs/AdminServiceApi.md#getprojectdomainattributes) | **Get** /api/v1/project_domain_attributes/{project}/{domain} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. 
+*AdminServiceApi* | [**GetProjectDomainAttributes2**](docs/AdminServiceApi.md#getprojectdomainattributes2) | **Get** /api/v1/project_domain_attributes/org/{org}/{project}/{domain} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. *AdminServiceApi* | [**GetTask**](docs/AdminServiceApi.md#gettask) | **Get** /api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.Task` definition. +*AdminServiceApi* | [**GetTask2**](docs/AdminServiceApi.md#gettask2) | **Get** /api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.Task` definition. *AdminServiceApi* | [**GetTaskExecution**](docs/AdminServiceApi.md#gettaskexecution) | **Get** /api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} | Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. +*AdminServiceApi* | [**GetTaskExecution2**](docs/AdminServiceApi.md#gettaskexecution2) | **Get** /api/v1/task_executions/org/{id.node_execution_id.execution_id.org}/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} | Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. 
*AdminServiceApi* | [**GetTaskExecutionData**](docs/AdminServiceApi.md#gettaskexecutiondata) | **Get** /api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} | Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. +*AdminServiceApi* | [**GetTaskExecutionData2**](docs/AdminServiceApi.md#gettaskexecutiondata2) | **Get** /api/v1/data/org/{id.node_execution_id.execution_id.org}/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} | Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. *AdminServiceApi* | [**GetVersion**](docs/AdminServiceApi.md#getversion) | **Get** /api/v1/version | *AdminServiceApi* | [**GetWorkflow**](docs/AdminServiceApi.md#getworkflow) | **Get** /api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. +*AdminServiceApi* | [**GetWorkflow2**](docs/AdminServiceApi.md#getworkflow2) | **Get** /api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. *AdminServiceApi* | [**GetWorkflowAttributes**](docs/AdminServiceApi.md#getworkflowattributes) | **Get** /api/v1/workflow_attributes/{project}/{domain}/{workflow} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. 
+*AdminServiceApi* | [**GetWorkflowAttributes2**](docs/AdminServiceApi.md#getworkflowattributes2) | **Get** /api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. *AdminServiceApi* | [**ListActiveLaunchPlans**](docs/AdminServiceApi.md#listactivelaunchplans) | **Get** /api/v1/active_launch_plans/{project}/{domain} | List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. +*AdminServiceApi* | [**ListActiveLaunchPlans2**](docs/AdminServiceApi.md#listactivelaunchplans2) | **Get** /api/v1/active_launch_plans/org/{org}/{project}/{domain} | List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. *AdminServiceApi* | [**ListDescriptionEntities**](docs/AdminServiceApi.md#listdescriptionentities) | **Get** /api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. -*AdminServiceApi* | [**ListDescriptionEntities2**](docs/AdminServiceApi.md#listdescriptionentities2) | **Get** /api/v1/description_entities/{resource_type}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. +*AdminServiceApi* | [**ListDescriptionEntities2**](docs/AdminServiceApi.md#listdescriptionentities2) | **Get** /api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. +*AdminServiceApi* | [**ListDescriptionEntities3**](docs/AdminServiceApi.md#listdescriptionentities3) | **Get** /api/v1/description_entities/{resource_type}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. 
+*AdminServiceApi* | [**ListDescriptionEntities4**](docs/AdminServiceApi.md#listdescriptionentities4) | **Get** /api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. *AdminServiceApi* | [**ListExecutions**](docs/AdminServiceApi.md#listexecutions) | **Get** /api/v1/executions/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Execution`. +*AdminServiceApi* | [**ListExecutions2**](docs/AdminServiceApi.md#listexecutions2) | **Get** /api/v1/executions/org/{id.org}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Execution`. *AdminServiceApi* | [**ListLaunchPlanIds**](docs/AdminServiceApi.md#listlaunchplanids) | **Get** /api/v1/launch_plan_ids/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. +*AdminServiceApi* | [**ListLaunchPlanIds2**](docs/AdminServiceApi.md#listlaunchplanids2) | **Get** /api/v1/launch_plan_ids/org/{org}/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. *AdminServiceApi* | [**ListLaunchPlans**](docs/AdminServiceApi.md#listlaunchplans) | **Get** /api/v1/launch_plans/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. -*AdminServiceApi* | [**ListLaunchPlans2**](docs/AdminServiceApi.md#listlaunchplans2) | **Get** /api/v1/launch_plans/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. +*AdminServiceApi* | [**ListLaunchPlans2**](docs/AdminServiceApi.md#listlaunchplans2) | **Get** /api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. 
+*AdminServiceApi* | [**ListLaunchPlans3**](docs/AdminServiceApi.md#listlaunchplans3) | **Get** /api/v1/launch_plans/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. +*AdminServiceApi* | [**ListLaunchPlans4**](docs/AdminServiceApi.md#listlaunchplans4) | **Get** /api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. *AdminServiceApi* | [**ListMatchableAttributes**](docs/AdminServiceApi.md#listmatchableattributes) | **Get** /api/v1/matchable_attributes | Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. +*AdminServiceApi* | [**ListMatchableAttributes2**](docs/AdminServiceApi.md#listmatchableattributes2) | **Get** /api/v1/matchable_attributes/org/{org} | Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. *AdminServiceApi* | [**ListNamedEntities**](docs/AdminServiceApi.md#listnamedentities) | **Get** /api/v1/named_entities/{resource_type}/{project}/{domain} | Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. +*AdminServiceApi* | [**ListNamedEntities2**](docs/AdminServiceApi.md#listnamedentities2) | **Get** /api/v1/named_entities/org/{org}/{resource_type}/{project}/{domain} | Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. *AdminServiceApi* | [**ListNodeExecutions**](docs/AdminServiceApi.md#listnodeexecutions) | **Get** /api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name} | Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. +*AdminServiceApi* | [**ListNodeExecutions2**](docs/AdminServiceApi.md#listnodeexecutions2) | **Get** /api/v1/node_executions/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name} | Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. 
*AdminServiceApi* | [**ListNodeExecutionsForTask**](docs/AdminServiceApi.md#listnodeexecutionsfortask) | **Get** /api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt} | Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. +*AdminServiceApi* | [**ListNodeExecutionsForTask2**](docs/AdminServiceApi.md#listnodeexecutionsfortask2) | **Get** /api/v1/children/org/{task_execution_id.node_execution_id.execution_id.org}/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt} | Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. 
*AdminServiceApi* | [**ListProjects**](docs/AdminServiceApi.md#listprojects) | **Get** /api/v1/projects | Fetches a list of :ref:`ref_flyteidl.admin.Project` +*AdminServiceApi* | [**ListProjects2**](docs/AdminServiceApi.md#listprojects2) | **Get** /api/v1/projects/org/{org} | Fetches a list of :ref:`ref_flyteidl.admin.Project` *AdminServiceApi* | [**ListTaskExecutions**](docs/AdminServiceApi.md#listtaskexecutions) | **Get** /api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id} | Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. +*AdminServiceApi* | [**ListTaskExecutions2**](docs/AdminServiceApi.md#listtaskexecutions2) | **Get** /api/v1/task_executions/org/{node_execution_id.execution_id.org}/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id} | Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. *AdminServiceApi* | [**ListTaskIds**](docs/AdminServiceApi.md#listtaskids) | **Get** /api/v1/task_ids/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. +*AdminServiceApi* | [**ListTaskIds2**](docs/AdminServiceApi.md#listtaskids2) | **Get** /api/v1/tasks/org/{org}/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. *AdminServiceApi* | [**ListTasks**](docs/AdminServiceApi.md#listtasks) | **Get** /api/v1/tasks/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. -*AdminServiceApi* | [**ListTasks2**](docs/AdminServiceApi.md#listtasks2) | **Get** /api/v1/tasks/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. 
+*AdminServiceApi* | [**ListTasks2**](docs/AdminServiceApi.md#listtasks2) | **Get** /api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. +*AdminServiceApi* | [**ListTasks3**](docs/AdminServiceApi.md#listtasks3) | **Get** /api/v1/tasks/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. +*AdminServiceApi* | [**ListTasks4**](docs/AdminServiceApi.md#listtasks4) | **Get** /api/v1/tasks/org/{id.org}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. *AdminServiceApi* | [**ListWorkflowIds**](docs/AdminServiceApi.md#listworkflowids) | **Get** /api/v1/workflow_ids/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. +*AdminServiceApi* | [**ListWorkflowIds2**](docs/AdminServiceApi.md#listworkflowids2) | **Get** /api/v1/workflows/org/{org}/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. *AdminServiceApi* | [**ListWorkflows**](docs/AdminServiceApi.md#listworkflows) | **Get** /api/v1/workflows/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. -*AdminServiceApi* | [**ListWorkflows2**](docs/AdminServiceApi.md#listworkflows2) | **Get** /api/v1/workflows/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. +*AdminServiceApi* | [**ListWorkflows2**](docs/AdminServiceApi.md#listworkflows2) | **Get** /api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. +*AdminServiceApi* | [**ListWorkflows3**](docs/AdminServiceApi.md#listworkflows3) | **Get** /api/v1/workflows/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. 
+*AdminServiceApi* | [**ListWorkflows4**](docs/AdminServiceApi.md#listworkflows4) | **Get** /api/v1/workflows/org/{id.org}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. *AdminServiceApi* | [**RecoverExecution**](docs/AdminServiceApi.md#recoverexecution) | **Post** /api/v1/executions/recover | Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. +*AdminServiceApi* | [**RecoverExecution2**](docs/AdminServiceApi.md#recoverexecution2) | **Post** /api/v1/executions/org/{id.org}/recover | Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. *AdminServiceApi* | [**RegisterProject**](docs/AdminServiceApi.md#registerproject) | **Post** /api/v1/projects | Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. 
+*AdminServiceApi* | [**RegisterProject2**](docs/AdminServiceApi.md#registerproject2) | **Post** /api/v1/projects/org/{project.org} | Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. *AdminServiceApi* | [**RelaunchExecution**](docs/AdminServiceApi.md#relaunchexecution) | **Post** /api/v1/executions/relaunch | Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` +*AdminServiceApi* | [**RelaunchExecution2**](docs/AdminServiceApi.md#relaunchexecution2) | **Post** /api/v1/executions/org/{id.org}/relaunch | Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` *AdminServiceApi* | [**TerminateExecution**](docs/AdminServiceApi.md#terminateexecution) | **Delete** /api/v1/executions/{id.project}/{id.domain}/{id.name} | Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. +*AdminServiceApi* | [**TerminateExecution2**](docs/AdminServiceApi.md#terminateexecution2) | **Delete** /api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name} | Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. *AdminServiceApi* | [**UpdateExecution**](docs/AdminServiceApi.md#updateexecution) | **Put** /api/v1/executions/{id.project}/{id.domain}/{id.name} | Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. +*AdminServiceApi* | [**UpdateExecution2**](docs/AdminServiceApi.md#updateexecution2) | **Put** /api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name} | Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. *AdminServiceApi* | [**UpdateLaunchPlan**](docs/AdminServiceApi.md#updatelaunchplan) | **Put** /api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version} | Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. 
+*AdminServiceApi* | [**UpdateLaunchPlan2**](docs/AdminServiceApi.md#updatelaunchplan2) | **Put** /api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version} | Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. *AdminServiceApi* | [**UpdateNamedEntity**](docs/AdminServiceApi.md#updatenamedentity) | **Put** /api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name} | Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. -*AdminServiceApi* | [**UpdateProject**](docs/AdminServiceApi.md#updateproject) | **Put** /api/v1/projects/{id} | Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. +*AdminServiceApi* | [**UpdateNamedEntity2**](docs/AdminServiceApi.md#updatenamedentity2) | **Put** /api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name} | Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. +*AdminServiceApi* | [**UpdateProject**](docs/AdminServiceApi.md#updateproject) | **Put** /api/v1/projects/{id} | Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. +*AdminServiceApi* | [**UpdateProject2**](docs/AdminServiceApi.md#updateproject2) | **Put** /api/v1/projects/org/{org}/{id} | Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. 
*AdminServiceApi* | [**UpdateProjectAttributes**](docs/AdminServiceApi.md#updateprojectattributes) | **Put** /api/v1/project_attributes/{attributes.project} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level +*AdminServiceApi* | [**UpdateProjectAttributes2**](docs/AdminServiceApi.md#updateprojectattributes2) | **Put** /api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level *AdminServiceApi* | [**UpdateProjectDomainAttributes**](docs/AdminServiceApi.md#updateprojectdomainattributes) | **Put** /api/v1/project_domain_attributes/{attributes.project}/{attributes.domain} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +*AdminServiceApi* | [**UpdateProjectDomainAttributes2**](docs/AdminServiceApi.md#updateprojectdomainattributes2) | **Put** /api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. *AdminServiceApi* | [**UpdateWorkflowAttributes**](docs/AdminServiceApi.md#updateworkflowattributes) | **Put** /api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. +*AdminServiceApi* | [**UpdateWorkflowAttributes2**](docs/AdminServiceApi.md#updateworkflowattributes2) | **Put** /api/v1/workflow_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}/{attributes.workflow} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. 
## Documentation For Models @@ -94,6 +152,7 @@ Class | Method | HTTP request | Description - [AdminDescriptionEntityList](docs/AdminDescriptionEntityList.md) - [AdminDescriptionFormat](docs/AdminDescriptionFormat.md) - [AdminDomain](docs/AdminDomain.md) + - [AdminDynamicNodeWorkflowResponse](docs/AdminDynamicNodeWorkflowResponse.md) - [AdminEmailNotification](docs/AdminEmailNotification.md) - [AdminEnvs](docs/AdminEnvs.md) - [AdminExecution](docs/AdminExecution.md) @@ -302,6 +361,7 @@ Class | Method | HTTP request | Description - [CoreTaskNode](docs/CoreTaskNode.md) - [CoreTaskNodeOverrides](docs/CoreTaskNodeOverrides.md) - [CoreTaskTemplate](docs/CoreTaskTemplate.md) + - [CoreTimePartition](docs/CoreTimePartition.md) - [CoreTypeAnnotation](docs/CoreTypeAnnotation.md) - [CoreTypeStructure](docs/CoreTypeStructure.md) - [CoreTypedInterface](docs/CoreTypedInterface.md) diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/api/swagger.yaml b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/api/swagger.yaml index 392faa0aab..b37018248b 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/api/swagger.yaml +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/api/swagger.yaml @@ -11,6 +11,112 @@ consumes: produces: - "application/json" paths: + /api/v1/active_launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}: + get: + tags: + - "AdminService" + summary: "Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`." + operationId: "GetActiveLaunchPlan2" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." 
+ required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'" + required: true + type: "string" + x-exportParamName: "IdName" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminLaunchPlan" + /api/v1/active_launch_plans/org/{org}/{project}/{domain}: + get: + tags: + - "AdminService" + summary: "List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`." + operationId: "ListActiveLaunchPlans2" + parameters: + - name: "org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "Org" + - name: "project" + in: "path" + description: "Name of the project that contains the identifiers.\n+required." + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Name of the domain the identifiers belongs to within the project.\n\ + +required." + required: true + type: "string" + x-exportParamName: "Domain" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." 
+ required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminLaunchPlanList" /api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}: get: tags: @@ -39,6 +145,13 @@ paths: required: true type: "string" x-exportParamName: "IdName" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" responses: 200: description: "A successful response." @@ -100,11 +213,163 @@ paths: - "ASCENDING" x-exportParamName: "SortByDirection" x-optionalDataType: "String" + - name: "org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "Org" + x-optionalDataType: "String" responses: 200: description: "A successful response." schema: $ref: "#/definitions/adminLaunchPlanList" + ? 
/api/v1/children/org/{task_execution_id.node_execution_id.execution_id.org}/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt} + : get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by\ + \ the reference :ref:`ref_flyteidl.admin.TaskExecution`." + operationId: "ListNodeExecutionsForTask2" + parameters: + - name: "task_execution_id.node_execution_id.execution_id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "TaskExecutionIdNodeExecutionIdExecutionIdOrg" + - name: "task_execution_id.node_execution_id.execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "TaskExecutionIdNodeExecutionIdExecutionIdProject" + - name: "task_execution_id.node_execution_id.execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "TaskExecutionIdNodeExecutionIdExecutionIdDomain" + - name: "task_execution_id.node_execution_id.execution_id.name" + in: "path" + description: "User or system provided value for the resource." 
+ required: true + type: "string" + x-exportParamName: "TaskExecutionIdNodeExecutionIdExecutionIdName" + - name: "task_execution_id.node_execution_id.node_id" + in: "path" + required: true + type: "string" + x-exportParamName: "TaskExecutionIdNodeExecutionIdNodeId" + - name: "task_execution_id.task_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "TaskExecutionIdTaskIdProject" + - name: "task_execution_id.task_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "TaskExecutionIdTaskIdDomain" + - name: "task_execution_id.task_id.name" + in: "path" + description: "User provided value for the resource." + required: true + type: "string" + x-exportParamName: "TaskExecutionIdTaskIdName" + - name: "task_execution_id.task_id.version" + in: "path" + description: "Specific version of the resource." + required: true + type: "string" + x-exportParamName: "TaskExecutionIdTaskIdVersion" + - name: "task_execution_id.retry_attempt" + in: "path" + required: true + type: "integer" + format: "int64" + x-exportParamName: "TaskExecutionIdRetryAttempt" + - name: "task_execution_id.task_id.resource_type" + in: "query" + description: "Identifies the specific type of resource that this identifier\ + \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ + \ in Flyte DataCatalog. A Dataset is also a versioned entity and can be\ + \ a compilation of multiple individual objects.\nEventually all Catalog\ + \ objects should be modeled similar to Flyte Objects. 
The Dataset entities\ + \ makes it possible for the UI and CLI to act on the objects \nin a similar\ + \ manner to other Flyte objects" + required: false + type: "string" + default: "UNSPECIFIED" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "TaskExecutionIdTaskIdResourceType" + x-optionalDataType: "String" + - name: "task_execution_id.task_id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "TaskExecutionIdTaskIdOrg" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the, server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." 
+ required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminNodeExecutionList" ? /api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt} : get: tags: @@ -188,6 +453,20 @@ paths: - "DATASET" x-exportParamName: "TaskExecutionIdTaskIdResourceType" x-optionalDataType: "String" + - name: "task_execution_id.task_id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "TaskExecutionIdTaskIdOrg" + x-optionalDataType: "String" + - name: "task_execution_id.node_execution_id.execution_id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "TaskExecutionIdNodeExecutionIdExecutionIdOrg" + x-optionalDataType: "String" - name: "limit" in: "query" description: "Indicates the number of resources to be returned.\n+required." @@ -237,6 +516,43 @@ paths: description: "A successful response." schema: $ref: "#/definitions/adminNodeExecutionList" + /api/v1/data/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}: + get: + tags: + - "AdminService" + summary: "Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`." + operationId: "GetExecutionData2" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." 
+ required: true + type: "string" + x-exportParamName: "IdOrg" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User or system provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdName" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowExecutionGetDataResponse" /api/v1/data/executions/{id.project}/{id.domain}/{id.name}: get: tags: @@ -263,6 +579,13 @@ paths: required: true type: "string" x-exportParamName: "IdName" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" responses: 200: description: "A successful response." @@ -275,6 +598,55 @@ paths: summary: "Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`." operationId: "GetNodeExecutionData" parameters: + - name: "id.execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdExecutionIdProject" + - name: "id.execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdExecutionIdDomain" + - name: "id.execution_id.name" + in: "path" + description: "User or system provided value for the resource." 
+ required: true + type: "string" + x-exportParamName: "IdExecutionIdName" + - name: "id.node_id" + in: "path" + required: true + type: "string" + x-exportParamName: "IdNodeId" + - name: "id.execution_id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdExecutionIdOrg" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminNodeExecutionGetDataResponse" + ? /api/v1/data/org/{id.execution_id.org}/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id} + : get: + tags: + - "AdminService" + summary: "Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`." + operationId: "GetNodeExecutionData2" + parameters: + - name: "id.execution_id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdExecutionIdOrg" - name: "id.execution_id.project" in: "path" description: "Name of the project the resource belongs to." @@ -304,6 +676,106 @@ paths: description: "A successful response." schema: $ref: "#/definitions/adminNodeExecutionGetDataResponse" + ? /api/v1/data/org/{id.node_execution_id.execution_id.org}/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} + : get: + tags: + - "AdminService" + summary: "Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`." + operationId: "GetTaskExecutionData2" + parameters: + - name: "id.node_execution_id.execution_id.org" + in: "path" + description: "Optional, org key applied to the resource." 
+ required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdOrg" + - name: "id.node_execution_id.execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdProject" + - name: "id.node_execution_id.execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdDomain" + - name: "id.node_execution_id.execution_id.name" + in: "path" + description: "User or system provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdName" + - name: "id.node_execution_id.node_id" + in: "path" + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdNodeId" + - name: "id.task_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdTaskIdProject" + - name: "id.task_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdTaskIdDomain" + - name: "id.task_id.name" + in: "path" + description: "User provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdTaskIdName" + - name: "id.task_id.version" + in: "path" + description: "Specific version of the resource." 
+ required: true + type: "string" + x-exportParamName: "IdTaskIdVersion" + - name: "id.retry_attempt" + in: "path" + required: true + type: "integer" + format: "int64" + x-exportParamName: "IdRetryAttempt" + - name: "id.task_id.resource_type" + in: "query" + description: "Identifies the specific type of resource that this identifier\ + \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ + \ in Flyte DataCatalog. A Dataset is also a versioned entity and can be\ + \ a compilation of multiple individual objects.\nEventually all Catalog\ + \ objects should be modeled similar to Flyte Objects. The Dataset entities\ + \ makes it possible for the UI and CLI to act on the objects \nin a similar\ + \ manner to other Flyte objects" + required: false + type: "string" + default: "UNSPECIFIED" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "IdTaskIdResourceType" + x-optionalDataType: "String" + - name: "id.task_id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdTaskIdOrg" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminTaskExecutionGetDataResponse" ? /api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} : get: tags: @@ -386,18 +858,38 @@ paths: - "DATASET" x-exportParamName: "IdTaskIdResourceType" x-optionalDataType: "String" + - name: "id.task_id.org" + in: "query" + description: "Optional, org key applied to the resource." 
+ required: false + type: "string" + x-exportParamName: "IdTaskIdOrg" + x-optionalDataType: "String" + - name: "id.node_execution_id.execution_id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdOrg" + x-optionalDataType: "String" responses: 200: description: "A successful response." schema: $ref: "#/definitions/adminTaskExecutionGetDataResponse" - /api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}: + /api/v1/description_entities/org/{id.org}/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}: get: tags: - "AdminService" summary: "Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object." - operationId: "GetDescriptionEntity" + operationId: "GetDescriptionEntity2" parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" - name: "id.resource_type" in: "path" description: "Identifies the specific type of resource that this identifier\ @@ -441,13 +933,19 @@ paths: description: "A successful response." schema: $ref: "#/definitions/adminDescriptionEntity" - /api/v1/description_entities/{resource_type}/{id.project}/{id.domain}: + /api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}: get: tags: - "AdminService" summary: "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions." - operationId: "ListDescriptionEntities2" + operationId: "ListDescriptionEntities4" parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" - name: "resource_type" in: "path" description: "Identifies the specific type of resource that this identifier\ @@ -532,13 +1030,19 @@ paths: description: "A successful response." 
schema: $ref: "#/definitions/adminDescriptionEntityList" - /api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}: + /api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}: get: tags: - "AdminService" summary: "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions." - operationId: "ListDescriptionEntities" + operationId: "ListDescriptionEntities2" parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" - name: "resource_type" in: "path" description: "Identifies the specific type of resource that this identifier\ @@ -622,35 +1126,359 @@ paths: description: "A successful response." schema: $ref: "#/definitions/adminDescriptionEntityList" - /api/v1/events/nodes: - post: + /api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}: + get: tags: - "AdminService" - summary: "Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred." - operationId: "CreateNodeEvent" + summary: "Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object." + operationId: "GetDescriptionEntity" parameters: - - in: "body" - name: "body" + - name: "id.resource_type" + in: "path" + description: "Identifies the specific type of resource that this identifier\ + \ corresponds to." required: true - schema: - $ref: "#/definitions/adminNodeExecutionEventRequest" - x-exportParamName: "Body" + type: "string" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "IdResourceType" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." 
+ required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdName" + - name: "id.version" + in: "path" + description: "Specific version of the resource." + required: true + type: "string" + x-exportParamName: "IdVersion" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminNodeExecutionEventResponse" - /api/v1/events/tasks: - post: + $ref: "#/definitions/adminDescriptionEntity" + /api/v1/description_entities/{resource_type}/{id.project}/{id.domain}: + get: tags: - "AdminService" - summary: "Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred." - operationId: "CreateTaskEvent" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions." + operationId: "ListDescriptionEntities3" parameters: - - in: "body" - name: "body" + - name: "resource_type" + in: "path" + description: "Identifies the specific type of resource that this identifier\ + \ corresponds to." required: true - schema: + type: "string" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "ResourceType" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." 
+ required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "query" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'." + required: false + type: "string" + x-exportParamName: "IdName" + x-optionalDataType: "String" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." 
+ required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminDescriptionEntityList" + /api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions." + operationId: "ListDescriptionEntities" + parameters: + - name: "resource_type" + in: "path" + description: "Identifies the specific type of resource that this identifier\ + \ corresponds to." + required: true + type: "string" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "ResourceType" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'" + required: true + type: "string" + x-exportParamName: "IdName" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." 
+ required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminDescriptionEntityList" + /api/v1/events/nodes: + post: + tags: + - "AdminService" + summary: "Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred." + operationId: "CreateNodeEvent" + parameters: + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminNodeExecutionEventRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminNodeExecutionEventResponse" + /api/v1/events/org/{event.execution_id.org}/workflows: + post: + tags: + - "AdminService" + summary: "Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred." 
+ operationId: "CreateWorkflowEvent2" + parameters: + - name: "event.execution_id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "EventExecutionIdOrg" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminWorkflowExecutionEventRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowExecutionEventResponse" + /api/v1/events/org/{event.id.execution_id.org}/nodes: + post: + tags: + - "AdminService" + summary: "Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred." + operationId: "CreateNodeEvent2" + parameters: + - name: "event.id.execution_id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "EventIdExecutionIdOrg" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminNodeExecutionEventRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminNodeExecutionEventResponse" + /api/v1/events/org/{event.parent_node_execution_id.execution_id.org}/tasks: + post: + tags: + - "AdminService" + summary: "Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred." + operationId: "CreateTaskEvent2" + parameters: + - name: "event.parent_node_execution_id.execution_id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "EventParentNodeExecutionIdExecutionIdOrg" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminTaskExecutionEventRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." 
+ schema: + $ref: "#/definitions/adminTaskExecutionEventResponse" + /api/v1/events/tasks: + post: + tags: + - "AdminService" + summary: "Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred." + operationId: "CreateTaskEvent" + parameters: + - in: "body" + name: "body" + required: true + schema: $ref: "#/definitions/adminTaskExecutionEventRequest" x-exportParamName: "Body" responses: @@ -694,7 +1522,7 @@ paths: description: "A successful response." schema: $ref: "#/definitions/adminExecutionCreateResponse" - /api/v1/executions/recover: + /api/v1/executions/org/{id.org}/recover: post: tags: - "AdminService" @@ -706,8 +1534,14 @@ paths: downstream system failures (downstream services), or simply to recover executions\ \ that failed because of retry exhaustion and should complete if tried again.\n\ See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details." - operationId: "RecoverExecution" + operationId: "RecoverExecution2" parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" - in: "body" name: "body" required: true @@ -719,13 +1553,19 @@ paths: description: "A successful response." schema: $ref: "#/definitions/adminExecutionCreateResponse" - /api/v1/executions/relaunch: + /api/v1/executions/org/{id.org}/relaunch: post: tags: - "AdminService" summary: "Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution`" - operationId: "RelaunchExecution" + operationId: "RelaunchExecution2" parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" - in: "body" name: "body" required: true @@ -737,13 +1577,19 @@ paths: description: "A successful response." 
schema: $ref: "#/definitions/adminExecutionCreateResponse" - /api/v1/executions/{id.project}/{id.domain}: + /api/v1/executions/org/{id.org}/{id.project}/{id.domain}: get: tags: - "AdminService" summary: "Fetch a list of :ref:`ref_flyteidl.admin.Execution`." - operationId: "ListExecutions" + operationId: "ListExecutions2" parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" - name: "id.project" in: "path" description: "Name of the project the resource belongs to." @@ -815,13 +1661,19 @@ paths: description: "A successful response." schema: $ref: "#/definitions/adminExecutionList" - /api/v1/executions/{id.project}/{id.domain}/{id.name}: + /api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}: get: tags: - "AdminService" summary: "Fetches a :ref:`ref_flyteidl.admin.Execution`." - operationId: "GetExecution" + operationId: "GetExecution2" parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" - name: "id.project" in: "path" description: "Name of the project the resource belongs to." @@ -850,8 +1702,14 @@ paths: tags: - "AdminService" summary: "Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`." - operationId: "UpdateExecution" + operationId: "UpdateExecution2" parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" - name: "id.project" in: "path" description: "Name of the project the resource belongs to." @@ -886,8 +1744,14 @@ paths: tags: - "AdminService" summary: "Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`." 
- operationId: "TerminateExecution" + operationId: "TerminateExecution2" parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" - name: "id.project" in: "path" description: "Name of the project the resource belongs to." @@ -918,99 +1782,79 @@ paths: description: "A successful response." schema: $ref: "#/definitions/adminExecutionTerminateResponse" - /api/v1/launch_plan_ids/{project}/{domain}: - get: + /api/v1/executions/org/{org}: + put: tags: - "AdminService" - summary: "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of\ - \ launch plan objects." - operationId: "ListLaunchPlanIds" + summary: "Triggers the creation of a :ref:`ref_flyteidl.admin.Execution`" + operationId: "CreateExecution2" parameters: - - name: "project" + - name: "org" in: "path" - description: "Name of the project that contains the identifiers.\n+required" + description: "Optional, org key applied to the resource." required: true type: "string" - x-exportParamName: "Project" - - name: "domain" - in: "path" - description: "Name of the domain the identifiers belongs to within the project.\n\ - +required" + x-exportParamName: "Org" + - in: "body" + name: "body" required: true - type: "string" - x-exportParamName: "Domain" - - name: "limit" - in: "query" - description: "Indicates the number of resources to be returned.\n+required." - required: false - type: "integer" - format: "int64" - x-exportParamName: "Limit" - x-optionalDataType: "Int64" - - name: "token" - in: "query" - description: "In the case of multiple pages of results, the server-provided\ - \ token can be used to fetch the next page\nin a query.\n+optional." - required: false - type: "string" - x-exportParamName: "Token" - x-optionalDataType: "String" - - name: "sort_by.key" - in: "query" - description: "Indicates an attribute to sort the response values.\n+required." 
- required: false - type: "string" - x-exportParamName: "SortByKey" - x-optionalDataType: "String" - - name: "sort_by.direction" - in: "query" - description: "Indicates the direction to apply sort key for response values.\n\ - +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ - \ order." - required: false - type: "string" - default: "DESCENDING" - enum: - - "DESCENDING" - - "ASCENDING" - x-exportParamName: "SortByDirection" - x-optionalDataType: "String" - - name: "filters" - in: "query" - description: "Indicates a list of filters passed as string.\n+optional." - required: false - type: "string" - x-exportParamName: "Filters" - x-optionalDataType: "String" + schema: + $ref: "#/definitions/adminExecutionCreateRequest" + x-exportParamName: "Body" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminNamedEntityIdentifierList" - /api/v1/launch_plans: + $ref: "#/definitions/adminExecutionCreateResponse" + /api/v1/executions/recover: post: tags: - "AdminService" - summary: "Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition" - operationId: "CreateLaunchPlan" + summary: "Recreates a previously-run workflow execution that will only start\ + \ executing from the last known failure point.\nIn Recover mode, users cannot\ + \ change any input parameters or update the version of the execution.\nThis\ + \ is extremely useful to recover from system errors and byzantine faults like\ + \ - Loss of K8s cluster, bugs in platform or instability, machine failures,\n\ + downstream system failures (downstream services), or simply to recover executions\ + \ that failed because of retry exhaustion and should complete if tried again.\n\ + See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details." 
+ operationId: "RecoverExecution" parameters: - in: "body" name: "body" required: true schema: - $ref: "#/definitions/adminLaunchPlanCreateRequest" + $ref: "#/definitions/adminExecutionRecoverRequest" x-exportParamName: "Body" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminLaunchPlanCreateResponse" - /api/v1/launch_plans/{id.project}/{id.domain}: + $ref: "#/definitions/adminExecutionCreateResponse" + /api/v1/executions/relaunch: + post: + tags: + - "AdminService" + summary: "Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution`" + operationId: "RelaunchExecution" + parameters: + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminExecutionRelaunchRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminExecutionCreateResponse" + /api/v1/executions/{id.project}/{id.domain}: get: tags: - "AdminService" - summary: "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions." - operationId: "ListLaunchPlans2" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.Execution`." + operationId: "ListExecutions" parameters: - name: "id.project" in: "path" @@ -1034,6 +1878,13 @@ paths: type: "string" x-exportParamName: "IdName" x-optionalDataType: "String" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" - name: "limit" in: "query" description: "Indicates the number of resources to be returned.\n+required." @@ -1082,13 +1933,13 @@ paths: 200: description: "A successful response." 
schema: - $ref: "#/definitions/adminLaunchPlanList" - /api/v1/launch_plans/{id.project}/{id.domain}/{id.name}: + $ref: "#/definitions/adminExecutionList" + /api/v1/executions/{id.project}/{id.domain}/{id.name}: get: tags: - "AdminService" - summary: "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions." - operationId: "ListLaunchPlans" + summary: "Fetches a :ref:`ref_flyteidl.admin.Execution`." + operationId: "GetExecution" parameters: - name: "id.project" in: "path" @@ -1105,67 +1956,27 @@ paths: x-exportParamName: "IdDomain" - name: "id.name" in: "path" - description: "User provided value for the resource.\nThe combination of project\ - \ + domain + name uniquely identifies the resource.\n+optional - in certain\ - \ contexts - like 'List API', 'Launch plans'" + description: "User or system provided value for the resource." required: true type: "string" x-exportParamName: "IdName" - - name: "limit" - in: "query" - description: "Indicates the number of resources to be returned.\n+required." - required: false - type: "integer" - format: "int64" - x-exportParamName: "Limit" - x-optionalDataType: "Int64" - - name: "token" - in: "query" - description: "In the case of multiple pages of results, this server-provided\ - \ token can be used to fetch the next page\nin a query.\n+optional." - required: false - type: "string" - x-exportParamName: "Token" - x-optionalDataType: "String" - - name: "filters" - in: "query" - description: "Indicates a list of filters passed as string.\nMore info on\ - \ constructing filters : \n+optional." - required: false - type: "string" - x-exportParamName: "Filters" - x-optionalDataType: "String" - - name: "sort_by.key" - in: "query" - description: "Indicates an attribute to sort the response values.\n+required." 
- required: false - type: "string" - x-exportParamName: "SortByKey" - x-optionalDataType: "String" - - name: "sort_by.direction" + - name: "id.org" in: "query" - description: "Indicates the direction to apply sort key for response values.\n\ - +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ - \ order." + description: "Optional, org key applied to the resource." required: false type: "string" - default: "DESCENDING" - enum: - - "DESCENDING" - - "ASCENDING" - x-exportParamName: "SortByDirection" + x-exportParamName: "IdOrg" x-optionalDataType: "String" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminLaunchPlanList" - /api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}: - get: + $ref: "#/definitions/adminExecution" + put: tags: - "AdminService" - summary: "Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition." - operationId: "GetLaunchPlan" + summary: "Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`." + operationId: "UpdateExecution" parameters: - name: "id.project" in: "path" @@ -1182,46 +1993,26 @@ paths: x-exportParamName: "IdDomain" - name: "id.name" in: "path" - description: "User provided value for the resource." + description: "User or system provided value for the resource." required: true type: "string" x-exportParamName: "IdName" - - name: "id.version" - in: "path" - description: "Specific version of the resource." + - in: "body" + name: "body" required: true - type: "string" - x-exportParamName: "IdVersion" - - name: "id.resource_type" - in: "query" - description: "Identifies the specific type of resource that this identifier\ - \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ - \ in Flyte DataCatalog. A Dataset is also a versioned entity and can be\ - \ a compilation of multiple individual objects.\nEventually all Catalog\ - \ objects should be modeled similar to Flyte Objects. 
The Dataset entities\ - \ makes it possible for the UI and CLI to act on the objects \nin a similar\ - \ manner to other Flyte objects" - required: false - type: "string" - default: "UNSPECIFIED" - enum: - - "UNSPECIFIED" - - "TASK" - - "WORKFLOW" - - "LAUNCH_PLAN" - - "DATASET" - x-exportParamName: "IdResourceType" - x-optionalDataType: "String" + schema: + $ref: "#/definitions/adminExecutionUpdateRequest" + x-exportParamName: "Body" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminLaunchPlan" - put: + $ref: "#/definitions/adminExecutionUpdateResponse" + delete: tags: - "AdminService" - summary: "Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`." - operationId: "UpdateLaunchPlan" + summary: "Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`." + operationId: "TerminateExecution" parameters: - name: "id.project" in: "path" @@ -1238,127 +2029,227 @@ paths: x-exportParamName: "IdDomain" - name: "id.name" in: "path" - description: "User provided value for the resource." + description: "User or system provided value for the resource." required: true type: "string" x-exportParamName: "IdName" - - name: "id.version" - in: "path" - description: "Specific version of the resource." - required: true - type: "string" - x-exportParamName: "IdVersion" - in: "body" name: "body" required: true schema: - $ref: "#/definitions/adminLaunchPlanUpdateRequest" + $ref: "#/definitions/adminExecutionTerminateRequest" x-exportParamName: "Body" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminLaunchPlanUpdateResponse" - /api/v1/matchable_attributes: + $ref: "#/definitions/adminExecutionTerminateResponse" + /api/v1/launch_plan_ids/org/{org}/{project}/{domain}: get: tags: - "AdminService" - summary: "Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ - \ for a specific resource type." 
- operationId: "ListMatchableAttributes" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of\ + \ launch plan objects." + operationId: "ListLaunchPlanIds2" parameters: - - name: "resource_type" + - name: "org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "Org" + - name: "project" + in: "path" + description: "Name of the project that contains the identifiers.\n+required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Name of the domain the identifiers belongs to within the project.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - name: "limit" in: "query" - description: "+required.\n\n - TASK_RESOURCE: Applies to customizable task\ - \ resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring\ - \ templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures\ - \ task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL:\ - \ Configures the K8s cluster label to be used for execution to be run\n\ - \ - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service\ - \ when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable\ - \ plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG:\ - \ Adds defaults for customizable workflow-execution specifications and overrides.\n\ - \ - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which\ - \ this execution should run." + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." 
required: false type: "string" - default: "TASK_RESOURCE" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" enum: - - "TASK_RESOURCE" - - "CLUSTER_RESOURCE" - - "EXECUTION_QUEUE" - - "EXECUTION_CLUSTER_LABEL" - - "QUALITY_OF_SERVICE_SPECIFICATION" - - "PLUGIN_OVERRIDE" - - "WORKFLOW_EXECUTION_CONFIG" - - "CLUSTER_ASSIGNMENT" - x-exportParamName: "ResourceType" + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\n+optional." + required: false + type: "string" + x-exportParamName: "Filters" x-optionalDataType: "String" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminListMatchableAttributesResponse" - /api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}: + $ref: "#/definitions/adminNamedEntityIdentifierList" + /api/v1/launch_plan_ids/{project}/{domain}: get: tags: - "AdminService" - summary: "Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`." - operationId: "GetExecutionMetrics" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of\ + \ launch plan objects." + operationId: "ListLaunchPlanIds" parameters: - - name: "id.project" - in: "path" - description: "Name of the project the resource belongs to." 
- required: true - type: "string" - x-exportParamName: "IdProject" - - name: "id.domain" + - name: "project" in: "path" - description: "Name of the domain the resource belongs to.\nA domain can be\ - \ considered as a subset within a specific project." + description: "Name of the project that contains the identifiers.\n+required" required: true type: "string" - x-exportParamName: "IdDomain" - - name: "id.name" + x-exportParamName: "Project" + - name: "domain" in: "path" - description: "User or system provided value for the resource." + description: "Name of the domain the identifiers belongs to within the project.\n\ + +required" required: true type: "string" - x-exportParamName: "IdName" - - name: "depth" + x-exportParamName: "Domain" + - name: "limit" in: "query" - description: "depth defines the number of Flyte entity levels to traverse\ - \ when breaking down execution details." + description: "Indicates the number of resources to be returned.\n+required." required: false type: "integer" - format: "int32" - x-exportParamName: "Depth" - x-optionalDataType: "Int32" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." 
+ required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "Org" + x-optionalDataType: "String" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminWorkflowExecutionGetMetricsResponse" - /api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}: + $ref: "#/definitions/adminNamedEntityIdentifierList" + /api/v1/launch_plans: + post: + tags: + - "AdminService" + summary: "Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition" + operationId: "CreateLaunchPlan" + parameters: + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminLaunchPlanCreateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminLaunchPlanCreateResponse" + /api/v1/launch_plans/org/{id.org}: + post: + tags: + - "AdminService" + summary: "Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition" + operationId: "CreateLaunchPlan2" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminLaunchPlanCreateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." 
+ schema: + $ref: "#/definitions/adminLaunchPlanCreateResponse" + /api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}: get: tags: - "AdminService" - summary: "Returns a :ref:`ref_flyteidl.admin.NamedEntity` object." - operationId: "GetNamedEntity" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions." + operationId: "ListLaunchPlans4" parameters: - - name: "resource_type" + - name: "id.org" in: "path" - description: "Resource type of the metadata to get. One of Task, Workflow\ - \ or LaunchPlan.\n+required" + description: "Optional, org key applied to the resource." required: true type: "string" - enum: - - "UNSPECIFIED" - - "TASK" - - "WORKFLOW" - - "LAUNCH_PLAN" - - "DATASET" - x-exportParamName: "ResourceType" + x-exportParamName: "IdOrg" - name: "id.project" in: "path" description: "Name of the project the resource belongs to." @@ -1373,36 +2264,76 @@ paths: type: "string" x-exportParamName: "IdDomain" - name: "id.name" - in: "path" + in: "query" description: "User provided value for the resource.\nThe combination of project\ \ + domain + name uniquely identifies the resource.\n+optional - in certain\ - \ contexts - like 'List API', 'Launch plans'" - required: true + \ contexts - like 'List API', 'Launch plans'." + required: false type: "string" x-exportParamName: "IdName" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." 
+ required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminNamedEntity" - put: + $ref: "#/definitions/adminLaunchPlanList" + /api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}: + get: tags: - "AdminService" - summary: "Updates a :ref:`ref_flyteidl.admin.NamedEntity` object." - operationId: "UpdateNamedEntity" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions." + operationId: "ListLaunchPlans2" parameters: - - name: "resource_type" + - name: "id.org" in: "path" - description: "Resource type of the metadata to update\n+required" + description: "Optional, org key applied to the resource." required: true type: "string" - enum: - - "UNSPECIFIED" - - "TASK" - - "WORKFLOW" - - "LAUNCH_PLAN" - - "DATASET" - x-exportParamName: "ResourceType" + x-exportParamName: "IdOrg" - name: "id.project" in: "path" description: "Name of the project the resource belongs to." 
@@ -1424,52 +2355,9 @@ paths: required: true type: "string" x-exportParamName: "IdName" - - in: "body" - name: "body" - required: true - schema: - $ref: "#/definitions/adminNamedEntityUpdateRequest" - x-exportParamName: "Body" - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminNamedEntityUpdateResponse" - /api/v1/named_entities/{resource_type}/{project}/{domain}: - get: - tags: - - "AdminService" - summary: "Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects." - operationId: "ListNamedEntities" - parameters: - - name: "resource_type" - in: "path" - description: "Resource type of the metadata to query. One of Task, Workflow\ - \ or LaunchPlan.\n+required" - required: true - type: "string" - enum: - - "UNSPECIFIED" - - "TASK" - - "WORKFLOW" - - "LAUNCH_PLAN" - - "DATASET" - x-exportParamName: "ResourceType" - - name: "project" - in: "path" - description: "Name of the project that contains the identifiers.\n+required" - required: true - type: "string" - x-exportParamName: "Project" - - name: "domain" - in: "path" - description: "Name of the domain the identifiers belongs to within the project." - required: true - type: "string" - x-exportParamName: "Domain" - name: "limit" in: "query" - description: "Indicates the number of resources to be returned." + description: "Indicates the number of resources to be returned.\n+required." required: false type: "integer" format: "int64" @@ -1477,12 +2365,20 @@ paths: x-optionalDataType: "Int64" - name: "token" in: "query" - description: "In the case of multiple pages of results, the server-provided\ + description: "In the case of multiple pages of results, this server-provided\ \ token can be used to fetch the next page\nin a query.\n+optional." 
required: false type: "string" x-exportParamName: "Token" x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" - name: "sort_by.key" in: "query" description: "Indicates an attribute to sort the response values.\n+required." @@ -1503,80 +2399,152 @@ paths: - "ASCENDING" x-exportParamName: "SortByDirection" x-optionalDataType: "String" - - name: "filters" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminLaunchPlanList" + /api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}: + get: + tags: + - "AdminService" + summary: "Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition." + operationId: "GetLaunchPlan2" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdName" + - name: "id.version" + in: "path" + description: "Specific version of the resource." + required: true + type: "string" + x-exportParamName: "IdVersion" + - name: "id.resource_type" in: "query" - description: "Indicates a list of filters passed as string.\n+optional." 
+ description: "Identifies the specific type of resource that this identifier\ + \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ + \ in Flyte DataCatalog. A Dataset is also a versioned entity and can be\ + \ a compilation of multiple individual objects.\nEventually all Catalog\ + \ objects should be modeled similar to Flyte Objects. The Dataset entities\ + \ makes it possible for the UI and CLI to act on the objects \nin a similar\ + \ manner to other Flyte objects" required: false type: "string" - x-exportParamName: "Filters" + default: "UNSPECIFIED" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "IdResourceType" x-optionalDataType: "String" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminNamedEntityList" - /api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}: - get: + $ref: "#/definitions/adminLaunchPlan" + put: tags: - "AdminService" - summary: "Fetches a :ref:`ref_flyteidl.admin.NodeExecution`." - operationId: "GetNodeExecution" + summary: "Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`." + operationId: "UpdateLaunchPlan2" parameters: - - name: "id.execution_id.project" + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - name: "id.project" in: "path" description: "Name of the project the resource belongs to." required: true type: "string" - x-exportParamName: "IdExecutionIdProject" - - name: "id.execution_id.domain" + x-exportParamName: "IdProject" + - name: "id.domain" in: "path" description: "Name of the domain the resource belongs to.\nA domain can be\ \ considered as a subset within a specific project." 
required: true type: "string" - x-exportParamName: "IdExecutionIdDomain" - - name: "id.execution_id.name" + x-exportParamName: "IdDomain" + - name: "id.name" in: "path" - description: "User or system provided value for the resource." + description: "User provided value for the resource." required: true type: "string" - x-exportParamName: "IdExecutionIdName" - - name: "id.node_id" + x-exportParamName: "IdName" + - name: "id.version" in: "path" + description: "Specific version of the resource." required: true type: "string" - x-exportParamName: "IdNodeId" + x-exportParamName: "IdVersion" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/flyteidladminNodeExecution" - /api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}: + $ref: "#/definitions/adminLaunchPlanUpdateResponse" + /api/v1/launch_plans/{id.project}/{id.domain}: get: tags: - "AdminService" - summary: "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`." - operationId: "ListNodeExecutions" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions." + operationId: "ListLaunchPlans3" parameters: - - name: "workflow_execution_id.project" + - name: "id.project" in: "path" description: "Name of the project the resource belongs to." required: true type: "string" - x-exportParamName: "WorkflowExecutionIdProject" - - name: "workflow_execution_id.domain" + x-exportParamName: "IdProject" + - name: "id.domain" in: "path" description: "Name of the domain the resource belongs to.\nA domain can be\ \ considered as a subset within a specific project." required: true type: "string" - x-exportParamName: "WorkflowExecutionIdDomain" - - name: "workflow_execution_id.name" - in: "path" - description: "User or system provided value for the resource." 
- required: true + x-exportParamName: "IdDomain" + - name: "id.name" + in: "query" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'." + required: false type: "string" - x-exportParamName: "WorkflowExecutionIdName" + x-exportParamName: "IdName" + x-optionalDataType: "String" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" - name: "limit" in: "query" description: "Indicates the number of resources to be returned.\n+required." @@ -1587,6 +2555,8 @@ paths: x-optionalDataType: "Int64" - name: "token" in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." required: false type: "string" x-exportParamName: "Token" @@ -1619,244 +2589,49 @@ paths: - "ASCENDING" x-exportParamName: "SortByDirection" x-optionalDataType: "String" - - name: "unique_parent_id" - in: "query" - description: "Unique identifier of the parent node in the execution\n+optional." - required: false - type: "string" - x-exportParamName: "UniqueParentId" - x-optionalDataType: "String" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminNodeExecutionList" - /api/v1/project_attributes/{attributes.project}: - put: + $ref: "#/definitions/adminLaunchPlanList" + /api/v1/launch_plans/{id.project}/{id.domain}/{id.name}: + get: tags: - "AdminService" - summary: "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ - \ at the project level" - operationId: "UpdateProjectAttributes" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions." 
+ operationId: "ListLaunchPlans" parameters: - - name: "attributes.project" + - name: "id.project" in: "path" - description: "Unique project id for which this set of attributes will be applied." + description: "Name of the project the resource belongs to." required: true type: "string" - x-exportParamName: "AttributesProject" - - in: "body" - name: "body" - required: true - schema: - $ref: "#/definitions/adminProjectAttributesUpdateRequest" - x-exportParamName: "Body" - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminProjectAttributesUpdateResponse" - /api/v1/project_attributes/{project}: - get: - tags: - - "AdminService" - summary: "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ - \ for a project and domain." - operationId: "GetProjectAttributes" - parameters: - - name: "project" + x-exportParamName: "IdProject" + - name: "id.domain" in: "path" - description: "Unique project id which this set of attributes references.\n\ - +required" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." 
required: true type: "string" - x-exportParamName: "Project" - - name: "resource_type" - in: "query" - description: "Which type of matchable attributes to return.\n+required.\n\n\ - \ - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n\ - \ - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster\ - \ resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution\ - \ queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster\ - \ label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION:\ - \ Configures default quality of service when undefined in an execution spec.\n\ - \ - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior\ - \ for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for\ - \ customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT:\ - \ Controls how to select an available cluster on which this execution should\ - \ run." - required: false - type: "string" - default: "TASK_RESOURCE" - enum: - - "TASK_RESOURCE" - - "CLUSTER_RESOURCE" - - "EXECUTION_QUEUE" - - "EXECUTION_CLUSTER_LABEL" - - "QUALITY_OF_SERVICE_SPECIFICATION" - - "PLUGIN_OVERRIDE" - - "WORKFLOW_EXECUTION_CONFIG" - - "CLUSTER_ASSIGNMENT" - x-exportParamName: "ResourceType" - x-optionalDataType: "String" - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminProjectAttributesGetResponse" - delete: - tags: - - "AdminService" - summary: "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ - \ for a project and domain." 
- operationId: "DeleteProjectAttributes" - parameters: - - name: "project" - in: "path" - description: "Unique project id which this set of attributes references.\n\ - +required" - required: true - type: "string" - x-exportParamName: "Project" - - in: "body" - name: "body" - required: true - schema: - $ref: "#/definitions/adminProjectAttributesDeleteRequest" - x-exportParamName: "Body" - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminProjectAttributesDeleteResponse" - /api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}: - put: - tags: - - "AdminService" - summary: "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ - \ for a project and domain." - operationId: "UpdateProjectDomainAttributes" - parameters: - - name: "attributes.project" - in: "path" - description: "Unique project id for which this set of attributes will be applied." - required: true - type: "string" - x-exportParamName: "AttributesProject" - - name: "attributes.domain" - in: "path" - description: "Unique domain id for which this set of attributes will be applied." - required: true - type: "string" - x-exportParamName: "AttributesDomain" - - in: "body" - name: "body" - required: true - schema: - $ref: "#/definitions/adminProjectDomainAttributesUpdateRequest" - x-exportParamName: "Body" - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminProjectDomainAttributesUpdateResponse" - /api/v1/project_domain_attributes/{project}/{domain}: - get: - tags: - - "AdminService" - summary: "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ - \ for a project and domain." 
- operationId: "GetProjectDomainAttributes" - parameters: - - name: "project" - in: "path" - description: "Unique project id which this set of attributes references.\n\ - +required" - required: true - type: "string" - x-exportParamName: "Project" - - name: "domain" + x-exportParamName: "IdDomain" + - name: "id.name" in: "path" - description: "Unique domain id which this set of attributes references.\n\ - +required" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'" required: true type: "string" - x-exportParamName: "Domain" - - name: "resource_type" + x-exportParamName: "IdName" + - name: "id.org" in: "query" - description: "Which type of matchable attributes to return.\n+required.\n\n\ - \ - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n\ - \ - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster\ - \ resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution\ - \ queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster\ - \ label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION:\ - \ Configures default quality of service when undefined in an execution spec.\n\ - \ - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior\ - \ for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for\ - \ customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT:\ - \ Controls how to select an available cluster on which this execution should\ - \ run." + description: "Optional, org key applied to the resource." 
required: false type: "string" - default: "TASK_RESOURCE" - enum: - - "TASK_RESOURCE" - - "CLUSTER_RESOURCE" - - "EXECUTION_QUEUE" - - "EXECUTION_CLUSTER_LABEL" - - "QUALITY_OF_SERVICE_SPECIFICATION" - - "PLUGIN_OVERRIDE" - - "WORKFLOW_EXECUTION_CONFIG" - - "CLUSTER_ASSIGNMENT" - x-exportParamName: "ResourceType" + x-exportParamName: "IdOrg" x-optionalDataType: "String" - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminProjectDomainAttributesGetResponse" - delete: - tags: - - "AdminService" - summary: "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ - \ for a project and domain." - operationId: "DeleteProjectDomainAttributes" - parameters: - - name: "project" - in: "path" - description: "Unique project id which this set of attributes references.\n\ - +required" - required: true - type: "string" - x-exportParamName: "Project" - - name: "domain" - in: "path" - description: "Unique domain id which this set of attributes references.\n\ - +required" - required: true - type: "string" - x-exportParamName: "Domain" - - in: "body" - name: "body" - required: true - schema: - $ref: "#/definitions/adminProjectDomainAttributesDeleteRequest" - x-exportParamName: "Body" - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminProjectDomainAttributesDeleteResponse" - /api/v1/projects: - get: - tags: - - "AdminService" - summary: "Fetches a list of :ref:`ref_flyteidl.admin.Project`" - operationId: "ListProjects" - parameters: - name: "limit" in: "query" - description: "Indicates the number of projects to be returned.\n+required." + description: "Indicates the number of resources to be returned.\n+required." required: false type: "integer" format: "int64" @@ -1902,113 +2677,40 @@ paths: 200: description: "A successful response." 
schema: - $ref: "#/definitions/adminProjects" - post: - tags: - - "AdminService" - summary: "Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment." - operationId: "RegisterProject" - parameters: - - in: "body" - name: "body" - required: true - schema: - $ref: "#/definitions/adminProjectRegisterRequest" - x-exportParamName: "Body" - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminProjectRegisterResponse" - /api/v1/projects/{id}: - put: - tags: - - "AdminService" - summary: "Updates an existing :ref:`ref_flyteidl.admin.Project` \nflyteidl.admin.Project\ - \ should be passed but the domains property should be empty;\nit will be ignored\ - \ in the handler as domains cannot be updated via this API." - operationId: "UpdateProject" - parameters: - - name: "id" - in: "path" - description: "Globally unique project name." - required: true - type: "string" - x-exportParamName: "Id" - - in: "body" - name: "body" - required: true - schema: - $ref: "#/definitions/adminProject" - x-exportParamName: "Body" - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminProjectUpdateResponse" - ? /api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} - : get: + $ref: "#/definitions/adminLaunchPlanList" + /api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}: + get: tags: - "AdminService" - summary: "Fetches a :ref:`ref_flyteidl.admin.TaskExecution`." - operationId: "GetTaskExecution" + summary: "Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition." + operationId: "GetLaunchPlan" parameters: - - name: "id.node_execution_id.execution_id.project" - in: "path" - description: "Name of the project the resource belongs to." 
- required: true - type: "string" - x-exportParamName: "IdNodeExecutionIdExecutionIdProject" - - name: "id.node_execution_id.execution_id.domain" - in: "path" - description: "Name of the domain the resource belongs to.\nA domain can be\ - \ considered as a subset within a specific project." - required: true - type: "string" - x-exportParamName: "IdNodeExecutionIdExecutionIdDomain" - - name: "id.node_execution_id.execution_id.name" - in: "path" - description: "User or system provided value for the resource." - required: true - type: "string" - x-exportParamName: "IdNodeExecutionIdExecutionIdName" - - name: "id.node_execution_id.node_id" - in: "path" - required: true - type: "string" - x-exportParamName: "IdNodeExecutionIdNodeId" - - name: "id.task_id.project" + - name: "id.project" in: "path" description: "Name of the project the resource belongs to." required: true type: "string" - x-exportParamName: "IdTaskIdProject" - - name: "id.task_id.domain" + x-exportParamName: "IdProject" + - name: "id.domain" in: "path" description: "Name of the domain the resource belongs to.\nA domain can be\ \ considered as a subset within a specific project." required: true type: "string" - x-exportParamName: "IdTaskIdDomain" - - name: "id.task_id.name" + x-exportParamName: "IdDomain" + - name: "id.name" in: "path" description: "User provided value for the resource." required: true type: "string" - x-exportParamName: "IdTaskIdName" - - name: "id.task_id.version" + x-exportParamName: "IdName" + - name: "id.version" in: "path" description: "Specific version of the resource." 
required: true type: "string" - x-exportParamName: "IdTaskIdVersion" - - name: "id.retry_attempt" - in: "path" - required: true - type: "integer" - format: "int64" - x-exportParamName: "IdRetryAttempt" - - name: "id.task_id.resource_type" + x-exportParamName: "IdVersion" + - name: "id.resource_type" in: "query" description: "Identifies the specific type of resource that this identifier\ \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ @@ -2026,187 +2728,168 @@ paths: - "WORKFLOW" - "LAUNCH_PLAN" - "DATASET" - x-exportParamName: "IdTaskIdResourceType" + x-exportParamName: "IdResourceType" + x-optionalDataType: "String" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" x-optionalDataType: "String" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/flyteidladminTaskExecution" - ? /api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id} - : get: + $ref: "#/definitions/adminLaunchPlan" + put: tags: - "AdminService" - summary: "Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`." - operationId: "ListTaskExecutions" + summary: "Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`." + operationId: "UpdateLaunchPlan" parameters: - - name: "node_execution_id.execution_id.project" + - name: "id.project" in: "path" description: "Name of the project the resource belongs to." required: true type: "string" - x-exportParamName: "NodeExecutionIdExecutionIdProject" - - name: "node_execution_id.execution_id.domain" + x-exportParamName: "IdProject" + - name: "id.domain" in: "path" description: "Name of the domain the resource belongs to.\nA domain can be\ \ considered as a subset within a specific project." 
required: true type: "string" - x-exportParamName: "NodeExecutionIdExecutionIdDomain" - - name: "node_execution_id.execution_id.name" + x-exportParamName: "IdDomain" + - name: "id.name" in: "path" - description: "User or system provided value for the resource." + description: "User provided value for the resource." required: true type: "string" - x-exportParamName: "NodeExecutionIdExecutionIdName" - - name: "node_execution_id.node_id" + x-exportParamName: "IdName" + - name: "id.version" in: "path" + description: "Specific version of the resource." required: true type: "string" - x-exportParamName: "NodeExecutionIdNodeId" - - name: "limit" - in: "query" - description: "Indicates the number of resources to be returned.\n+required." - required: false - type: "integer" - format: "int64" - x-exportParamName: "Limit" - x-optionalDataType: "Int64" - - name: "token" - in: "query" - description: "In the case of multiple pages of results, the server-provided\ - \ token can be used to fetch the next page\nin a query.\n+optional." - required: false - type: "string" - x-exportParamName: "Token" - x-optionalDataType: "String" - - name: "filters" - in: "query" - description: "Indicates a list of filters passed as string.\nMore info on\ - \ constructing filters : \n+optional." - required: false - type: "string" - x-exportParamName: "Filters" - x-optionalDataType: "String" - - name: "sort_by.key" - in: "query" - description: "Indicates an attribute to sort the response values.\n+required." - required: false - type: "string" - x-exportParamName: "SortByKey" - x-optionalDataType: "String" - - name: "sort_by.direction" - in: "query" - description: "Indicates the direction to apply sort key for response values.\n\ - +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ - \ order." 
- required: false - type: "string" - default: "DESCENDING" - enum: - - "DESCENDING" - - "ASCENDING" - x-exportParamName: "SortByDirection" - x-optionalDataType: "String" + x-exportParamName: "IdVersion" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminLaunchPlanUpdateRequest" + x-exportParamName: "Body" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminTaskExecutionList" - /api/v1/task_ids/{project}/{domain}: + $ref: "#/definitions/adminLaunchPlanUpdateResponse" + /api/v1/matchable_attributes: get: tags: - "AdminService" - summary: "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of\ - \ task objects." - operationId: "ListTaskIds" + summary: "Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a specific resource type." + operationId: "ListMatchableAttributes" parameters: - - name: "project" - in: "path" - description: "Name of the project that contains the identifiers.\n+required" - required: true - type: "string" - x-exportParamName: "Project" - - name: "domain" - in: "path" - description: "Name of the domain the identifiers belongs to within the project.\n\ - +required" - required: true - type: "string" - x-exportParamName: "Domain" - - name: "limit" - in: "query" - description: "Indicates the number of resources to be returned.\n+required." - required: false - type: "integer" - format: "int64" - x-exportParamName: "Limit" - x-optionalDataType: "Int64" - - name: "token" - in: "query" - description: "In the case of multiple pages of results, the server-provided\ - \ token can be used to fetch the next page\nin a query.\n+optional." - required: false - type: "string" - x-exportParamName: "Token" - x-optionalDataType: "String" - - name: "sort_by.key" - in: "query" - description: "Indicates an attribute to sort the response values.\n+required." 
- required: false - type: "string" - x-exportParamName: "SortByKey" - x-optionalDataType: "String" - - name: "sort_by.direction" + - name: "resource_type" in: "query" - description: "Indicates the direction to apply sort key for response values.\n\ - +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ - \ order." + description: "+required.\n\n - TASK_RESOURCE: Applies to customizable task\ + \ resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring\ + \ templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures\ + \ task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL:\ + \ Configures the K8s cluster label to be used for execution to be run\n\ + \ - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service\ + \ when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable\ + \ plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG:\ + \ Adds defaults for customizable workflow-execution specifications and overrides.\n\ + \ - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which\ + \ this execution should run." required: false type: "string" - default: "DESCENDING" + default: "TASK_RESOURCE" enum: - - "DESCENDING" - - "ASCENDING" - x-exportParamName: "SortByDirection" + - "TASK_RESOURCE" + - "CLUSTER_RESOURCE" + - "EXECUTION_QUEUE" + - "EXECUTION_CLUSTER_LABEL" + - "QUALITY_OF_SERVICE_SPECIFICATION" + - "PLUGIN_OVERRIDE" + - "WORKFLOW_EXECUTION_CONFIG" + - "CLUSTER_ASSIGNMENT" + x-exportParamName: "ResourceType" x-optionalDataType: "String" - - name: "filters" + - name: "org" in: "query" - description: "Indicates a list of filters passed as string.\n+optional." + description: "Optional, org filter applied to list project requests." required: false type: "string" - x-exportParamName: "Filters" + x-exportParamName: "Org" x-optionalDataType: "String" responses: 200: description: "A successful response." 
schema: - $ref: "#/definitions/adminNamedEntityIdentifierList" - /api/v1/tasks: - post: + $ref: "#/definitions/adminListMatchableAttributesResponse" + /api/v1/matchable_attributes/org/{org}: + get: tags: - "AdminService" - summary: "Create and upload a :ref:`ref_flyteidl.admin.Task` definition" - operationId: "CreateTask" + summary: "Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a specific resource type." + operationId: "ListMatchableAttributes2" parameters: - - in: "body" - name: "body" + - name: "org" + in: "path" + description: "Optional, org filter applied to list project requests." required: true - schema: - $ref: "#/definitions/flyteidladminTaskCreateRequest" - x-exportParamName: "Body" + type: "string" + x-exportParamName: "Org" + - name: "resource_type" + in: "query" + description: "+required.\n\n - TASK_RESOURCE: Applies to customizable task\ + \ resource requests and limits.\n - CLUSTER_RESOURCE: Applies to configuring\ + \ templated kubernetes cluster resources.\n - EXECUTION_QUEUE: Configures\ + \ task and dynamic task execution queue assignment.\n - EXECUTION_CLUSTER_LABEL:\ + \ Configures the K8s cluster label to be used for execution to be run\n\ + \ - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service\ + \ when undefined in an execution spec.\n - PLUGIN_OVERRIDE: Selects configurable\ + \ plugin implementation behavior for a given task type.\n - WORKFLOW_EXECUTION_CONFIG:\ + \ Adds defaults for customizable workflow-execution specifications and overrides.\n\ + \ - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which\ + \ this execution should run." 
+ required: false + type: "string" + default: "TASK_RESOURCE" + enum: + - "TASK_RESOURCE" + - "CLUSTER_RESOURCE" + - "EXECUTION_QUEUE" + - "EXECUTION_CLUSTER_LABEL" + - "QUALITY_OF_SERVICE_SPECIFICATION" + - "PLUGIN_OVERRIDE" + - "WORKFLOW_EXECUTION_CONFIG" + - "CLUSTER_ASSIGNMENT" + x-exportParamName: "ResourceType" + x-optionalDataType: "String" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/flyteidladminTaskCreateResponse" - /api/v1/tasks/{id.project}/{id.domain}: + $ref: "#/definitions/adminListMatchableAttributesResponse" + /api/v1/metrics/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}: get: tags: - "AdminService" - summary: "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions." - operationId: "ListTasks2" + summary: "Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`." + operationId: "GetExecutionMetrics2" parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" - name: "id.project" in: "path" description: "Name of the project the resource belongs to." @@ -2221,69 +2904,31 @@ paths: type: "string" x-exportParamName: "IdDomain" - name: "id.name" - in: "query" - description: "User provided value for the resource.\nThe combination of project\ - \ + domain + name uniquely identifies the resource.\n+optional - in certain\ - \ contexts - like 'List API', 'Launch plans'." - required: false + in: "path" + description: "User or system provided value for the resource." + required: true type: "string" x-exportParamName: "IdName" - x-optionalDataType: "String" - - name: "limit" + - name: "depth" in: "query" - description: "Indicates the number of resources to be returned.\n+required." + description: "depth defines the number of Flyte entity levels to traverse\ + \ when breaking down execution details." 
required: false type: "integer" - format: "int64" - x-exportParamName: "Limit" - x-optionalDataType: "Int64" - - name: "token" - in: "query" - description: "In the case of multiple pages of results, this server-provided\ - \ token can be used to fetch the next page\nin a query.\n+optional." - required: false - type: "string" - x-exportParamName: "Token" - x-optionalDataType: "String" - - name: "filters" - in: "query" - description: "Indicates a list of filters passed as string.\nMore info on\ - \ constructing filters : \n+optional." - required: false - type: "string" - x-exportParamName: "Filters" - x-optionalDataType: "String" - - name: "sort_by.key" - in: "query" - description: "Indicates an attribute to sort the response values.\n+required." - required: false - type: "string" - x-exportParamName: "SortByKey" - x-optionalDataType: "String" - - name: "sort_by.direction" - in: "query" - description: "Indicates the direction to apply sort key for response values.\n\ - +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ - \ order." - required: false - type: "string" - default: "DESCENDING" - enum: - - "DESCENDING" - - "ASCENDING" - x-exportParamName: "SortByDirection" - x-optionalDataType: "String" + format: "int32" + x-exportParamName: "Depth" + x-optionalDataType: "Int32" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminTaskList" - /api/v1/tasks/{id.project}/{id.domain}/{id.name}: + $ref: "#/definitions/adminWorkflowExecutionGetMetricsResponse" + /api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}: get: tags: - "AdminService" - summary: "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions." - operationId: "ListTasks" + summary: "Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`." 
+ operationId: "GetExecutionMetrics" parameters: - name: "id.project" in: "path" @@ -2300,273 +2945,165 @@ paths: x-exportParamName: "IdDomain" - name: "id.name" in: "path" - description: "User provided value for the resource.\nThe combination of project\ - \ + domain + name uniquely identifies the resource.\n+optional - in certain\ - \ contexts - like 'List API', 'Launch plans'" + description: "User or system provided value for the resource." required: true type: "string" x-exportParamName: "IdName" - - name: "limit" - in: "query" - description: "Indicates the number of resources to be returned.\n+required." - required: false - type: "integer" - format: "int64" - x-exportParamName: "Limit" - x-optionalDataType: "Int64" - - name: "token" - in: "query" - description: "In the case of multiple pages of results, this server-provided\ - \ token can be used to fetch the next page\nin a query.\n+optional." - required: false - type: "string" - x-exportParamName: "Token" - x-optionalDataType: "String" - - name: "filters" - in: "query" - description: "Indicates a list of filters passed as string.\nMore info on\ - \ constructing filters : \n+optional." - required: false - type: "string" - x-exportParamName: "Filters" - x-optionalDataType: "String" - - name: "sort_by.key" + - name: "id.org" in: "query" - description: "Indicates an attribute to sort the response values.\n+required." + description: "Optional, org key applied to the resource." required: false type: "string" - x-exportParamName: "SortByKey" + x-exportParamName: "IdOrg" x-optionalDataType: "String" - - name: "sort_by.direction" + - name: "depth" in: "query" - description: "Indicates the direction to apply sort key for response values.\n\ - +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ - \ order." + description: "depth defines the number of Flyte entity levels to traverse\ + \ when breaking down execution details." 
required: false - type: "string" - default: "DESCENDING" - enum: - - "DESCENDING" - - "ASCENDING" - x-exportParamName: "SortByDirection" - x-optionalDataType: "String" + type: "integer" + format: "int32" + x-exportParamName: "Depth" + x-optionalDataType: "Int32" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminTaskList" - /api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}: + $ref: "#/definitions/adminWorkflowExecutionGetMetricsResponse" + /api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}: get: tags: - "AdminService" - summary: "Fetch a :ref:`ref_flyteidl.admin.Task` definition." - operationId: "GetTask" + summary: "Returns a :ref:`ref_flyteidl.admin.NamedEntity` object." + operationId: "GetNamedEntity2" parameters: - - name: "id.project" - in: "path" - description: "Name of the project the resource belongs to." - required: true - type: "string" - x-exportParamName: "IdProject" - - name: "id.domain" + - name: "id.org" in: "path" - description: "Name of the domain the resource belongs to.\nA domain can be\ - \ considered as a subset within a specific project." - required: true - type: "string" - x-exportParamName: "IdDomain" - - name: "id.name" - in: "path" - description: "User provided value for the resource." + description: "Optional, org key applied to the resource." required: true type: "string" - x-exportParamName: "IdName" - - name: "id.version" + x-exportParamName: "IdOrg" + - name: "resource_type" in: "path" - description: "Specific version of the resource." + description: "Resource type of the metadata to get. One of Task, Workflow\ + \ or LaunchPlan.\n+required" required: true type: "string" - x-exportParamName: "IdVersion" - - name: "id.resource_type" - in: "query" - description: "Identifies the specific type of resource that this identifier\ - \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ - \ in Flyte DataCatalog. 
A Dataset is also a versioned entity and can be\ - \ a compilation of multiple individual objects.\nEventually all Catalog\ - \ objects should be modeled similar to Flyte Objects. The Dataset entities\ - \ makes it possible for the UI and CLI to act on the objects \nin a similar\ - \ manner to other Flyte objects" - required: false - type: "string" - default: "UNSPECIFIED" enum: - "UNSPECIFIED" - "TASK" - "WORKFLOW" - "LAUNCH_PLAN" - "DATASET" - x-exportParamName: "IdResourceType" - x-optionalDataType: "String" - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminTask" - /api/v1/version: - get: - tags: - - "AdminService" - operationId: "GetVersion" - parameters: [] - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminGetVersionResponse" - /api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}: - put: - tags: - - "AdminService" - summary: "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ - \ for a project, domain and workflow." - operationId: "UpdateWorkflowAttributes" - parameters: - - name: "attributes.project" + x-exportParamName: "ResourceType" + - name: "id.project" in: "path" - description: "Unique project id for which this set of attributes will be applied." + description: "Name of the project the resource belongs to." required: true type: "string" - x-exportParamName: "AttributesProject" - - name: "attributes.domain" + x-exportParamName: "IdProject" + - name: "id.domain" in: "path" - description: "Unique domain id for which this set of attributes will be applied." + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." 
required: true type: "string" - x-exportParamName: "AttributesDomain" - - name: "attributes.workflow" + x-exportParamName: "IdDomain" + - name: "id.name" in: "path" - description: "Workflow name for which this set of attributes will be applied." + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'" required: true type: "string" - x-exportParamName: "AttributesWorkflow" - - in: "body" - name: "body" - required: true - schema: - $ref: "#/definitions/adminWorkflowAttributesUpdateRequest" - x-exportParamName: "Body" + x-exportParamName: "IdName" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminWorkflowAttributesUpdateResponse" - /api/v1/workflow_attributes/{project}/{domain}/{workflow}: - get: + $ref: "#/definitions/adminNamedEntity" + put: tags: - "AdminService" - summary: "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ - \ for a project, domain and workflow." - operationId: "GetWorkflowAttributes" + summary: "Updates a :ref:`ref_flyteidl.admin.NamedEntity` object." + operationId: "UpdateNamedEntity2" parameters: - - name: "project" - in: "path" - description: "Unique project id which this set of attributes references.\n\ - +required" - required: true - type: "string" - x-exportParamName: "Project" - - name: "domain" + - name: "id.org" in: "path" - description: "Unique domain id which this set of attributes references.\n\ - +required" + description: "Optional, org key applied to the resource." 
required: true type: "string" - x-exportParamName: "Domain" - - name: "workflow" + x-exportParamName: "IdOrg" + - name: "resource_type" in: "path" - description: "Workflow name which this set of attributes references.\n+required" + description: "Resource type of the metadata to update\n+required" required: true type: "string" - x-exportParamName: "Workflow" - - name: "resource_type" - in: "query" - description: "Which type of matchable attributes to return.\n+required.\n\n\ - \ - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n\ - \ - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster\ - \ resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution\ - \ queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster\ - \ label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION:\ - \ Configures default quality of service when undefined in an execution spec.\n\ - \ - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior\ - \ for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for\ - \ customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT:\ - \ Controls how to select an available cluster on which this execution should\ - \ run." - required: false - type: "string" - default: "TASK_RESOURCE" enum: - - "TASK_RESOURCE" - - "CLUSTER_RESOURCE" - - "EXECUTION_QUEUE" - - "EXECUTION_CLUSTER_LABEL" - - "QUALITY_OF_SERVICE_SPECIFICATION" - - "PLUGIN_OVERRIDE" - - "WORKFLOW_EXECUTION_CONFIG" - - "CLUSTER_ASSIGNMENT" + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" x-exportParamName: "ResourceType" - x-optionalDataType: "String" - responses: - 200: - description: "A successful response." 
- schema: - $ref: "#/definitions/adminWorkflowAttributesGetResponse" - delete: - tags: - - "AdminService" - summary: "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ - \ for a project, domain and workflow." - operationId: "DeleteWorkflowAttributes" - parameters: - - name: "project" + - name: "id.project" in: "path" - description: "Unique project id which this set of attributes references.\n\ - +required" + description: "Name of the project the resource belongs to." required: true type: "string" - x-exportParamName: "Project" - - name: "domain" + x-exportParamName: "IdProject" + - name: "id.domain" in: "path" - description: "Unique domain id which this set of attributes references.\n\ - +required" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." required: true type: "string" - x-exportParamName: "Domain" - - name: "workflow" + x-exportParamName: "IdDomain" + - name: "id.name" in: "path" - description: "Workflow name which this set of attributes references.\n+required" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'" required: true type: "string" - x-exportParamName: "Workflow" + x-exportParamName: "IdName" - in: "body" name: "body" required: true schema: - $ref: "#/definitions/adminWorkflowAttributesDeleteRequest" + $ref: "#/definitions/adminNamedEntityUpdateRequest" x-exportParamName: "Body" responses: 200: description: "A successful response." 
schema: - $ref: "#/definitions/adminWorkflowAttributesDeleteResponse" - /api/v1/workflow_ids/{project}/{domain}: + $ref: "#/definitions/adminNamedEntityUpdateResponse" + /api/v1/named_entities/org/{org}/{resource_type}/{project}/{domain}: get: tags: - "AdminService" - summary: "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of\ - \ workflow objects." - operationId: "ListWorkflowIds" + summary: "Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects." + operationId: "ListNamedEntities2" parameters: + - name: "org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "Org" + - name: "resource_type" + in: "path" + description: "Resource type of the metadata to query. One of Task, Workflow\ + \ or LaunchPlan.\n+required" + required: true + type: "string" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "ResourceType" - name: "project" in: "path" description: "Name of the project that contains the identifiers.\n+required" @@ -2575,14 +3112,13 @@ paths: x-exportParamName: "Project" - name: "domain" in: "path" - description: "Name of the domain the identifiers belongs to within the project.\n\ - +required" + description: "Name of the domain the identifiers belongs to within the project." required: true type: "string" x-exportParamName: "Domain" - name: "limit" in: "query" - description: "Indicates the number of resources to be returned.\n+required." + description: "Indicates the number of resources to be returned." required: false type: "integer" format: "int64" @@ -2627,32 +3163,27 @@ paths: 200: description: "A successful response." 
schema: - $ref: "#/definitions/adminNamedEntityIdentifierList" - /api/v1/workflows: - post: - tags: - - "AdminService" - summary: "Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition" - operationId: "CreateWorkflow" - parameters: - - in: "body" - name: "body" - required: true - schema: - $ref: "#/definitions/adminWorkflowCreateRequest" - x-exportParamName: "Body" - responses: - 200: - description: "A successful response." - schema: - $ref: "#/definitions/adminWorkflowCreateResponse" - /api/v1/workflows/{id.project}/{id.domain}: + $ref: "#/definitions/adminNamedEntityList" + /api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}: get: tags: - "AdminService" - summary: "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions." - operationId: "ListWorkflows2" + summary: "Returns a :ref:`ref_flyteidl.admin.NamedEntity` object." + operationId: "GetNamedEntity" parameters: + - name: "resource_type" + in: "path" + description: "Resource type of the metadata to get. One of Task, Workflow\ + \ or LaunchPlan.\n+required" + required: true + type: "string" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "ResourceType" - name: "id.project" in: "path" description: "Name of the project the resource belongs to." @@ -2667,70 +3198,43 @@ paths: type: "string" x-exportParamName: "IdDomain" - name: "id.name" - in: "query" + in: "path" description: "User provided value for the resource.\nThe combination of project\ \ + domain + name uniquely identifies the resource.\n+optional - in certain\ - \ contexts - like 'List API', 'Launch plans'." - required: false + \ contexts - like 'List API', 'Launch plans'" + required: true type: "string" x-exportParamName: "IdName" - x-optionalDataType: "String" - - name: "limit" - in: "query" - description: "Indicates the number of resources to be returned.\n+required." 
- required: false - type: "integer" - format: "int64" - x-exportParamName: "Limit" - x-optionalDataType: "Int64" - - name: "token" - in: "query" - description: "In the case of multiple pages of results, this server-provided\ - \ token can be used to fetch the next page\nin a query.\n+optional." - required: false - type: "string" - x-exportParamName: "Token" - x-optionalDataType: "String" - - name: "filters" - in: "query" - description: "Indicates a list of filters passed as string.\nMore info on\ - \ constructing filters : \n+optional." - required: false - type: "string" - x-exportParamName: "Filters" - x-optionalDataType: "String" - - name: "sort_by.key" - in: "query" - description: "Indicates an attribute to sort the response values.\n+required." - required: false - type: "string" - x-exportParamName: "SortByKey" - x-optionalDataType: "String" - - name: "sort_by.direction" + - name: "id.org" in: "query" - description: "Indicates the direction to apply sort key for response values.\n\ - +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ - \ order." + description: "Optional, org key applied to the resource." required: false type: "string" - default: "DESCENDING" - enum: - - "DESCENDING" - - "ASCENDING" - x-exportParamName: "SortByDirection" + x-exportParamName: "IdOrg" x-optionalDataType: "String" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminWorkflowList" - /api/v1/workflows/{id.project}/{id.domain}/{id.name}: - get: + $ref: "#/definitions/adminNamedEntity" + put: tags: - "AdminService" - summary: "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions." - operationId: "ListWorkflows" + summary: "Updates a :ref:`ref_flyteidl.admin.NamedEntity` object." 
+ operationId: "UpdateNamedEntity" parameters: + - name: "resource_type" + in: "path" + description: "Resource type of the metadata to update\n+required" + required: true + type: "string" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "ResourceType" - name: "id.project" in: "path" description: "Name of the project the resource belongs to." @@ -2752,9 +3256,52 @@ paths: required: true type: "string" x-exportParamName: "IdName" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminNamedEntityUpdateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminNamedEntityUpdateResponse" + /api/v1/named_entities/{resource_type}/{project}/{domain}: + get: + tags: + - "AdminService" + summary: "Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects." + operationId: "ListNamedEntities" + parameters: + - name: "resource_type" + in: "path" + description: "Resource type of the metadata to query. One of Task, Workflow\ + \ or LaunchPlan.\n+required" + required: true + type: "string" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "ResourceType" + - name: "project" + in: "path" + description: "Name of the project that contains the identifiers.\n+required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Name of the domain the identifiers belongs to within the project." + required: true + type: "string" + x-exportParamName: "Domain" - name: "limit" in: "query" - description: "Indicates the number of resources to be returned.\n+required." + description: "Indicates the number of resources to be returned." 
required: false type: "integer" format: "int64" @@ -2762,20 +3309,12 @@ paths: x-optionalDataType: "Int64" - name: "token" in: "query" - description: "In the case of multiple pages of results, this server-provided\ + description: "In the case of multiple pages of results, the server-provided\ \ token can be used to fetch the next page\nin a query.\n+optional." required: false type: "string" x-exportParamName: "Token" x-optionalDataType: "String" - - name: "filters" - in: "query" - description: "Indicates a list of filters passed as string.\nMore info on\ - \ constructing filters : \n+optional." - required: false - type: "string" - x-exportParamName: "Filters" - x-optionalDataType: "String" - name: "sort_by.key" in: "query" description: "Indicates an attribute to sort the response values.\n+required." @@ -2796,602 +3335,16492 @@ paths: - "ASCENDING" x-exportParamName: "SortByDirection" x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "Org" + x-optionalDataType: "String" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminWorkflowList" - /api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}: - get: + $ref: "#/definitions/adminNamedEntityList" + ? /api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id} + : get: tags: - "AdminService" - summary: "Fetch a :ref:`ref_flyteidl.admin.Workflow` definition." - operationId: "GetWorkflow" + summary: "Fetches a :ref:`ref_flyteidl.admin.NodeExecution`." 
+ operationId: "GetNodeExecution2" parameters: - - name: "id.project" + - name: "id.execution_id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdExecutionIdOrg" + - name: "id.execution_id.project" in: "path" description: "Name of the project the resource belongs to." required: true type: "string" - x-exportParamName: "IdProject" - - name: "id.domain" + x-exportParamName: "IdExecutionIdProject" + - name: "id.execution_id.domain" in: "path" description: "Name of the domain the resource belongs to.\nA domain can be\ \ considered as a subset within a specific project." required: true type: "string" - x-exportParamName: "IdDomain" - - name: "id.name" + x-exportParamName: "IdExecutionIdDomain" + - name: "id.execution_id.name" in: "path" - description: "User provided value for the resource." + description: "User or system provided value for the resource." required: true type: "string" - x-exportParamName: "IdName" - - name: "id.version" + x-exportParamName: "IdExecutionIdName" + - name: "id.node_id" in: "path" - description: "Specific version of the resource." required: true type: "string" - x-exportParamName: "IdVersion" - - name: "id.resource_type" - in: "query" - description: "Identifies the specific type of resource that this identifier\ - \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ - \ in Flyte DataCatalog. A Dataset is also a versioned entity and can be\ - \ a compilation of multiple individual objects.\nEventually all Catalog\ - \ objects should be modeled similar to Flyte Objects. 
The Dataset entities\ - \ makes it possible for the UI and CLI to act on the objects \nin a similar\ - \ manner to other Flyte objects" - required: false - type: "string" - default: "UNSPECIFIED" - enum: - - "UNSPECIFIED" - - "TASK" - - "WORKFLOW" - - "LAUNCH_PLAN" - - "DATASET" - x-exportParamName: "IdResourceType" - x-optionalDataType: "String" + x-exportParamName: "IdNodeId" responses: 200: description: "A successful response." schema: - $ref: "#/definitions/adminWorkflow" -definitions: - BlobTypeBlobDimensionality: - type: "string" - enum: - - "SINGLE" - - "MULTIPART" - default: "SINGLE" - CatalogReservationStatus: - type: "string" - description: "Indicates the status of a catalog reservation operation.\n\n - RESERVATION_DISABLED:\ - \ Used to indicate that reservations are disabled\n - RESERVATION_ACQUIRED:\ - \ Used to indicate that a reservation was successfully acquired or extended\n\ - \ - RESERVATION_EXISTS: Used to indicate that an active reservation currently\ - \ exists\n - RESERVATION_RELEASED: Used to indicate that the reservation has\ - \ been successfully released\n - RESERVATION_FAILURE: Used to indicate that\ - \ a reservation operation resulted in failure" - enum: - - "RESERVATION_DISABLED" - - "RESERVATION_ACQUIRED" - - "RESERVATION_EXISTS" - - "RESERVATION_RELEASED" - - "RESERVATION_FAILURE" - default: "RESERVATION_DISABLED" - ComparisonExpressionOperator: - type: "string" - title: "Binary Operator for each expression" - description: "- GT: Greater Than\n - LT: Less Than" - enum: - - "EQ" - - "NEQ" - - "GT" - - "GTE" - - "LT" - - "LTE" - default: "EQ" - ConjunctionExpressionLogicalOperator: - type: "string" - title: "Nested conditions. 
They can be conjoined using AND / OR\nOrder of evaluation\ - \ is not important as the operators are Commutative" - description: "- AND: Conjunction" - enum: - - "AND" - - "OR" - default: "AND" - ConnectionSetIdList: - type: "object" - properties: - ids: - type: "array" - items: - type: "string" - example: - ids: - - "ids" - - "ids" - ContainerArchitecture: - type: "string" - description: "Architecture-type the container image supports." - enum: - - "UNKNOWN" - - "AMD64" - - "ARM64" - - "ARM_V6" - - "ARM_V7" - default: "UNKNOWN" - DataLoadingConfigLiteralMapFormat: - type: "string" - title: "LiteralMapFormat decides the encoding format in which the input metadata\ - \ should be made available to the containers.\nIf the user has access to the\ - \ protocol buffer definitions, it is recommended to use the PROTO format.\n\ - JSON and YAML do not need any protobuf definitions to read it\nAll remote references\ - \ in core.LiteralMap are replaced with local filesystem references (the data\ - \ is downloaded to local filesystem)" - description: "- JSON: JSON / YAML for the metadata (which contains inlined primitive\ - \ values). 
The representation is inline with the standard json specification\ - \ as specified - https://www.json.org/json-en.html\n - PROTO: Proto is a serialized\ - \ binary of `core.LiteralMap` defined in flyteidl/core" - enum: - - "JSON" - - "YAML" - - "PROTO" - default: "JSON" - ExecutionErrorErrorKind: - type: "string" - title: "Error type: System or User" - enum: - - "UNKNOWN" - - "USER" - - "SYSTEM" - default: "UNKNOWN" - ExecutionMetadataExecutionMode: - type: "string" - description: "The method by which this execution was launched.\n\n - MANUAL: The\ - \ default execution mode, MANUAL implies that an execution was launched by an\ - \ individual.\n - SCHEDULED: A schedule triggered this execution launch.\n -\ - \ SYSTEM: A system process was responsible for launching this execution rather\ - \ an individual.\n - RELAUNCH: This execution was launched with identical inputs\ - \ as a previous execution.\n - CHILD_WORKFLOW: This execution was triggered\ - \ by another execution.\n - RECOVERED: This execution was recovered from another\ - \ execution." - enum: - - "MANUAL" - - "SCHEDULED" - - "SYSTEM" - - "RELAUNCH" - - "CHILD_WORKFLOW" - - "RECOVERED" - default: "MANUAL" - IOStrategyDownloadMode: - type: "string" - title: "Mode to use for downloading" - description: "- DOWNLOAD_EAGER: All data will be downloaded before the main container\ - \ is executed\n - DOWNLOAD_STREAM: Data will be downloaded as a stream and an\ - \ End-Of-Stream marker will be written to indicate all data has been downloaded.\ - \ Refer to protocol for details\n - DO_NOT_DOWNLOAD: Large objects (offloaded)\ - \ will not be downloaded" - enum: - - "DOWNLOAD_EAGER" - - "DOWNLOAD_STREAM" - - "DO_NOT_DOWNLOAD" - default: "DOWNLOAD_EAGER" - IOStrategyUploadMode: - type: "string" - title: "Mode to use for uploading" - description: "- UPLOAD_ON_EXIT: All data will be uploaded after the main container\ - \ exits\n - UPLOAD_EAGER: Data will be uploaded as it appears. 
Refer to protocol\ - \ specification for details\n - DO_NOT_UPLOAD: Data will not be uploaded, only\ - \ references will be written" - enum: - - "UPLOAD_ON_EXIT" - - "UPLOAD_EAGER" - - "DO_NOT_UPLOAD" - default: "UPLOAD_ON_EXIT" - PluginOverrideMissingPluginBehavior: - type: "string" - description: " - FAIL: By default, if this plugin is not enabled for a Flyte deployment\ - \ then execution will fail.\n - USE_DEFAULT: Uses the system-configured default\ - \ implementation." - enum: - - "FAIL" - - "USE_DEFAULT" - default: "FAIL" - ProjectProjectState: - type: "string" - description: "The state of the project is used to control its visibility in the\ - \ UI and validity.\n\n - ACTIVE: By default, all projects are considered active.\n\ - \ - ARCHIVED: Archived projects are no longer visible in the UI and no longer\ - \ valid.\n - SYSTEM_GENERATED: System generated projects that aren't explicitly\ - \ created or managed by a user." - enum: - - "ACTIVE" - - "ARCHIVED" - - "SYSTEM_GENERATED" - default: "ACTIVE" - QualityOfServiceTier: - type: "string" - description: " - UNDEFINED: Default: no quality of service specified." - enum: - - "UNDEFINED" - - "HIGH" - - "MEDIUM" - - "LOW" - default: "UNDEFINED" - ResourcesResourceEntry: - type: "object" - properties: - name: - description: "Resource name." - $ref: "#/definitions/ResourcesResourceName" - value: + $ref: "#/definitions/flyteidladminNodeExecution" + ? /api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow + : get: + tags: + - "AdminService" + summary: "Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`." + operationId: "GetDynamicNodeWorkflow2" + parameters: + - name: "id.execution_id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true type: "string" - title: "Value must be a valid k8s quantity. 
See\nhttps://github.com/kubernetes/apimachinery/blob/master/pkg/api/resource/quantity.go#L30-L80" - description: "Encapsulates a resource name and value." - example: - name: {} - value: "value" - ResourcesResourceName: - type: "string" - description: "Known resource names.\n\n - EPHEMERAL_STORAGE: For Kubernetes-based\ - \ deployments, pods use ephemeral local storage for scratch space, caching,\ - \ and for logs." - enum: - - "UNKNOWN" - - "CPU" - - "GPU" - - "MEMORY" - - "STORAGE" - - "EPHEMERAL_STORAGE" - default: "UNKNOWN" - RuntimeMetadataRuntimeType: - type: "string" - enum: - - "OTHER" - - "FLYTE_SDK" - default: "OTHER" - SchemaColumnSchemaColumnType: - type: "string" - enum: - - "INTEGER" - - "FLOAT" - - "STRING" - - "BOOLEAN" - - "DATETIME" - - "DURATION" - default: "INTEGER" - SchemaTypeSchemaColumn: - type: "object" - properties: - name: + x-exportParamName: "IdExecutionIdOrg" + - name: "id.execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true type: "string" - title: "A unique name -within the schema type- for the column" - type: - description: "The column type. This allows a limited set of types currently." - $ref: "#/definitions/SchemaColumnSchemaColumnType" - example: - name: "name" - type: {} - SecretMountType: - type: "string" - description: " - ANY: Default case, indicates the client can tolerate either mounting\ - \ options.\n - ENV_VAR: ENV_VAR indicates the secret needs to be mounted as\ - \ an environment variable.\n - FILE: FILE indicates the secret needs to be mounted\ - \ as a file." - enum: - - "ANY" - - "ENV_VAR" - - "FILE" - default: "ANY" - SortDirection: - type: "string" - description: " - DESCENDING: By default, fields are sorted in descending order." - enum: - - "DESCENDING" - - "ASCENDING" - default: "DESCENDING" - SqlDialect: - type: "string" - description: "The dialect of the SQL statement. 
This is used to validate and parse\ - \ SQL statements at compilation time to avoid\nexpensive runtime operations.\ - \ If set to an unsupported dialect, no validation will be done on the statement.\n\ - We support the following dialect: ansi, hive." - enum: - - "UNDEFINED" - - "ANSI" - - "HIVE" - - "OTHER" - default: "UNDEFINED" - StructuredDatasetTypeDatasetColumn: - type: "object" - properties: - name: + x-exportParamName: "IdExecutionIdProject" + - name: "id.execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true type: "string" - description: "A unique name within the schema type for the column." - literal_type: - description: "The column type." - $ref: "#/definitions/coreLiteralType" - example: - name: "name" - TaskExecutionMetadataInstanceClass: - type: "string" - description: "Includes the broad category of machine used for this specific task\ - \ execution.\n\n - DEFAULT: The default instance class configured for the flyte\ - \ application platform.\n - INTERRUPTIBLE: The instance class configured for\ - \ interruptible tasks." - enum: - - "DEFAULT" - - "INTERRUPTIBLE" - default: "DEFAULT" - TaskLogMessageFormat: - type: "string" - enum: - - "UNKNOWN" - - "CSV" - - "JSON" - default: "UNKNOWN" - WorkflowMetadataOnFailurePolicy: - type: "string" - title: "Failure Handling Strategy" - description: "- FAIL_IMMEDIATELY: FAIL_IMMEDIATELY instructs the system to fail\ - \ as soon as a node fails in the workflow. It'll automatically\nabort all currently\ - \ running nodes and clean up resources before finally marking the workflow executions\ - \ as\nfailed.\n - FAIL_AFTER_EXECUTABLE_NODES_COMPLETE: FAIL_AFTER_EXECUTABLE_NODES_COMPLETE\ - \ instructs the system to make as much progress as it can. 
The system will\n\ - not alter the dependencies of the execution graph so any node that depend on\ - \ the failed node will not be run.\nOther nodes that will be executed to completion\ - \ before cleaning up resources and marking the workflow\nexecution as failed." - enum: - - "FAIL_IMMEDIATELY" - - "FAIL_AFTER_EXECUTABLE_NODES_COMPLETE" - default: "FAIL_IMMEDIATELY" - adminAbortMetadata: - type: "object" - properties: - cause: + x-exportParamName: "IdExecutionIdDomain" + - name: "id.execution_id.name" + in: "path" + description: "User or system provided value for the resource." + required: true type: "string" - description: "In the case of a user-specified abort, this will pass along\ - \ the user-supplied cause." - principal: + x-exportParamName: "IdExecutionIdName" + - name: "id.node_id" + in: "path" + required: true type: "string" - title: "Identifies the entity (if any) responsible for terminating the execution" - description: "Specifies metadata around an aborted workflow execution." - example: - principal: "principal" - cause: "cause" - adminAnnotations: - type: "object" - properties: - values: - type: "object" - description: "Map of custom annotations to be applied to the execution resource." - additionalProperties: - type: "string" - description: "Annotation values to be applied to an execution resource.\nIn the\ - \ future a mode (e.g. OVERRIDE, APPEND, etc) can be defined\nto specify how\ - \ to merge annotations defined at registration and execution time." - example: - values: - key: "values" - adminAuth: - type: "object" - properties: - assumable_iam_role: + x-exportParamName: "IdNodeId" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminDynamicNodeWorkflowResponse" + ? 
/api/v1/node_executions/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name} + : get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`." + operationId: "ListNodeExecutions2" + parameters: + - name: "workflow_execution_id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true type: "string" - description: "Defines an optional iam role which will be used for tasks run\ - \ in executions created with this launch plan." - kubernetes_service_account: + x-exportParamName: "WorkflowExecutionIdOrg" + - name: "workflow_execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true type: "string" - description: "Defines an optional kubernetes service account which will be\ - \ used for tasks run in executions created with this launch plan." - description: "Defines permissions associated with executions created by this launch\ - \ plan spec.\nUse either of these roles when they have permissions required\ - \ by your workflow execution.\nDeprecated." - example: - kubernetes_service_account: "kubernetes_service_account" - assumable_iam_role: "assumable_iam_role" - adminAuthRole: - type: "object" - properties: - assumable_iam_role: + x-exportParamName: "WorkflowExecutionIdProject" + - name: "workflow_execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true type: "string" - description: "Defines an optional iam role which will be used for tasks run\ - \ in executions created with this launch plan." - kubernetes_service_account: + x-exportParamName: "WorkflowExecutionIdDomain" + - name: "workflow_execution_id.name" + in: "path" + description: "User or system provided value for the resource." 
+ required: true type: "string" - description: "Defines an optional kubernetes service account which will be\ - \ used for tasks run in executions created with this launch plan." - description: "Defines permissions associated with executions created by this launch\ - \ plan spec.\nUse either of these roles when they have permissions required\ - \ by your workflow execution.\nDeprecated." - example: - kubernetes_service_account: "kubernetes_service_account" - assumable_iam_role: "assumable_iam_role" - adminClusterAssignment: - type: "object" - properties: - cluster_pool_name: + x-exportParamName: "WorkflowExecutionIdName" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + required: false type: "string" - description: "Encapsulates specifications for routing an execution onto a specific\ - \ cluster." - example: - cluster_pool_name: "cluster_pool_name" - adminClusterResourceAttributes: - type: "object" - properties: - attributes: - type: "object" - description: "Custom resource attributes which will be applied in cluster\ - \ resource creation (e.g. quotas).\nMap keys are the *case-sensitive* names\ - \ of variables in templatized resource files.\nMap values should be the\ - \ custom values which get substituted during resource creation." - additionalProperties: - type: "string" - example: - attributes: - key: "attributes" - adminCronSchedule: - type: "object" - properties: - schedule: + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." 
+ required: false type: "string" - title: "Standard/default cron implementation as described by https://en.wikipedia.org/wiki/Cron#CRON_expression;\n\ - Also supports nonstandard predefined scheduling definitions\nas described\ - \ by https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#CronExpressions\n\ - except @reboot" - offset: + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false type: "string" - title: "ISO 8601 duration as described by https://en.wikipedia.org/wiki/ISO_8601#Durations" - description: "Options for schedules to run according to a cron expression." - example: - schedule: "schedule" - offset: "offset" - adminDescription: - type: "object" - properties: - value: + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false type: "string" - title: "long description - no more than 4KB" - uri: + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + - name: "unique_parent_id" + in: "query" + description: "Unique identifier of the parent node in the execution\n+optional." + required: false type: "string" - title: "if the description sizes exceed some threshold we can offload the\ - \ entire\ndescription proto altogether to an external data store, like S3\ - \ rather than store inline in the db" - format: - title: "Format of the long description" - $ref: "#/definitions/adminDescriptionFormat" - icon_link: + x-exportParamName: "UniqueParentId" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." 
+ schema: + $ref: "#/definitions/adminNodeExecutionList" + /api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}: + get: + tags: + - "AdminService" + summary: "Fetches a :ref:`ref_flyteidl.admin.NodeExecution`." + operationId: "GetNodeExecution" + parameters: + - name: "id.execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true type: "string" - title: "Optional link to an icon for the entity" - description: "Full user description with formatting preserved. This can be rendered\n\ - by clients, such as the console or command line tools with in-tact\nformatting." - example: - format: {} - icon_link: "icon_link" - value: "value" - uri: "uri" - adminDescriptionEntity: - type: "object" - properties: - id: - description: "id represents the unique identifier of the description entity." - $ref: "#/definitions/coreIdentifier" - short_description: + x-exportParamName: "IdExecutionIdProject" + - name: "id.execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true type: "string" - description: "One-liner overview of the entity." - long_description: - description: "Full user description with formatting preserved." - $ref: "#/definitions/adminDescription" - source_code: - description: "Optional link to source code used to define this entity." - $ref: "#/definitions/adminSourceCode" + x-exportParamName: "IdExecutionIdDomain" + - name: "id.execution_id.name" + in: "path" + description: "User or system provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdExecutionIdName" + - name: "id.node_id" + in: "path" + required: true + type: "string" + x-exportParamName: "IdNodeId" + - name: "id.execution_id.org" + in: "query" + description: "Optional, org key applied to the resource." 
+ required: false + type: "string" + x-exportParamName: "IdExecutionIdOrg" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/flyteidladminNodeExecution" + /api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow: + get: tags: - type: "array" - description: "User-specified tags. These are arbitrary and can be used for\ - \ searching\nfiltering and discovering tasks." - items: - type: "string" - description: "DescriptionEntity contains detailed description for the task/workflow.\n\ - Documentation could provide insight into the algorithms, business use case,\ - \ etc." - example: - short_description: "short_description" - id: - domain: "domain" - resource_type: {} - name: "name" - project: "project" - version: "version" - long_description: - format: {} - icon_link: "icon_link" - value: "value" - uri: "uri" - source_code: - link: "link" + - "AdminService" + summary: "Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`." + operationId: "GetDynamicNodeWorkflow" + parameters: + - name: "id.execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdExecutionIdProject" + - name: "id.execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdExecutionIdDomain" + - name: "id.execution_id.name" + in: "path" + description: "User or system provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdExecutionIdName" + - name: "id.node_id" + in: "path" + required: true + type: "string" + x-exportParamName: "IdNodeId" + - name: "id.execution_id.org" + in: "query" + description: "Optional, org key applied to the resource." 
+ required: false + type: "string" + x-exportParamName: "IdExecutionIdOrg" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminDynamicNodeWorkflowResponse" + /api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}: + get: tags: - - "tags" - - "tags" - adminDescriptionEntityList: - type: "object" - properties: - descriptionEntities: - type: "array" - description: "A list of DescriptionEntities returned based on the request." - items: - $ref: "#/definitions/adminDescriptionEntity" - token: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`." + operationId: "ListNodeExecutions" + parameters: + - name: "workflow_execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true type: "string" - description: "In the case of multiple pages of results, the server-provided\ - \ token can be used to fetch the next page\nin a query. If there are no\ - \ more results, this value will be empty." 
- title: "Represents a list of DescriptionEntities returned from the admin.\nSee\ - \ :ref:`ref_flyteidl.admin.DescriptionEntity` for more details" - example: - descriptionEntities: - - short_description: "short_description" - id: - domain: "domain" - resource_type: {} - name: "name" - project: "project" - version: "version" - long_description: - format: {} - icon_link: "icon_link" - value: "value" - uri: "uri" - source_code: - link: "link" - tags: - - "tags" - - "tags" - - short_description: "short_description" - id: - domain: "domain" - resource_type: {} - name: "name" - project: "project" - version: "version" - long_description: - format: {} - icon_link: "icon_link" - value: "value" - uri: "uri" - source_code: - link: "link" - tags: - - "tags" - - "tags" - token: "token" - adminDescriptionFormat: - type: "string" - title: "The format of the long description" - description: "- DESCRIPTION_FORMAT_RST: python default documentation - comments\ - \ is rst" - enum: - - "DESCRIPTION_FORMAT_UNKNOWN" - - "DESCRIPTION_FORMAT_MARKDOWN" - - "DESCRIPTION_FORMAT_HTML" - - "DESCRIPTION_FORMAT_RST" - default: "DESCRIPTION_FORMAT_UNKNOWN" - adminDomain: - type: "object" - properties: - id: + x-exportParamName: "WorkflowExecutionIdProject" + - name: "workflow_execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true type: "string" - description: "Globally unique domain name." - name: + x-exportParamName: "WorkflowExecutionIdDomain" + - name: "workflow_execution_id.name" + in: "path" + description: "User or system provided value for the resource." + required: true type: "string" - description: "Display name." - description: "Namespace within a project commonly used to differentiate between\ - \ different service instances.\ne.g. \"production\", \"development\", etc." 
- example: - name: "name" - id: "id" + x-exportParamName: "WorkflowExecutionIdName" + - name: "workflow_execution_id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "WorkflowExecutionIdOrg" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + - name: "unique_parent_id" + in: "query" + description: "Unique identifier of the parent node in the execution\n+optional." + required: false + type: "string" + x-exportParamName: "UniqueParentId" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." 
+ schema: + $ref: "#/definitions/adminNodeExecutionList" + /api/v1/project_attributes/{attributes.project}: + put: + tags: + - "AdminService" + summary: "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ at the project level" + operationId: "UpdateProjectAttributes" + parameters: + - name: "attributes.project" + in: "path" + description: "Unique project id for which this set of attributes will be applied." + required: true + type: "string" + x-exportParamName: "AttributesProject" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProjectAttributesUpdateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectAttributesUpdateResponse" + /api/v1/project_attributes/{project}: + get: + tags: + - "AdminService" + summary: "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project and domain." + operationId: "GetProjectAttributes" + parameters: + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "resource_type" + in: "query" + description: "Which type of matchable attributes to return.\n+required.\n\n\ + \ - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n\ + \ - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster\ + \ resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution\ + \ queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster\ + \ label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION:\ + \ Configures default quality of service when undefined in an execution spec.\n\ + \ - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior\ + \ for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for\ + \ 
customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT:\ + \ Controls how to select an available cluster on which this execution should\ + \ run." + required: false + type: "string" + default: "TASK_RESOURCE" + enum: + - "TASK_RESOURCE" + - "CLUSTER_RESOURCE" + - "EXECUTION_QUEUE" + - "EXECUTION_CLUSTER_LABEL" + - "QUALITY_OF_SERVICE_SPECIFICATION" + - "PLUGIN_OVERRIDE" + - "WORKFLOW_EXECUTION_CONFIG" + - "CLUSTER_ASSIGNMENT" + x-exportParamName: "ResourceType" + x-optionalDataType: "String" + - name: "org" + in: "query" + description: "Optional, org key applied to the project." + required: false + type: "string" + x-exportParamName: "Org" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectAttributesGetResponse" + delete: + tags: + - "AdminService" + summary: "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project and domain." + operationId: "DeleteProjectAttributes" + parameters: + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProjectAttributesDeleteRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectAttributesDeleteResponse" + /api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}: + put: + tags: + - "AdminService" + summary: "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ at the project level" + operationId: "UpdateProjectAttributes2" + parameters: + - name: "attributes.org" + in: "path" + description: "Optional, org key applied to the project." 
+ required: true + type: "string" + x-exportParamName: "AttributesOrg" + - name: "attributes.project" + in: "path" + description: "Unique project id for which this set of attributes will be applied." + required: true + type: "string" + x-exportParamName: "AttributesProject" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProjectAttributesUpdateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectAttributesUpdateResponse" + /api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}: + put: + tags: + - "AdminService" + summary: "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project and domain." + operationId: "UpdateProjectDomainAttributes2" + parameters: + - name: "attributes.org" + in: "path" + description: "Optional, org key applied to the attributes." + required: true + type: "string" + x-exportParamName: "AttributesOrg" + - name: "attributes.project" + in: "path" + description: "Unique project id for which this set of attributes will be applied." + required: true + type: "string" + x-exportParamName: "AttributesProject" + - name: "attributes.domain" + in: "path" + description: "Unique domain id for which this set of attributes will be applied." + required: true + type: "string" + x-exportParamName: "AttributesDomain" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProjectDomainAttributesUpdateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectDomainAttributesUpdateResponse" + /api/v1/project_domain_attributes/org/{org}/{project}: + get: + tags: + - "AdminService" + summary: "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project and domain." 
+ operationId: "GetProjectAttributes2" + parameters: + - name: "org" + in: "path" + description: "Optional, org key applied to the project." + required: true + type: "string" + x-exportParamName: "Org" + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "resource_type" + in: "query" + description: "Which type of matchable attributes to return.\n+required.\n\n\ + \ - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n\ + \ - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster\ + \ resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution\ + \ queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster\ + \ label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION:\ + \ Configures default quality of service when undefined in an execution spec.\n\ + \ - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior\ + \ for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for\ + \ customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT:\ + \ Controls how to select an available cluster on which this execution should\ + \ run." + required: false + type: "string" + default: "TASK_RESOURCE" + enum: + - "TASK_RESOURCE" + - "CLUSTER_RESOURCE" + - "EXECUTION_QUEUE" + - "EXECUTION_CLUSTER_LABEL" + - "QUALITY_OF_SERVICE_SPECIFICATION" + - "PLUGIN_OVERRIDE" + - "WORKFLOW_EXECUTION_CONFIG" + - "CLUSTER_ASSIGNMENT" + x-exportParamName: "ResourceType" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectAttributesGetResponse" + delete: + tags: + - "AdminService" + summary: "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project and domain." 
+ operationId: "DeleteProjectAttributes2" + parameters: + - name: "org" + in: "path" + description: "Optional, org key applied to the project." + required: true + type: "string" + x-exportParamName: "Org" + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProjectAttributesDeleteRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectAttributesDeleteResponse" + /api/v1/project_domain_attributes/org/{org}/{project}/{domain}: + get: + tags: + - "AdminService" + summary: "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project and domain." + operationId: "GetProjectDomainAttributes2" + parameters: + - name: "org" + in: "path" + description: "Optional, org key applied to the attributes." 
+ required: true + type: "string" + x-exportParamName: "Org" + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Unique domain id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - name: "resource_type" + in: "query" + description: "Which type of matchable attributes to return.\n+required.\n\n\ + \ - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n\ + \ - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster\ + \ resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution\ + \ queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster\ + \ label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION:\ + \ Configures default quality of service when undefined in an execution spec.\n\ + \ - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior\ + \ for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for\ + \ customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT:\ + \ Controls how to select an available cluster on which this execution should\ + \ run." + required: false + type: "string" + default: "TASK_RESOURCE" + enum: + - "TASK_RESOURCE" + - "CLUSTER_RESOURCE" + - "EXECUTION_QUEUE" + - "EXECUTION_CLUSTER_LABEL" + - "QUALITY_OF_SERVICE_SPECIFICATION" + - "PLUGIN_OVERRIDE" + - "WORKFLOW_EXECUTION_CONFIG" + - "CLUSTER_ASSIGNMENT" + x-exportParamName: "ResourceType" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." 
+ schema: + $ref: "#/definitions/adminProjectDomainAttributesGetResponse" + delete: + tags: + - "AdminService" + summary: "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project and domain." + operationId: "DeleteProjectDomainAttributes2" + parameters: + - name: "org" + in: "path" + description: "Optional, org key applied to the attributes." + required: true + type: "string" + x-exportParamName: "Org" + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Unique domain id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProjectDomainAttributesDeleteRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectDomainAttributesDeleteResponse" + /api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}: + put: + tags: + - "AdminService" + summary: "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project and domain." + operationId: "UpdateProjectDomainAttributes" + parameters: + - name: "attributes.project" + in: "path" + description: "Unique project id for which this set of attributes will be applied." + required: true + type: "string" + x-exportParamName: "AttributesProject" + - name: "attributes.domain" + in: "path" + description: "Unique domain id for which this set of attributes will be applied." 
+ required: true + type: "string" + x-exportParamName: "AttributesDomain" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProjectDomainAttributesUpdateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectDomainAttributesUpdateResponse" + /api/v1/project_domain_attributes/{project}/{domain}: + get: + tags: + - "AdminService" + summary: "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project and domain." + operationId: "GetProjectDomainAttributes" + parameters: + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Unique domain id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - name: "resource_type" + in: "query" + description: "Which type of matchable attributes to return.\n+required.\n\n\ + \ - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n\ + \ - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster\ + \ resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution\ + \ queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster\ + \ label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION:\ + \ Configures default quality of service when undefined in an execution spec.\n\ + \ - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior\ + \ for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for\ + \ customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT:\ + \ Controls how to select an available cluster on which this execution should\ + \ run." 
+ required: false + type: "string" + default: "TASK_RESOURCE" + enum: + - "TASK_RESOURCE" + - "CLUSTER_RESOURCE" + - "EXECUTION_QUEUE" + - "EXECUTION_CLUSTER_LABEL" + - "QUALITY_OF_SERVICE_SPECIFICATION" + - "PLUGIN_OVERRIDE" + - "WORKFLOW_EXECUTION_CONFIG" + - "CLUSTER_ASSIGNMENT" + x-exportParamName: "ResourceType" + x-optionalDataType: "String" + - name: "org" + in: "query" + description: "Optional, org key applied to the attributes." + required: false + type: "string" + x-exportParamName: "Org" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectDomainAttributesGetResponse" + delete: + tags: + - "AdminService" + summary: "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project and domain." + operationId: "DeleteProjectDomainAttributes" + parameters: + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Unique domain id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProjectDomainAttributesDeleteRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectDomainAttributesDeleteResponse" + /api/v1/projects: + get: + tags: + - "AdminService" + summary: "Fetches a list of :ref:`ref_flyteidl.admin.Project`" + operationId: "ListProjects" + parameters: + - name: "limit" + in: "query" + description: "Indicates the number of projects to be returned.\n+required." 
+ required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + - name: "org" + in: "query" + description: "Optional, org filter applied to list project requests." + required: false + type: "string" + x-exportParamName: "Org" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjects" + post: + tags: + - "AdminService" + summary: "Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment." + operationId: "RegisterProject" + parameters: + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProjectRegisterRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." 
+ schema: + $ref: "#/definitions/adminProjectRegisterResponse" + /api/v1/projects/org/{org}: + get: + tags: + - "AdminService" + summary: "Fetches a list of :ref:`ref_flyteidl.admin.Project`" + operationId: "ListProjects2" + parameters: + - name: "org" + in: "path" + description: "Optional, org filter applied to list project requests." + required: true + type: "string" + x-exportParamName: "Org" + - name: "limit" + in: "query" + description: "Indicates the number of projects to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." 
+ schema: + $ref: "#/definitions/adminProjects" + /api/v1/projects/org/{org}/{id}: + put: + tags: + - "AdminService" + summary: "Updates an existing :ref:`ref_flyteidl.admin.Project`\nflyteidl.admin.Project\ + \ should be passed but the domains property should be empty;\nit will be ignored\ + \ in the handler as domains cannot be updated via this API." + operationId: "UpdateProject2" + parameters: + - name: "org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "Org" + - name: "id" + in: "path" + description: "Globally unique project name." + required: true + type: "string" + x-exportParamName: "Id" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProject" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectUpdateResponse" + /api/v1/projects/org/{project.org}: + post: + tags: + - "AdminService" + summary: "Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment." + operationId: "RegisterProject2" + parameters: + - name: "project.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "ProjectOrg" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProjectRegisterRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectRegisterResponse" + /api/v1/projects/{id}: + put: + tags: + - "AdminService" + summary: "Updates an existing :ref:`ref_flyteidl.admin.Project`\nflyteidl.admin.Project\ + \ should be passed but the domains property should be empty;\nit will be ignored\ + \ in the handler as domains cannot be updated via this API." + operationId: "UpdateProject" + parameters: + - name: "id" + in: "path" + description: "Globally unique project name." 
+ required: true + type: "string" + x-exportParamName: "Id" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminProject" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminProjectUpdateResponse" + ? /api/v1/task_executions/org/{id.node_execution_id.execution_id.org}/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} + : get: + tags: + - "AdminService" + summary: "Fetches a :ref:`ref_flyteidl.admin.TaskExecution`." + operationId: "GetTaskExecution2" + parameters: + - name: "id.node_execution_id.execution_id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdOrg" + - name: "id.node_execution_id.execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdProject" + - name: "id.node_execution_id.execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdDomain" + - name: "id.node_execution_id.execution_id.name" + in: "path" + description: "User or system provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdName" + - name: "id.node_execution_id.node_id" + in: "path" + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdNodeId" + - name: "id.task_id.project" + in: "path" + description: "Name of the project the resource belongs to." 
+ required: true + type: "string" + x-exportParamName: "IdTaskIdProject" + - name: "id.task_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdTaskIdDomain" + - name: "id.task_id.name" + in: "path" + description: "User provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdTaskIdName" + - name: "id.task_id.version" + in: "path" + description: "Specific version of the resource." + required: true + type: "string" + x-exportParamName: "IdTaskIdVersion" + - name: "id.retry_attempt" + in: "path" + required: true + type: "integer" + format: "int64" + x-exportParamName: "IdRetryAttempt" + - name: "id.task_id.resource_type" + in: "query" + description: "Identifies the specific type of resource that this identifier\ + \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ + \ in Flyte DataCatalog. A Dataset is also a versioned entity and can be\ + \ a compilation of multiple individual objects.\nEventually all Catalog\ + \ objects should be modeled similar to Flyte Objects. The Dataset entities\ + \ makes it possible for the UI and CLI to act on the objects \nin a similar\ + \ manner to other Flyte objects" + required: false + type: "string" + default: "UNSPECIFIED" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "IdTaskIdResourceType" + x-optionalDataType: "String" + - name: "id.task_id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdTaskIdOrg" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/flyteidladminTaskExecution" + ? 
/api/v1/task_executions/org/{node_execution_id.execution_id.org}/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id} + : get: + tags: + - "AdminService" + summary: "Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`." + operationId: "ListTaskExecutions2" + parameters: + - name: "node_execution_id.execution_id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "NodeExecutionIdExecutionIdOrg" + - name: "node_execution_id.execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "NodeExecutionIdExecutionIdProject" + - name: "node_execution_id.execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "NodeExecutionIdExecutionIdDomain" + - name: "node_execution_id.execution_id.name" + in: "path" + description: "User or system provided value for the resource." + required: true + type: "string" + x-exportParamName: "NodeExecutionIdExecutionIdName" + - name: "node_execution_id.node_id" + in: "path" + required: true + type: "string" + x-exportParamName: "NodeExecutionIdNodeId" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." 
+ required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminTaskExecutionList" + ? /api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} + : get: + tags: + - "AdminService" + summary: "Fetches a :ref:`ref_flyteidl.admin.TaskExecution`." + operationId: "GetTaskExecution" + parameters: + - name: "id.node_execution_id.execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdProject" + - name: "id.node_execution_id.execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." 
+ required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdDomain" + - name: "id.node_execution_id.execution_id.name" + in: "path" + description: "User or system provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdName" + - name: "id.node_execution_id.node_id" + in: "path" + required: true + type: "string" + x-exportParamName: "IdNodeExecutionIdNodeId" + - name: "id.task_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdTaskIdProject" + - name: "id.task_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdTaskIdDomain" + - name: "id.task_id.name" + in: "path" + description: "User provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdTaskIdName" + - name: "id.task_id.version" + in: "path" + description: "Specific version of the resource." + required: true + type: "string" + x-exportParamName: "IdTaskIdVersion" + - name: "id.retry_attempt" + in: "path" + required: true + type: "integer" + format: "int64" + x-exportParamName: "IdRetryAttempt" + - name: "id.task_id.resource_type" + in: "query" + description: "Identifies the specific type of resource that this identifier\ + \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ + \ in Flyte DataCatalog. A Dataset is also a versioned entity and can be\ + \ a compilation of multiple individual objects.\nEventually all Catalog\ + \ objects should be modeled similar to Flyte Objects. 
The Dataset entities\ + \ makes it possible for the UI and CLI to act on the objects \nin a similar\ + \ manner to other Flyte objects" + required: false + type: "string" + default: "UNSPECIFIED" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "IdTaskIdResourceType" + x-optionalDataType: "String" + - name: "id.task_id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdTaskIdOrg" + x-optionalDataType: "String" + - name: "id.node_execution_id.execution_id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdNodeExecutionIdExecutionIdOrg" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/flyteidladminTaskExecution" + ? /api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id} + : get: + tags: + - "AdminService" + summary: "Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`." + operationId: "ListTaskExecutions" + parameters: + - name: "node_execution_id.execution_id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "NodeExecutionIdExecutionIdProject" + - name: "node_execution_id.execution_id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "NodeExecutionIdExecutionIdDomain" + - name: "node_execution_id.execution_id.name" + in: "path" + description: "User or system provided value for the resource." 
+ required: true + type: "string" + x-exportParamName: "NodeExecutionIdExecutionIdName" + - name: "node_execution_id.node_id" + in: "path" + required: true + type: "string" + x-exportParamName: "NodeExecutionIdNodeId" + - name: "node_execution_id.execution_id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "NodeExecutionIdExecutionIdOrg" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." 
+ schema: + $ref: "#/definitions/adminTaskExecutionList" + /api/v1/task_ids/{project}/{domain}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of\ + \ task objects." + operationId: "ListTaskIds" + parameters: + - name: "project" + in: "path" + description: "Name of the project that contains the identifiers.\n+required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Name of the domain the identifiers belongs to within the project.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\n+optional." 
+ required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "Org" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminNamedEntityIdentifierList" + /api/v1/tasks: + post: + tags: + - "AdminService" + summary: "Create and upload a :ref:`ref_flyteidl.admin.Task` definition" + operationId: "CreateTask" + parameters: + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/flyteidladminTaskCreateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/flyteidladminTaskCreateResponse" + /api/v1/tasks/org/{id.org}: + post: + tags: + - "AdminService" + summary: "Create and upload a :ref:`ref_flyteidl.admin.Task` definition" + operationId: "CreateTask2" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/flyteidladminTaskCreateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/flyteidladminTaskCreateResponse" + /api/v1/tasks/org/{id.org}/{id.project}/{id.domain}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions." + operationId: "ListTasks4" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." 
+ required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "query" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'." + required: false + type: "string" + x-exportParamName: "IdName" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." 
+ required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminTaskList" + /api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions." + operationId: "ListTasks2" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'" + required: true + type: "string" + x-exportParamName: "IdName" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." 
+ required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminTaskList" + /api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}: + get: + tags: + - "AdminService" + summary: "Fetch a :ref:`ref_flyteidl.admin.Task` definition." + operationId: "GetTask2" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource." 
+ required: true + type: "string" + x-exportParamName: "IdName" + - name: "id.version" + in: "path" + description: "Specific version of the resource." + required: true + type: "string" + x-exportParamName: "IdVersion" + - name: "id.resource_type" + in: "query" + description: "Identifies the specific type of resource that this identifier\ + \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ + \ in Flyte DataCatalog. A Dataset is also a versioned entity and can be\ + \ a compilation of multiple individual objects.\nEventually all Catalog\ + \ objects should be modeled similar to Flyte Objects. The Dataset entities\ + \ makes it possible for the UI and CLI to act on the objects \nin a similar\ + \ manner to other Flyte objects" + required: false + type: "string" + default: "UNSPECIFIED" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "IdResourceType" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminTask" + /api/v1/tasks/org/{org}/{project}/{domain}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of\ + \ task objects." + operationId: "ListTaskIds2" + parameters: + - name: "org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "Org" + - name: "project" + in: "path" + description: "Name of the project that contains the identifiers.\n+required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Name of the domain the identifiers belongs to within the project.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." 
+ required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminNamedEntityIdentifierList" + /api/v1/tasks/{id.project}/{id.domain}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions." + operationId: "ListTasks3" + parameters: + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." 
+ required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "query" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'." + required: false + type: "string" + x-exportParamName: "IdName" + x-optionalDataType: "String" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." 
+ required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminTaskList" + /api/v1/tasks/{id.project}/{id.domain}/{id.name}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions." + operationId: "ListTasks" + parameters: + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'" + required: true + type: "string" + x-exportParamName: "IdName" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." 
+ required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminTaskList" + /api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}: + get: + tags: + - "AdminService" + summary: "Fetch a :ref:`ref_flyteidl.admin.Task` definition." + operationId: "GetTask" + parameters: + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdName" + - name: "id.version" + in: "path" + description: "Specific version of the resource." 
+ required: true + type: "string" + x-exportParamName: "IdVersion" + - name: "id.resource_type" + in: "query" + description: "Identifies the specific type of resource that this identifier\ + \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ + \ in Flyte DataCatalog. A Dataset is also a versioned entity and can be\ + \ a compilation of multiple individual objects.\nEventually all Catalog\ + \ objects should be modeled similar to Flyte Objects. The Dataset entities\ + \ makes it possible for the UI and CLI to act on the objects \nin a similar\ + \ manner to other Flyte objects" + required: false + type: "string" + default: "UNSPECIFIED" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "IdResourceType" + x-optionalDataType: "String" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminTask" + /api/v1/version: + get: + tags: + - "AdminService" + operationId: "GetVersion" + parameters: [] + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminGetVersionResponse" + /api/v1/workflow_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}/{attributes.workflow}: + put: + tags: + - "AdminService" + summary: "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project, domain and workflow." + operationId: "UpdateWorkflowAttributes2" + parameters: + - name: "attributes.org" + in: "path" + description: "Optional, org key applied to the attributes." + required: true + type: "string" + x-exportParamName: "AttributesOrg" + - name: "attributes.project" + in: "path" + description: "Unique project id for which this set of attributes will be applied." 
+ required: true + type: "string" + x-exportParamName: "AttributesProject" + - name: "attributes.domain" + in: "path" + description: "Unique domain id for which this set of attributes will be applied." + required: true + type: "string" + x-exportParamName: "AttributesDomain" + - name: "attributes.workflow" + in: "path" + description: "Workflow name for which this set of attributes will be applied." + required: true + type: "string" + x-exportParamName: "AttributesWorkflow" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminWorkflowAttributesUpdateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowAttributesUpdateResponse" + /api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}: + get: + tags: + - "AdminService" + summary: "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project, domain and workflow." + operationId: "GetWorkflowAttributes2" + parameters: + - name: "org" + in: "path" + description: "Optional, org key applied to the attributes." 
+ required: true + type: "string" + x-exportParamName: "Org" + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Unique domain id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - name: "workflow" + in: "path" + description: "Workflow name which this set of attributes references.\n+required" + required: true + type: "string" + x-exportParamName: "Workflow" + - name: "resource_type" + in: "query" + description: "Which type of matchable attributes to return.\n+required.\n\n\ + \ - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n\ + \ - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster\ + \ resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution\ + \ queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster\ + \ label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION:\ + \ Configures default quality of service when undefined in an execution spec.\n\ + \ - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior\ + \ for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for\ + \ customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT:\ + \ Controls how to select an available cluster on which this execution should\ + \ run." + required: false + type: "string" + default: "TASK_RESOURCE" + enum: + - "TASK_RESOURCE" + - "CLUSTER_RESOURCE" + - "EXECUTION_QUEUE" + - "EXECUTION_CLUSTER_LABEL" + - "QUALITY_OF_SERVICE_SPECIFICATION" + - "PLUGIN_OVERRIDE" + - "WORKFLOW_EXECUTION_CONFIG" + - "CLUSTER_ASSIGNMENT" + x-exportParamName: "ResourceType" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." 
+ schema: + $ref: "#/definitions/adminWorkflowAttributesGetResponse" + delete: + tags: + - "AdminService" + summary: "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project, domain and workflow." + operationId: "DeleteWorkflowAttributes2" + parameters: + - name: "org" + in: "path" + description: "Optional, org key applied to the attributes." + required: true + type: "string" + x-exportParamName: "Org" + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Unique domain id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - name: "workflow" + in: "path" + description: "Workflow name which this set of attributes references.\n+required" + required: true + type: "string" + x-exportParamName: "Workflow" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminWorkflowAttributesDeleteRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowAttributesDeleteResponse" + /api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}: + put: + tags: + - "AdminService" + summary: "Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project, domain and workflow." + operationId: "UpdateWorkflowAttributes" + parameters: + - name: "attributes.project" + in: "path" + description: "Unique project id for which this set of attributes will be applied." + required: true + type: "string" + x-exportParamName: "AttributesProject" + - name: "attributes.domain" + in: "path" + description: "Unique domain id for which this set of attributes will be applied." 
+ required: true + type: "string" + x-exportParamName: "AttributesDomain" + - name: "attributes.workflow" + in: "path" + description: "Workflow name for which this set of attributes will be applied." + required: true + type: "string" + x-exportParamName: "AttributesWorkflow" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminWorkflowAttributesUpdateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowAttributesUpdateResponse" + /api/v1/workflow_attributes/{project}/{domain}/{workflow}: + get: + tags: + - "AdminService" + summary: "Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project, domain and workflow." + operationId: "GetWorkflowAttributes" + parameters: + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Unique domain id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - name: "workflow" + in: "path" + description: "Workflow name which this set of attributes references.\n+required" + required: true + type: "string" + x-exportParamName: "Workflow" + - name: "resource_type" + in: "query" + description: "Which type of matchable attributes to return.\n+required.\n\n\ + \ - TASK_RESOURCE: Applies to customizable task resource requests and limits.\n\ + \ - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster\ + \ resources.\n - EXECUTION_QUEUE: Configures task and dynamic task execution\ + \ queue assignment.\n - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster\ + \ label to be used for execution to be run\n - QUALITY_OF_SERVICE_SPECIFICATION:\ + \ Configures default quality of service when undefined in an execution spec.\n\ + \ - 
PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior\ + \ for a given task type.\n - WORKFLOW_EXECUTION_CONFIG: Adds defaults for\ + \ customizable workflow-execution specifications and overrides.\n - CLUSTER_ASSIGNMENT:\ + \ Controls how to select an available cluster on which this execution should\ + \ run." + required: false + type: "string" + default: "TASK_RESOURCE" + enum: + - "TASK_RESOURCE" + - "CLUSTER_RESOURCE" + - "EXECUTION_QUEUE" + - "EXECUTION_CLUSTER_LABEL" + - "QUALITY_OF_SERVICE_SPECIFICATION" + - "PLUGIN_OVERRIDE" + - "WORKFLOW_EXECUTION_CONFIG" + - "CLUSTER_ASSIGNMENT" + x-exportParamName: "ResourceType" + x-optionalDataType: "String" + - name: "org" + in: "query" + description: "Optional, org key applied to the attributes." + required: false + type: "string" + x-exportParamName: "Org" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowAttributesGetResponse" + delete: + tags: + - "AdminService" + summary: "Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`\ + \ for a project, domain and workflow." + operationId: "DeleteWorkflowAttributes" + parameters: + - name: "project" + in: "path" + description: "Unique project id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Unique domain id which this set of attributes references.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - name: "workflow" + in: "path" + description: "Workflow name which this set of attributes references.\n+required" + required: true + type: "string" + x-exportParamName: "Workflow" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminWorkflowAttributesDeleteRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." 
+ schema: + $ref: "#/definitions/adminWorkflowAttributesDeleteResponse" + /api/v1/workflow_ids/{project}/{domain}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of\ + \ workflow objects." + operationId: "ListWorkflowIds" + parameters: + - name: "project" + in: "path" + description: "Name of the project that contains the identifiers.\n+required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Name of the domain the identifiers belongs to within the project.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\n+optional." 
+ required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "Org" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminNamedEntityIdentifierList" + /api/v1/workflows: + post: + tags: + - "AdminService" + summary: "Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition" + operationId: "CreateWorkflow" + parameters: + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminWorkflowCreateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowCreateResponse" + /api/v1/workflows/org/{id.org}: + post: + tags: + - "AdminService" + summary: "Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition" + operationId: "CreateWorkflow2" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - in: "body" + name: "body" + required: true + schema: + $ref: "#/definitions/adminWorkflowCreateRequest" + x-exportParamName: "Body" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowCreateResponse" + /api/v1/workflows/org/{id.org}/{id.project}/{id.domain}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions." + operationId: "ListWorkflows4" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." 
+ required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "query" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'." + required: false + type: "string" + x-exportParamName: "IdName" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." 
+ required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowList" + /api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions." + operationId: "ListWorkflows2" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'" + required: true + type: "string" + x-exportParamName: "IdName" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." 
+ required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowList" + /api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}: + get: + tags: + - "AdminService" + summary: "Fetch a :ref:`ref_flyteidl.admin.Workflow` definition." + operationId: "GetWorkflow2" + parameters: + - name: "id.org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "IdOrg" + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource." 
+ required: true + type: "string" + x-exportParamName: "IdName" + - name: "id.version" + in: "path" + description: "Specific version of the resource." + required: true + type: "string" + x-exportParamName: "IdVersion" + - name: "id.resource_type" + in: "query" + description: "Identifies the specific type of resource that this identifier\ + \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ + \ in Flyte DataCatalog. A Dataset is also a versioned entity and can be\ + \ a compilation of multiple individual objects.\nEventually all Catalog\ + \ objects should be modeled similar to Flyte Objects. The Dataset entities\ + \ makes it possible for the UI and CLI to act on the objects \nin a similar\ + \ manner to other Flyte objects" + required: false + type: "string" + default: "UNSPECIFIED" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "IdResourceType" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflow" + /api/v1/workflows/org/{org}/{project}/{domain}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of\ + \ workflow objects." + operationId: "ListWorkflowIds2" + parameters: + - name: "org" + in: "path" + description: "Optional, org key applied to the resource." + required: true + type: "string" + x-exportParamName: "Org" + - name: "project" + in: "path" + description: "Name of the project that contains the identifiers.\n+required" + required: true + type: "string" + x-exportParamName: "Project" + - name: "domain" + in: "path" + description: "Name of the domain the identifiers belongs to within the project.\n\ + +required" + required: true + type: "string" + x-exportParamName: "Domain" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." 
+ required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminNamedEntityIdentifierList" + /api/v1/workflows/{id.project}/{id.domain}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions." + operationId: "ListWorkflows3" + parameters: + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." 
+ required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "query" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'." + required: false + type: "string" + x-exportParamName: "IdName" + x-optionalDataType: "String" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." + required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." 
+ required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowList" + /api/v1/workflows/{id.project}/{id.domain}/{id.name}: + get: + tags: + - "AdminService" + summary: "Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions." + operationId: "ListWorkflows" + parameters: + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource.\nThe combination of project\ + \ + domain + name uniquely identifies the resource.\n+optional - in certain\ + \ contexts - like 'List API', 'Launch plans'" + required: true + type: "string" + x-exportParamName: "IdName" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" + - name: "limit" + in: "query" + description: "Indicates the number of resources to be returned.\n+required." + required: false + type: "integer" + format: "int64" + x-exportParamName: "Limit" + x-optionalDataType: "Int64" + - name: "token" + in: "query" + description: "In the case of multiple pages of results, this server-provided\ + \ token can be used to fetch the next page\nin a query.\n+optional." 
+ required: false + type: "string" + x-exportParamName: "Token" + x-optionalDataType: "String" + - name: "filters" + in: "query" + description: "Indicates a list of filters passed as string.\nMore info on\ + \ constructing filters : \n+optional." + required: false + type: "string" + x-exportParamName: "Filters" + x-optionalDataType: "String" + - name: "sort_by.key" + in: "query" + description: "Indicates an attribute to sort the response values.\n+required." + required: false + type: "string" + x-exportParamName: "SortByKey" + x-optionalDataType: "String" + - name: "sort_by.direction" + in: "query" + description: "Indicates the direction to apply sort key for response values.\n\ + +optional.\n\n - DESCENDING: By default, fields are sorted in descending\ + \ order." + required: false + type: "string" + default: "DESCENDING" + enum: + - "DESCENDING" + - "ASCENDING" + x-exportParamName: "SortByDirection" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." + schema: + $ref: "#/definitions/adminWorkflowList" + /api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}: + get: + tags: + - "AdminService" + summary: "Fetch a :ref:`ref_flyteidl.admin.Workflow` definition." + operationId: "GetWorkflow" + parameters: + - name: "id.project" + in: "path" + description: "Name of the project the resource belongs to." + required: true + type: "string" + x-exportParamName: "IdProject" + - name: "id.domain" + in: "path" + description: "Name of the domain the resource belongs to.\nA domain can be\ + \ considered as a subset within a specific project." + required: true + type: "string" + x-exportParamName: "IdDomain" + - name: "id.name" + in: "path" + description: "User provided value for the resource." + required: true + type: "string" + x-exportParamName: "IdName" + - name: "id.version" + in: "path" + description: "Specific version of the resource." 
+ required: true + type: "string" + x-exportParamName: "IdVersion" + - name: "id.resource_type" + in: "query" + description: "Identifies the specific type of resource that this identifier\ + \ corresponds to.\n\n - DATASET: A dataset represents an entity modeled\ + \ in Flyte DataCatalog. A Dataset is also a versioned entity and can be\ + \ a compilation of multiple individual objects.\nEventually all Catalog\ + \ objects should be modeled similar to Flyte Objects. The Dataset entities\ + \ makes it possible for the UI and CLI to act on the objects \nin a similar\ + \ manner to other Flyte objects" + required: false + type: "string" + default: "UNSPECIFIED" + enum: + - "UNSPECIFIED" + - "TASK" + - "WORKFLOW" + - "LAUNCH_PLAN" + - "DATASET" + x-exportParamName: "IdResourceType" + x-optionalDataType: "String" + - name: "id.org" + in: "query" + description: "Optional, org key applied to the resource." + required: false + type: "string" + x-exportParamName: "IdOrg" + x-optionalDataType: "String" + responses: + 200: + description: "A successful response." 
+ schema: + $ref: "#/definitions/adminWorkflow" +definitions: + BlobTypeBlobDimensionality: + type: "string" + enum: + - "SINGLE" + - "MULTIPART" + default: "SINGLE" + CatalogReservationStatus: + type: "string" + description: "Indicates the status of a catalog reservation operation.\n\n - RESERVATION_DISABLED:\ + \ Used to indicate that reservations are disabled\n - RESERVATION_ACQUIRED:\ + \ Used to indicate that a reservation was successfully acquired or extended\n\ + \ - RESERVATION_EXISTS: Used to indicate that an active reservation currently\ + \ exists\n - RESERVATION_RELEASED: Used to indicate that the reservation has\ + \ been successfully released\n - RESERVATION_FAILURE: Used to indicate that\ + \ a reservation operation resulted in failure" + enum: + - "RESERVATION_DISABLED" + - "RESERVATION_ACQUIRED" + - "RESERVATION_EXISTS" + - "RESERVATION_RELEASED" + - "RESERVATION_FAILURE" + default: "RESERVATION_DISABLED" + ComparisonExpressionOperator: + type: "string" + title: "Binary Operator for each expression" + description: "- GT: Greater Than\n - LT: Less Than" + enum: + - "EQ" + - "NEQ" + - "GT" + - "GTE" + - "LT" + - "LTE" + default: "EQ" + ConjunctionExpressionLogicalOperator: + type: "string" + title: "Nested conditions. They can be conjoined using AND / OR\nOrder of evaluation\ + \ is not important as the operators are Commutative" + description: "- AND: Conjunction" + enum: + - "AND" + - "OR" + default: "AND" + ConnectionSetIdList: + type: "object" + properties: + ids: + type: "array" + items: + type: "string" + example: + ids: + - "ids" + - "ids" + ContainerArchitecture: + type: "string" + description: "Architecture-type the container image supports." 
+ enum: + - "UNKNOWN" + - "AMD64" + - "ARM64" + - "ARM_V6" + - "ARM_V7" + default: "UNKNOWN" + DataLoadingConfigLiteralMapFormat: + type: "string" + title: "LiteralMapFormat decides the encoding format in which the input metadata\ + \ should be made available to the containers.\nIf the user has access to the\ + \ protocol buffer definitions, it is recommended to use the PROTO format.\n\ + JSON and YAML do not need any protobuf definitions to read it\nAll remote references\ + \ in core.LiteralMap are replaced with local filesystem references (the data\ + \ is downloaded to local filesystem)" + description: "- JSON: JSON / YAML for the metadata (which contains inlined primitive\ + \ values). The representation is inline with the standard json specification\ + \ as specified - https://www.json.org/json-en.html\n - PROTO: Proto is a serialized\ + \ binary of `core.LiteralMap` defined in flyteidl/core" + enum: + - "JSON" + - "YAML" + - "PROTO" + default: "JSON" + ExecutionErrorErrorKind: + type: "string" + title: "Error type: System or User" + enum: + - "UNKNOWN" + - "USER" + - "SYSTEM" + default: "UNKNOWN" + ExecutionMetadataExecutionMode: + type: "string" + description: "The method by which this execution was launched.\n\n - MANUAL: The\ + \ default execution mode, MANUAL implies that an execution was launched by an\ + \ individual.\n - SCHEDULED: A schedule triggered this execution launch.\n -\ + \ SYSTEM: A system process was responsible for launching this execution rather\ + \ an individual.\n - RELAUNCH: This execution was launched with identical inputs\ + \ as a previous execution.\n - CHILD_WORKFLOW: This execution was triggered\ + \ by another execution.\n - RECOVERED: This execution was recovered from another\ + \ execution." 
+ enum: + - "MANUAL" + - "SCHEDULED" + - "SYSTEM" + - "RELAUNCH" + - "CHILD_WORKFLOW" + - "RECOVERED" + default: "MANUAL" + IOStrategyDownloadMode: + type: "string" + title: "Mode to use for downloading" + description: "- DOWNLOAD_EAGER: All data will be downloaded before the main container\ + \ is executed\n - DOWNLOAD_STREAM: Data will be downloaded as a stream and an\ + \ End-Of-Stream marker will be written to indicate all data has been downloaded.\ + \ Refer to protocol for details\n - DO_NOT_DOWNLOAD: Large objects (offloaded)\ + \ will not be downloaded" + enum: + - "DOWNLOAD_EAGER" + - "DOWNLOAD_STREAM" + - "DO_NOT_DOWNLOAD" + default: "DOWNLOAD_EAGER" + IOStrategyUploadMode: + type: "string" + title: "Mode to use for uploading" + description: "- UPLOAD_ON_EXIT: All data will be uploaded after the main container\ + \ exits\n - UPLOAD_EAGER: Data will be uploaded as it appears. Refer to protocol\ + \ specification for details\n - DO_NOT_UPLOAD: Data will not be uploaded, only\ + \ references will be written" + enum: + - "UPLOAD_ON_EXIT" + - "UPLOAD_EAGER" + - "DO_NOT_UPLOAD" + default: "UPLOAD_ON_EXIT" + PluginOverrideMissingPluginBehavior: + type: "string" + description: " - FAIL: By default, if this plugin is not enabled for a Flyte deployment\ + \ then execution will fail.\n - USE_DEFAULT: Uses the system-configured default\ + \ implementation." + enum: + - "FAIL" + - "USE_DEFAULT" + default: "FAIL" + ProjectProjectState: + type: "string" + description: "The state of the project is used to control its visibility in the\ + \ UI and validity.\n\n - ACTIVE: By default, all projects are considered active.\n\ + \ - ARCHIVED: Archived projects are no longer visible in the UI and no longer\ + \ valid.\n - SYSTEM_GENERATED: System generated projects that aren't explicitly\ + \ created or managed by a user." 
+ enum: + - "ACTIVE" + - "ARCHIVED" + - "SYSTEM_GENERATED" + default: "ACTIVE" + QualityOfServiceTier: + type: "string" + description: " - UNDEFINED: Default: no quality of service specified." + enum: + - "UNDEFINED" + - "HIGH" + - "MEDIUM" + - "LOW" + default: "UNDEFINED" + ResourcesResourceEntry: + type: "object" + properties: + name: + description: "Resource name." + $ref: "#/definitions/ResourcesResourceName" + value: + type: "string" + title: "Value must be a valid k8s quantity. See\nhttps://github.com/kubernetes/apimachinery/blob/master/pkg/api/resource/quantity.go#L30-L80" + description: "Encapsulates a resource name and value." + example: + name: {} + value: "value" + ResourcesResourceName: + type: "string" + description: "Known resource names.\n\n - EPHEMERAL_STORAGE: For Kubernetes-based\ + \ deployments, pods use ephemeral local storage for scratch space, caching,\ + \ and for logs." + enum: + - "UNKNOWN" + - "CPU" + - "GPU" + - "MEMORY" + - "STORAGE" + - "EPHEMERAL_STORAGE" + default: "UNKNOWN" + RuntimeMetadataRuntimeType: + type: "string" + enum: + - "OTHER" + - "FLYTE_SDK" + default: "OTHER" + SchemaColumnSchemaColumnType: + type: "string" + enum: + - "INTEGER" + - "FLOAT" + - "STRING" + - "BOOLEAN" + - "DATETIME" + - "DURATION" + default: "INTEGER" + SchemaTypeSchemaColumn: + type: "object" + properties: + name: + type: "string" + title: "A unique name -within the schema type- for the column" + type: + description: "The column type. This allows a limited set of types currently." + $ref: "#/definitions/SchemaColumnSchemaColumnType" + example: + name: "name" + type: {} + SecretMountType: + type: "string" + description: " - ANY: Default case, indicates the client can tolerate either mounting\ + \ options.\n - ENV_VAR: ENV_VAR indicates the secret needs to be mounted as\ + \ an environment variable.\n - FILE: FILE indicates the secret needs to be mounted\ + \ as a file." 
+ enum: + - "ANY" + - "ENV_VAR" + - "FILE" + default: "ANY" + SortDirection: + type: "string" + description: " - DESCENDING: By default, fields are sorted in descending order." + enum: + - "DESCENDING" + - "ASCENDING" + default: "DESCENDING" + SqlDialect: + type: "string" + description: "The dialect of the SQL statement. This is used to validate and parse\ + \ SQL statements at compilation time to avoid\nexpensive runtime operations.\ + \ If set to an unsupported dialect, no validation will be done on the statement.\n\ + We support the following dialect: ansi, hive." + enum: + - "UNDEFINED" + - "ANSI" + - "HIVE" + - "OTHER" + default: "UNDEFINED" + StructuredDatasetTypeDatasetColumn: + type: "object" + properties: + name: + type: "string" + description: "A unique name within the schema type for the column." + literal_type: + description: "The column type." + $ref: "#/definitions/coreLiteralType" + example: + name: "name" + TaskExecutionMetadataInstanceClass: + type: "string" + description: "Includes the broad category of machine used for this specific task\ + \ execution.\n\n - DEFAULT: The default instance class configured for the flyte\ + \ application platform.\n - INTERRUPTIBLE: The instance class configured for\ + \ interruptible tasks." + enum: + - "DEFAULT" + - "INTERRUPTIBLE" + default: "DEFAULT" + TaskLogMessageFormat: + type: "string" + enum: + - "UNKNOWN" + - "CSV" + - "JSON" + default: "UNKNOWN" + WorkflowMetadataOnFailurePolicy: + type: "string" + title: "Failure Handling Strategy" + description: "- FAIL_IMMEDIATELY: FAIL_IMMEDIATELY instructs the system to fail\ + \ as soon as a node fails in the workflow. It'll automatically\nabort all currently\ + \ running nodes and clean up resources before finally marking the workflow executions\ + \ as\nfailed.\n - FAIL_AFTER_EXECUTABLE_NODES_COMPLETE: FAIL_AFTER_EXECUTABLE_NODES_COMPLETE\ + \ instructs the system to make as much progress as it can. 
The system will\n\ + not alter the dependencies of the execution graph so any node that depend on\ + \ the failed node will not be run.\nOther nodes that will be executed to completion\ + \ before cleaning up resources and marking the workflow\nexecution as failed." + enum: + - "FAIL_IMMEDIATELY" + - "FAIL_AFTER_EXECUTABLE_NODES_COMPLETE" + default: "FAIL_IMMEDIATELY" + adminAbortMetadata: + type: "object" + properties: + cause: + type: "string" + description: "In the case of a user-specified abort, this will pass along\ + \ the user-supplied cause." + principal: + type: "string" + title: "Identifies the entity (if any) responsible for terminating the execution" + description: "Specifies metadata around an aborted workflow execution." + example: + principal: "principal" + cause: "cause" + adminAnnotations: + type: "object" + properties: + values: + type: "object" + description: "Map of custom annotations to be applied to the execution resource." + additionalProperties: + type: "string" + description: "Annotation values to be applied to an execution resource.\nIn the\ + \ future a mode (e.g. OVERRIDE, APPEND, etc) can be defined\nto specify how\ + \ to merge annotations defined at registration and execution time." + example: + values: + key: "values" + adminAuth: + type: "object" + properties: + assumable_iam_role: + type: "string" + description: "Defines an optional iam role which will be used for tasks run\ + \ in executions created with this launch plan." + kubernetes_service_account: + type: "string" + description: "Defines an optional kubernetes service account which will be\ + \ used for tasks run in executions created with this launch plan." + description: "Defines permissions associated with executions created by this launch\ + \ plan spec.\nUse either of these roles when they have permissions required\ + \ by your workflow execution.\nDeprecated." 
+ example: + kubernetes_service_account: "kubernetes_service_account" + assumable_iam_role: "assumable_iam_role" + adminAuthRole: + type: "object" + properties: + assumable_iam_role: + type: "string" + description: "Defines an optional iam role which will be used for tasks run\ + \ in executions created with this launch plan." + kubernetes_service_account: + type: "string" + description: "Defines an optional kubernetes service account which will be\ + \ used for tasks run in executions created with this launch plan." + description: "Defines permissions associated with executions created by this launch\ + \ plan spec.\nUse either of these roles when they have permissions required\ + \ by your workflow execution.\nDeprecated." + example: + kubernetes_service_account: "kubernetes_service_account" + assumable_iam_role: "assumable_iam_role" + adminClusterAssignment: + type: "object" + properties: + cluster_pool_name: + type: "string" + description: "Encapsulates specifications for routing an execution onto a specific\ + \ cluster." + example: + cluster_pool_name: "cluster_pool_name" + adminClusterResourceAttributes: + type: "object" + properties: + attributes: + type: "object" + description: "Custom resource attributes which will be applied in cluster\ + \ resource creation (e.g. quotas).\nMap keys are the *case-sensitive* names\ + \ of variables in templatized resource files.\nMap values should be the\ + \ custom values which get substituted during resource creation." 
+ additionalProperties: + type: "string" + example: + attributes: + key: "attributes" + adminCronSchedule: + type: "object" + properties: + schedule: + type: "string" + title: "Standard/default cron implementation as described by https://en.wikipedia.org/wiki/Cron#CRON_expression;\n\ + Also supports nonstandard predefined scheduling definitions\nas described\ + \ by https://docs.aws.amazon.com/AmazonCloudWatch/latest/events/ScheduledEvents.html#CronExpressions\n\ + except @reboot" + offset: + type: "string" + title: "ISO 8601 duration as described by https://en.wikipedia.org/wiki/ISO_8601#Durations" + description: "Options for schedules to run according to a cron expression." + example: + schedule: "schedule" + offset: "offset" + adminDescription: + type: "object" + properties: + value: + type: "string" + title: "long description - no more than 4KB" + uri: + type: "string" + title: "if the description sizes exceed some threshold we can offload the\ + \ entire\ndescription proto altogether to an external data store, like S3\ + \ rather than store inline in the db" + format: + title: "Format of the long description" + $ref: "#/definitions/adminDescriptionFormat" + icon_link: + type: "string" + title: "Optional link to an icon for the entity" + description: "Full user description with formatting preserved. This can be rendered\n\ + by clients, such as the console or command line tools with in-tact\nformatting." + example: + format: {} + icon_link: "icon_link" + value: "value" + uri: "uri" + adminDescriptionEntity: + type: "object" + properties: + id: + description: "id represents the unique identifier of the description entity." + $ref: "#/definitions/coreIdentifier" + short_description: + type: "string" + description: "One-liner overview of the entity." + long_description: + description: "Full user description with formatting preserved." + $ref: "#/definitions/adminDescription" + source_code: + description: "Optional link to source code used to define this entity." 
+ $ref: "#/definitions/adminSourceCode" + tags: + type: "array" + description: "User-specified tags. These are arbitrary and can be used for\ + \ searching\nfiltering and discovering tasks." + items: + type: "string" + description: "DescriptionEntity contains detailed description for the task/workflow.\n\ + Documentation could provide insight into the algorithms, business use case,\ + \ etc." + example: + short_description: "short_description" + id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + long_description: + format: {} + icon_link: "icon_link" + value: "value" + uri: "uri" + source_code: + link: "link" + tags: + - "tags" + - "tags" + adminDescriptionEntityList: + type: "object" + properties: + descriptionEntities: + type: "array" + description: "A list of DescriptionEntities returned based on the request." + items: + $ref: "#/definitions/adminDescriptionEntity" + token: + type: "string" + description: "In the case of multiple pages of results, the server-provided\ + \ token can be used to fetch the next page\nin a query. If there are no\ + \ more results, this value will be empty." 
+ title: "Represents a list of DescriptionEntities returned from the admin.\nSee\ + \ :ref:`ref_flyteidl.admin.DescriptionEntity` for more details" + example: + descriptionEntities: + - short_description: "short_description" + id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + long_description: + format: {} + icon_link: "icon_link" + value: "value" + uri: "uri" + source_code: + link: "link" + tags: + - "tags" + - "tags" + - short_description: "short_description" + id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + long_description: + format: {} + icon_link: "icon_link" + value: "value" + uri: "uri" + source_code: + link: "link" + tags: + - "tags" + - "tags" + token: "token" + adminDescriptionFormat: + type: "string" + title: "The format of the long description" + description: "- DESCRIPTION_FORMAT_RST: python default documentation - comments\ + \ is rst" + enum: + - "DESCRIPTION_FORMAT_UNKNOWN" + - "DESCRIPTION_FORMAT_MARKDOWN" + - "DESCRIPTION_FORMAT_HTML" + - "DESCRIPTION_FORMAT_RST" + default: "DESCRIPTION_FORMAT_UNKNOWN" + adminDomain: + type: "object" + properties: + id: + type: "string" + description: "Globally unique domain name." + name: + type: "string" + description: "Display name." + description: "Namespace within a project commonly used to differentiate between\ + \ different service instances.\ne.g. \"production\", \"development\", etc." 
+ example: + name: "name" + id: "id" + adminDynamicNodeWorkflowResponse: + type: "object" + properties: + compiled_workflow: + $ref: "#/definitions/coreCompiledWorkflowClosure" + example: + compiled_workflow: + sub_workflows: + - template: + outputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + 
bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + 
string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + metadata: + on_failure: {} + quality_of_service: + tier: {} + spec: 
+ queueing_budget: "queueing_budget" + tags: + key: "tags" + failure_node: + branch_node: + if_else: + other: + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: 
"2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: 
"string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: 
"2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: 
"string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + error: + message: "message" + failed_node_id: "failed_node_id" + case: + condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + 
primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: 
"2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + array_node: + min_successes: 5 + parallelism: 1 + min_success_ratio: 5.637377 + metadata: + retries: + retries: 0 + name: "name" + interruptible: true + timeout: "timeout" + gate_node: + sleep: + duration: "duration" + approve: + signal_id: "signal_id" + signal: + output_variable_name: "output_variable_name" + signal_id: "signal_id" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + upstream_node_ids: + - "upstream_node_ids" + - "upstream_node_ids" + inputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" 
+ - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + 
null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: 
{} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + output_aliases: + - var: "var" + alias: "alias" + - var: "var" + alias: "alias" + task_node: + reference_id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + overrides: + extended_resources: + gpu_accelerator: + partition_size: "partition_size" + unpartitioned: true + device: "device" + resources: + requests: + - name: {} + value: "value" + - name: {} + value: "value" + limits: + - name: {} + value: "value" + - name: {} + value: "value" + id: "id" + workflow_node: + launchplan_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + sub_workflow_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + nodes: + - branch_node: + if_else: + other: + - condition: + conjunction: + operator: {} + comparison: + left_value: 
+ scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: 
"name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: 
+ columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: 
"name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + error: + message: "message" + failed_node_id: "failed_node_id" + case: + condition: + conjunction: + operator: {} + 
comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + 
type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + array_node: + min_successes: 5 + parallelism: 1 + min_success_ratio: 
5.637377 + metadata: + retries: + retries: 0 + name: "name" + interruptible: true + timeout: "timeout" + gate_node: + sleep: + duration: "duration" + approve: + signal_id: "signal_id" + signal: + output_variable_name: "output_variable_name" + signal_id: "signal_id" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + upstream_node_ids: + - "upstream_node_ids" + - "upstream_node_ids" + inputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + 
key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + 
bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: 
+ schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + output_aliases: + - var: "var" + alias: "alias" + - var: "var" + alias: "alias" + task_node: + reference_id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + overrides: + extended_resources: + gpu_accelerator: + partition_size: "partition_size" + unpartitioned: true + device: "device" + resources: + requests: + - name: {} + value: "value" + - name: {} + value: "value" + limits: + - name: {} + value: "value" + - name: {} + value: "value" + id: "id" + workflow_node: + launchplan_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + sub_workflow_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + - branch_node: + if_else: + other: + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: 
"2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: 
"string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: 
"2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: 
"string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + error: + message: "message" + failed_node_id: "failed_node_id" + case: + condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + 
primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: 
"2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + array_node: + min_successes: 5 + parallelism: 1 + min_success_ratio: 5.637377 + metadata: + retries: + retries: 0 + name: "name" + interruptible: true + timeout: "timeout" + gate_node: + sleep: + duration: "duration" + approve: + signal_id: "signal_id" + signal: + output_variable_name: 
"output_variable_name" + signal_id: "signal_id" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + upstream_node_ids: + - "upstream_node_ids" + - "upstream_node_ids" + inputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: 
"external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + 
format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 
+ string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + output_aliases: + - var: "var" + alias: "alias" + - var: "var" + alias: "alias" + task_node: + reference_id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + overrides: + extended_resources: + gpu_accelerator: + partition_size: "partition_size" + unpartitioned: true + device: "device" + resources: + requests: + - name: {} + value: "value" + - name: {} + value: "value" + limits: + - name: {} + value: "value" + - name: {} + value: "value" + id: "id" + workflow_node: + launchplan_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + sub_workflow_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + metadata_defaults: + interruptible: true + id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + interface: + outputs: + variables: + key: + description: "description" + artifact_partial_id: + partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" 
+ project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + artifact_tag: + artifact_key: + domain: "domain" + name: "name" + project: "project" + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + inputs: + variables: + key: + description: "description" + artifact_partial_id: + partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" + project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + 
transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + artifact_tag: + artifact_key: + domain: "domain" + name: "name" + project: "project" + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + connections: + upstream: + key: + ids: + - "ids" + - "ids" + downstream: + key: + ids: + - "ids" + - "ids" + - template: + outputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + 
columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: 
"string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: 
"string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + metadata: + on_failure: {} + quality_of_service: + tier: {} + spec: + queueing_budget: "queueing_budget" + tags: + key: "tags" + failure_node: + branch_node: + if_else: + other: + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: 
"external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + 
type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + 
uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + 
- name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + error: + message: "message" + failed_node_id: "failed_node_id" + case: + condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + 
external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + 
uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + array_node: + min_successes: 5 + parallelism: 1 + min_success_ratio: 5.637377 + metadata: + retries: + retries: 0 + name: "name" + interruptible: true + timeout: "timeout" + gate_node: + sleep: + duration: "duration" + approve: + signal_id: "signal_id" + signal: + output_variable_name: "output_variable_name" + signal_id: "signal_id" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - 
name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + upstream_node_ids: + - "upstream_node_ids" + - "upstream_node_ids" + inputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + 
variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + 
format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: 
true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + output_aliases: + - var: "var" + alias: "alias" + - var: "var" + alias: "alias" + task_node: + reference_id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + overrides: + extended_resources: + gpu_accelerator: + partition_size: "partition_size" + unpartitioned: true + device: "device" + resources: + requests: + - name: {} + value: "value" + - name: {} + value: "value" + limits: + - name: {} + value: "value" + - name: {} + value: "value" + id: "id" + workflow_node: + launchplan_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + sub_workflow_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + nodes: + - branch_node: + if_else: + other: + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 
6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + 
null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + 
string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + 
bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + error: + message: "message" + failed_node_id: "failed_node_id" + case: + condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null 
+ - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + 
string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + array_node: + min_successes: 5 + parallelism: 1 + min_success_ratio: 5.637377 + metadata: + retries: + retries: 0 + name: "name" + interruptible: true + timeout: "timeout" + gate_node: + sleep: + duration: "duration" + approve: + signal_id: "signal_id" + signal: + output_variable_name: "output_variable_name" + signal_id: "signal_id" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: 
"string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + upstream_node_ids: + - "upstream_node_ids" + - "upstream_node_ids" + inputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + 
number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + 
list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + 
bindings: {} + output_aliases: + - var: "var" + alias: "alias" + - var: "var" + alias: "alias" + task_node: + reference_id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + overrides: + extended_resources: + gpu_accelerator: + partition_size: "partition_size" + unpartitioned: true + device: "device" + resources: + requests: + - name: {} + value: "value" + - name: {} + value: "value" + limits: + - name: {} + value: "value" + - name: {} + value: "value" + id: "id" + workflow_node: + launchplan_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + sub_workflow_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + - branch_node: + if_else: + other: + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + 
external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + 
metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: 
"external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + 
key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + error: + message: "message" + failed_node_id: "failed_node_id" + case: + condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" 
+ format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: 
"external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + array_node: + min_successes: 5 + parallelism: 1 + min_success_ratio: 5.637377 + metadata: + retries: + retries: 0 + name: "name" + interruptible: true + timeout: "timeout" + gate_node: + sleep: + duration: "duration" + approve: + signal_id: "signal_id" + signal: + output_variable_name: "output_variable_name" + signal_id: "signal_id" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} 
+ tag: "tag" + upstream_node_ids: + - "upstream_node_ids" + - "upstream_node_ids" + inputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + 
node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: 
"name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + output_aliases: + - var: "var" + alias: "alias" + - var: "var" + alias: "alias" + task_node: + reference_id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + 
overrides: + extended_resources: + gpu_accelerator: + partition_size: "partition_size" + unpartitioned: true + device: "device" + resources: + requests: + - name: {} + value: "value" + - name: {} + value: "value" + limits: + - name: {} + value: "value" + - name: {} + value: "value" + id: "id" + workflow_node: + launchplan_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + sub_workflow_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + metadata_defaults: + interruptible: true + id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + interface: + outputs: + variables: + key: + description: "description" + artifact_partial_id: + partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" + project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + 
string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + artifact_tag: + artifact_key: + domain: "domain" + name: "name" + project: "project" + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + inputs: + variables: + key: + description: "description" + artifact_partial_id: + partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" + project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - 
"values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + artifact_tag: + artifact_key: + domain: "domain" + name: "name" + project: "project" + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + connections: + upstream: + key: + ids: + - "ids" + - "ids" + downstream: + key: + ids: + - "ids" + - "ids" + tasks: + - template: + container: + args: + - "args" + - "args" + image: "image" + resources: + requests: + - name: {} + value: "value" + - name: {} + value: "value" + limits: + - name: {} + value: "value" + - name: {} + value: "value" + data_config: + io_strategy: + upload_mode: {} + download_mode: {} + format: {} + output_path: "output_path" + enabled: true + input_path: "input_path" + env: + - value: "value" + key: "key" + - value: "value" + key: "key" + ports: + - container_port: 2 + - container_port: 2 + config: + - value: "value" + key: "key" + - value: "value" + key: "key" + command: + - "command" + - "command" + architecture: {} + metadata: + retries: + retries: 0 + pod_template_name: "pod_template_name" + discoverable: true + runtime: + flavor: "flavor" + type: {} + version: "version" + cache_serializable: true + discovery_version: "discovery_version" + deprecated_error_message: "deprecated_error_message" + interruptible: true + timeout: "timeout" + generates_deck: true + cache_ignore_input_vars: + - "cache_ignore_input_vars" + - "cache_ignore_input_vars" + tags: + key: "tags" + task_type_version: 7 + extended_resources: + gpu_accelerator: + partition_size: "partition_size" + unpartitioned: true + device: "device" + custom: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + k8s_pod: + metadata: + annotations: 
+ key: "annotations" + labels: + key: "labels" + pod_spec: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + data_config: + io_strategy: + upload_mode: {} + download_mode: {} + format: {} + output_path: "output_path" + enabled: true + input_path: "input_path" + id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + type: "type" + interface: + outputs: + variables: + key: + description: "description" + artifact_partial_id: + partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" + project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + 
structure: + dataclass_type: {} + tag: "tag" + artifact_tag: + artifact_key: + domain: "domain" + name: "name" + project: "project" + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + inputs: + variables: + key: + description: "description" + artifact_partial_id: + partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" + project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + artifact_tag: + artifact_key: + domain: "domain" + name: "name" + project: "project" + value: + 
input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + config: + key: "config" + security_context: + run_as: + execution_identity: "execution_identity" + iam_role: "iam_role" + oauth2_client: + client_secret: + mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + client_id: "client_id" + k8s_service_account: "k8s_service_account" + tokens: + - idp_discovery_endpoint: "idp_discovery_endpoint" + name: "name" + client: + client_secret: + mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + client_id: "client_id" + type: {} + token_endpoint: "token_endpoint" + - idp_discovery_endpoint: "idp_discovery_endpoint" + name: "name" + client: + client_secret: + mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + client_id: "client_id" + type: {} + token_endpoint: "token_endpoint" + secrets: + - mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + - mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + sql: + dialect: {} + statement: "statement" + - template: + container: + args: + - "args" + - "args" + image: "image" + resources: + requests: + - name: {} + value: "value" + - name: {} + value: "value" + limits: + - name: {} + value: "value" + - name: {} + value: "value" + data_config: + io_strategy: + upload_mode: {} + download_mode: {} + format: {} + output_path: "output_path" + enabled: true + input_path: "input_path" + env: + - value: "value" + key: "key" + - value: "value" + key: "key" + ports: + - container_port: 2 + - container_port: 2 + config: + - value: "value" + key: "key" + - value: "value" + key: "key" + command: + - "command" + - "command" + architecture: {} + metadata: + retries: + retries: 0 + pod_template_name: 
"pod_template_name" + discoverable: true + runtime: + flavor: "flavor" + type: {} + version: "version" + cache_serializable: true + discovery_version: "discovery_version" + deprecated_error_message: "deprecated_error_message" + interruptible: true + timeout: "timeout" + generates_deck: true + cache_ignore_input_vars: + - "cache_ignore_input_vars" + - "cache_ignore_input_vars" + tags: + key: "tags" + task_type_version: 7 + extended_resources: + gpu_accelerator: + partition_size: "partition_size" + unpartitioned: true + device: "device" + custom: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + k8s_pod: + metadata: + annotations: + key: "annotations" + labels: + key: "labels" + pod_spec: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + data_config: + io_strategy: + upload_mode: {} + download_mode: {} + format: {} + output_path: "output_path" + enabled: true + input_path: "input_path" + id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + type: "type" + interface: + outputs: + variables: + key: + description: "description" + artifact_partial_id: + partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" + project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + type: + schema: + columns: + - name: "name" + 
type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + artifact_tag: + artifact_key: + domain: "domain" + name: "name" + project: "project" + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + inputs: + variables: + key: + description: "description" + artifact_partial_id: + partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" + project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 
6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + artifact_tag: + artifact_key: + domain: "domain" + name: "name" + project: "project" + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + config: + key: "config" + security_context: + run_as: + execution_identity: "execution_identity" + iam_role: "iam_role" + oauth2_client: + client_secret: + mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + client_id: "client_id" + k8s_service_account: "k8s_service_account" + tokens: + - idp_discovery_endpoint: "idp_discovery_endpoint" + name: "name" + client: + client_secret: + mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + client_id: "client_id" + type: {} + token_endpoint: "token_endpoint" + - idp_discovery_endpoint: "idp_discovery_endpoint" + name: "name" + client: + client_secret: + mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + client_id: "client_id" + type: {} + token_endpoint: "token_endpoint" + secrets: + - mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + - mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + sql: + 
dialect: {} + statement: "statement" + primary: + template: + outputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + 
collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: 
"name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + metadata: + on_failure: {} + quality_of_service: + tier: {} + spec: + queueing_budget: "queueing_budget" + tags: + key: "tags" + failure_node: + branch_node: + if_else: + other: + - condition: + conjunction: + operator: {} + comparison: + 
left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: 
+ - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + 
schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: 
{} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + error: + message: "message" + failed_node_id: "failed_node_id" + case: + condition: + conjunction: + operator: {} 
+ comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + 
type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + array_node: + min_successes: 5 + parallelism: 1 + min_success_ratio: 
5.637377 + metadata: + retries: + retries: 0 + name: "name" + interruptible: true + timeout: "timeout" + gate_node: + sleep: + duration: "duration" + approve: + signal_id: "signal_id" + signal: + output_variable_name: "output_variable_name" + signal_id: "signal_id" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + upstream_node_ids: + - "upstream_node_ids" + - "upstream_node_ids" + inputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + 
key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + 
bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: 
+ schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + output_aliases: + - var: "var" + alias: "alias" + - var: "var" + alias: "alias" + task_node: + reference_id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + overrides: + extended_resources: + gpu_accelerator: + partition_size: "partition_size" + unpartitioned: true + device: "device" + resources: + requests: + - name: {} + value: "value" + - name: {} + value: "value" + limits: + - name: {} + value: "value" + - name: {} + value: "value" + id: "id" + workflow_node: + launchplan_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + sub_workflow_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + nodes: + - branch_node: + if_else: + other: + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + 
datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + 
string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: 
"2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: 
"string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + error: + message: "message" + failed_node_id: "failed_node_id" + case: + condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + 
primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: 
"2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + array_node: + min_successes: 5 + parallelism: 1 + min_success_ratio: 5.637377 + metadata: + retries: + retries: 0 + name: "name" + interruptible: true + timeout: "timeout" + gate_node: + sleep: + duration: "duration" + approve: + signal_id: "signal_id" + signal: + output_variable_name: 
"output_variable_name" + signal_id: "signal_id" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + upstream_node_ids: + - "upstream_node_ids" + - "upstream_node_ids" + inputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: 
"external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + 
format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 
+ string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + output_aliases: + - var: "var" + alias: "alias" + - var: "var" + alias: "alias" + task_node: + reference_id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + overrides: + extended_resources: + gpu_accelerator: + partition_size: "partition_size" + unpartitioned: true + device: "device" + resources: + requests: + - name: {} + value: "value" + - name: {} + value: "value" + limits: + - name: {} + value: "value" + - name: {} + value: "value" + id: "id" + workflow_node: + launchplan_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + sub_workflow_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + - branch_node: + if_else: + other: + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + 
metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + 
external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + - condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + 
structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: 
"external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + error: + message: "message" + failed_node_id: "failed_node_id" + case: + condition: + conjunction: + operator: {} + comparison: + left_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: 
"value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + right_value: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + 
structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + var: "var" + operator: {} + array_node: + min_successes: 5 + parallelism: 1 + min_success_ratio: 5.637377 + metadata: + retries: + retries: 0 + name: "name" + interruptible: true + timeout: "timeout" + gate_node: + sleep: + duration: "duration" + approve: + signal_id: "signal_id" + signal: + output_variable_name: "output_variable_name" + signal_id: "signal_id" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + 
number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + upstream_node_ids: + - "upstream_node_ids" + - "upstream_node_ids" + inputs: + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 
+ string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + - var: "var" + binding: + scalar: + schema: + type: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + uri: "uri" + blob: + metadata: + type: + dimensionality: {} + format: "format" + uri: "uri" + none_type: {} + primitive: + duration: "duration" + datetime: "2000-01-23T04:56:07.000+00:00" + string_value: "string_value" + boolean: true + float_value: 5.962133916683182 + integer: "integer" + binary: + 
tag: "tag" + value: "value" + structured_dataset: + metadata: + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + uri: "uri" + union: + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + error: + message: "message" + failed_node_id: "failed_node_id" + generic: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + promise: + attr_path: + - string_value: "string_value" + int_value: 6 + - string_value: "string_value" + int_value: 6 + var: "var" + node_id: "node_id" + collection: + bindings: + - null + - null + union: + targetType: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: 
"external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + map: + bindings: {} + output_aliases: + - var: "var" + alias: "alias" + - var: "var" + alias: "alias" + task_node: + reference_id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + overrides: + extended_resources: + gpu_accelerator: + partition_size: "partition_size" + unpartitioned: true + device: "device" + resources: + requests: + - name: {} + value: "value" + - name: {} + value: "value" + limits: + - name: {} + value: "value" + - name: {} + value: "value" + id: "id" + workflow_node: + launchplan_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + sub_workflow_ref: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + metadata_defaults: + interruptible: true + id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + interface: + outputs: + variables: + key: + description: "description" + artifact_partial_id: + partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" + project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + 
bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + type: + schema: + columns: + - name: "name" + type: {} + - name: "name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + artifact_tag: + artifact_key: + domain: "domain" + name: "name" + project: "project" + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + inputs: + variables: + key: + description: "description" + artifact_partial_id: + partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" + project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + type: + schema: + columns: + - name: "name" + type: {} + - name: 
"name" + type: {} + annotation: + annotations: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + structured_dataset_type: + external_schema_type: "external_schema_type" + columns: + - name: "name" + - name: "name" + format: "format" + external_schema_bytes: "external_schema_bytes" + metadata: + fields: + key: + list_value: + values: + - null + - null + number_value: 6.027456183070403 + string_value: "string_value" + null_value: {} + bool_value: true + blob: + dimensionality: {} + format: "format" + enum_type: + values: + - "values" + - "values" + union_type: + variants: + - null + - null + simple: {} + structure: + dataclass_type: {} + tag: "tag" + artifact_tag: + artifact_key: + domain: "domain" + name: "name" + project: "project" + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + connections: + upstream: + key: + ids: + - "ids" + - "ids" + downstream: + key: + ids: + - "ids" + - "ids" adminEmailNotification: type: "object" properties: @@ -3441,6 +19870,7 @@ definitions: \ Get and List execution requests." 
example: id: + org: "org" domain: "domain" name: "name" project: "project" @@ -3451,6 +19881,7 @@ definitions: uri: "uri" phase: {} workflow_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -3514,11 +19945,13 @@ definitions: principal: "principal" parent_node_execution: execution_id: + org: "org" domain: "domain" name: "name" project: "project" node_id: "node_id" reference_execution: + org: "org" domain: "domain" name: "name" project: "project" @@ -3532,12 +19965,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" - partitions: value: key: @@ -3547,12 +19993,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" scheduled_at: "2000-01-23T04:56:07.000+00:00" nesting: 0 system_metadata: @@ -3579,6 +20038,7 @@ definitions: - "tags" - "tags" launch_plan: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -3738,6 +20198,7 @@ definitions: uri: "uri" phase: {} workflow_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -3825,6 +20286,9 @@ definitions: \ be\nincluded in this map. 
If not required and not provided, defaults apply.\n\ +optional" $ref: "#/definitions/coreLiteralMap" + org: + type: "string" + description: "Optional, org key applied to the resource." description: "Request to launch an execution with the given project, domain and\ \ optionally-assigned name." adminExecutionCreateResponse: @@ -3837,6 +20301,7 @@ definitions: \ a generated name." example: id: + org: "org" domain: "domain" name: "name" project: "project" @@ -3857,6 +20322,270 @@ definitions: example: executions: - id: + org: "org" + domain: "domain" + name: "name" + project: "project" + closure: + outputs: + values: + literals: {} + uri: "uri" + phase: {} + workflow_id: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + created_at: "2000-01-23T04:56:07.000+00:00" + state_change_details: + occurred_at: "2000-01-23T04:56:07.000+00:00" + principal: "principal" + state: {} + error: + code: "code" + kind: {} + message: "message" + error_uri: "error_uri" + duration: "duration" + computed_inputs: + literals: {} + abort_metadata: + principal: "principal" + cause: "cause" + updated_at: "2000-01-23T04:56:07.000+00:00" + started_at: "2000-01-23T04:56:07.000+00:00" + abort_cause: "abort_cause" + output_data: + literals: {} + notifications: + - pager_duty: + recipients_email: + - "recipients_email" + - "recipients_email" + slack: + recipients_email: + - "recipients_email" + - "recipients_email" + phases: + - {} + - {} + email: + recipients_email: + - "recipients_email" + - "recipients_email" + - pager_duty: + recipients_email: + - "recipients_email" + - "recipients_email" + slack: + recipients_email: + - "recipients_email" + - "recipients_email" + phases: + - {} + - {} + email: + recipients_email: + - "recipients_email" + - "recipients_email" + spec: + metadata: + mode: {} + principal: "principal" + parent_node_execution: + execution_id: + org: "org" + domain: "domain" + name: "name" + project: "project" + node_id: 
"node_id" + reference_execution: + org: "org" + domain: "domain" + name: "name" + project: "project" + artifact_ids: + - partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" + project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + - partitions: + value: + key: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + artifact_key: + domain: "domain" + name: "name" + project: "project" + version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" + scheduled_at: "2000-01-23T04:56:07.000+00:00" + nesting: 0 + system_metadata: + execution_cluster: "execution_cluster" + namespace: "namespace" + disable_all: true + inputs: + literals: {} + annotations: + values: + key: "values" + max_parallelism: 6 + envs: + values: + - value: "value" + key: "key" + - value: "value" + key: "key" + interruptible: true + labels: + values: + key: "values" + tags: + - "tags" + - "tags" + launch_plan: + org: "org" + domain: "domain" + resource_type: {} + name: "name" + project: "project" + version: "version" + quality_of_service: + tier: {} + spec: + queueing_budget: "queueing_budget" + overwrite_cache: true + 
raw_output_data_config: + output_location_prefix: "output_location_prefix" + cluster_assignment: + cluster_pool_name: "cluster_pool_name" + notifications: + notifications: + - pager_duty: + recipients_email: + - "recipients_email" + - "recipients_email" + slack: + recipients_email: + - "recipients_email" + - "recipients_email" + phases: + - {} + - {} + email: + recipients_email: + - "recipients_email" + - "recipients_email" + - pager_duty: + recipients_email: + - "recipients_email" + - "recipients_email" + slack: + recipients_email: + - "recipients_email" + - "recipients_email" + phases: + - {} + - {} + email: + recipients_email: + - "recipients_email" + - "recipients_email" + security_context: + run_as: + execution_identity: "execution_identity" + iam_role: "iam_role" + oauth2_client: + client_secret: + mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + client_id: "client_id" + k8s_service_account: "k8s_service_account" + tokens: + - idp_discovery_endpoint: "idp_discovery_endpoint" + name: "name" + client: + client_secret: + mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + client_id: "client_id" + type: {} + token_endpoint: "token_endpoint" + - idp_discovery_endpoint: "idp_discovery_endpoint" + name: "name" + client: + client_secret: + mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + client_id: "client_id" + type: {} + token_endpoint: "token_endpoint" + secrets: + - mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + - mount_requirement: {} + group_version: "group_version" + key: "key" + group: "group" + auth_role: + kubernetes_service_account: "kubernetes_service_account" + assumable_iam_role: "assumable_iam_role" + - id: + org: "org" domain: "domain" name: "name" project: "project" @@ -3867,6 +20596,7 @@ definitions: uri: "uri" phase: {} workflow_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ 
-3930,11 +20660,13 @@ definitions: principal: "principal" parent_node_execution: execution_id: + org: "org" domain: "domain" name: "name" project: "project" node_id: "node_id" reference_execution: + org: "org" domain: "domain" name: "name" project: "project" @@ -3948,229 +20680,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" - - partitions: + time_partition: value: - key: - input_binding: - var: "var" - static_value: "static_value" - triggered_binding: - transform: "transform" - partition_key: "partition_key" - index: 1 - artifact_key: - domain: "domain" - name: "name" - project: "project" - version: "version" - scheduled_at: "2000-01-23T04:56:07.000+00:00" - nesting: 0 - system_metadata: - execution_cluster: "execution_cluster" - namespace: "namespace" - disable_all: true - inputs: - literals: {} - annotations: - values: - key: "values" - max_parallelism: 6 - envs: - values: - - value: "value" - key: "key" - - value: "value" - key: "key" - interruptible: true - labels: - values: - key: "values" - tags: - - "tags" - - "tags" - launch_plan: - domain: "domain" - resource_type: {} - name: "name" - project: "project" - version: "version" - quality_of_service: - tier: {} - spec: - queueing_budget: "queueing_budget" - overwrite_cache: true - raw_output_data_config: - output_location_prefix: "output_location_prefix" - cluster_assignment: - cluster_pool_name: "cluster_pool_name" - notifications: - notifications: - - pager_duty: - recipients_email: - - "recipients_email" - - "recipients_email" - slack: - recipients_email: - - "recipients_email" - - "recipients_email" - phases: - - {} - - {} - email: - recipients_email: - - "recipients_email" - - "recipients_email" - - pager_duty: - recipients_email: - - "recipients_email" - - "recipients_email" - slack: - 
recipients_email: - - "recipients_email" - - "recipients_email" - phases: - - {} - - {} - email: - recipients_email: - - "recipients_email" - - "recipients_email" - security_context: - run_as: - execution_identity: "execution_identity" - iam_role: "iam_role" - oauth2_client: - client_secret: - mount_requirement: {} - group_version: "group_version" - key: "key" - group: "group" - client_id: "client_id" - k8s_service_account: "k8s_service_account" - tokens: - - idp_discovery_endpoint: "idp_discovery_endpoint" - name: "name" - client: - client_secret: - mount_requirement: {} - group_version: "group_version" - key: "key" - group: "group" - client_id: "client_id" - type: {} - token_endpoint: "token_endpoint" - - idp_discovery_endpoint: "idp_discovery_endpoint" - name: "name" - client: - client_secret: - mount_requirement: {} - group_version: "group_version" - key: "key" - group: "group" - client_id: "client_id" - type: {} - token_endpoint: "token_endpoint" - secrets: - - mount_requirement: {} - group_version: "group_version" - key: "key" - group: "group" - - mount_requirement: {} - group_version: "group_version" - key: "key" - group: "group" - auth_role: - kubernetes_service_account: "kubernetes_service_account" - assumable_iam_role: "assumable_iam_role" - - id: - domain: "domain" - name: "name" - project: "project" - closure: - outputs: - values: - literals: {} - uri: "uri" - phase: {} - workflow_id: - domain: "domain" - resource_type: {} - name: "name" - project: "project" - version: "version" - created_at: "2000-01-23T04:56:07.000+00:00" - state_change_details: - occurred_at: "2000-01-23T04:56:07.000+00:00" - principal: "principal" - state: {} - error: - code: "code" - kind: {} - message: "message" - error_uri: "error_uri" - duration: "duration" - computed_inputs: - literals: {} - abort_metadata: - principal: "principal" - cause: "cause" - updated_at: "2000-01-23T04:56:07.000+00:00" - started_at: "2000-01-23T04:56:07.000+00:00" - abort_cause: "abort_cause" - 
output_data: - literals: {} - notifications: - - pager_duty: - recipients_email: - - "recipients_email" - - "recipients_email" - slack: - recipients_email: - - "recipients_email" - - "recipients_email" - phases: - - {} - - {} - email: - recipients_email: - - "recipients_email" - - "recipients_email" - - pager_duty: - recipients_email: - - "recipients_email" - - "recipients_email" - slack: - recipients_email: - - "recipients_email" - - "recipients_email" - phases: - - {} - - {} - email: - recipients_email: - - "recipients_email" - - "recipients_email" - spec: - metadata: - mode: {} - principal: "principal" - parent_node_execution: - execution_id: - domain: "domain" - name: "name" - project: "project" - node_id: "node_id" - reference_execution: - domain: "domain" - name: "name" - project: "project" - artifact_ids: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" - partitions: value: key: @@ -4180,27 +20708,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" - - partitions: + time_partition: value: - key: - input_binding: - var: "var" - static_value: "static_value" - triggered_binding: - transform: "transform" - partition_key: "partition_key" - index: 1 - artifact_key: - domain: "domain" - name: "name" - project: "project" - version: "version" + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" scheduled_at: "2000-01-23T04:56:07.000+00:00" nesting: 0 system_metadata: @@ -4227,6 +20753,7 @@ definitions: - "tags" - 
"tags" launch_plan: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -4370,11 +20897,13 @@ definitions: principal: "principal" parent_node_execution: execution_id: + org: "org" domain: "domain" name: "name" project: "project" node_id: "node_id" reference_execution: + org: "org" domain: "domain" name: "name" project: "project" @@ -4388,12 +20917,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" - partitions: value: key: @@ -4403,12 +20945,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" scheduled_at: "2000-01-23T04:56:07.000+00:00" nesting: 0 system_metadata: @@ -4546,11 +21101,13 @@ definitions: principal: "principal" parent_node_execution: execution_id: + org: "org" domain: "domain" name: "name" project: "project" node_id: "node_id" reference_execution: + org: "org" domain: "domain" name: "name" project: "project" @@ -4564,12 +21121,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: 
"domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" - partitions: value: key: @@ -4579,12 +21149,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" scheduled_at: "2000-01-23T04:56:07.000+00:00" nesting: 0 system_metadata: @@ -4611,6 +21194,7 @@ definitions: - "tags" - "tags" launch_plan: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -4843,6 +21427,7 @@ definitions: \ have a default value for said input." 
example: id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -4863,12 +21448,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -4933,7 +21531,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" updated_at: "2000-01-23T04:56:07.000+00:00" created_at: "2000-01-23T04:56:07.000+00:00" state: {} @@ -5065,12 +21665,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -5135,11 +21748,14 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_query: binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 artifact_id: partitions: @@ -5151,12 +21767,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: 
"2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" uri: "uri" artifact_tag: artifact_key: @@ -5170,7 +21799,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_id: partitions: value: @@ -5181,15 +21812,29 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" required: true spec: workflow_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -5394,12 +22039,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -5464,11 +22122,14 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: 
"2000-01-23T04:56:07.000+00:00" artifact_query: binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 artifact_id: partitions: @@ -5480,12 +22141,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" uri: "uri" artifact_tag: artifact_key: @@ -5499,7 +22173,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_id: partitions: value: @@ -5510,12 +22186,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" required: true security_context: run_as: @@ -5605,12 +22294,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + 
bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -5675,7 +22377,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" updated_at: "2000-01-23T04:56:07.000+00:00" created_at: "2000-01-23T04:56:07.000+00:00" state: {} @@ -5807,12 +22511,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -5877,11 +22594,14 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_query: binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 artifact_id: partitions: @@ -5893,12 +22613,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" uri: "uri" artifact_tag: artifact_key: @@ -5912,7 +22645,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + 
bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_id: partitions: value: @@ -5923,12 +22658,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" required: true adminLaunchPlanCreateRequest: type: "object" @@ -5964,6 +22712,7 @@ definitions: example: launch_plans: - id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -5984,12 +22733,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -6054,7 +22816,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" updated_at: "2000-01-23T04:56:07.000+00:00" created_at: "2000-01-23T04:56:07.000+00:00" state: {} @@ -6186,12 +22950,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + 
input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -6256,11 +23033,14 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_query: binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 artifact_id: partitions: @@ -6272,12 +23052,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" uri: "uri" artifact_tag: artifact_key: @@ -6291,7 +23084,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_id: partitions: value: @@ -6302,15 +23097,29 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" required: true spec: workflow_id: + org: "org" domain: "domain" 
resource_type: {} name: "name" @@ -6515,12 +23324,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -6585,11 +23407,14 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_query: binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 artifact_id: partitions: @@ -6601,12 +23426,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" uri: "uri" artifact_tag: artifact_key: @@ -6620,7 +23458,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_id: partitions: value: @@ -6631,12 +23471,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: 
"version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" required: true security_context: run_as: @@ -6686,6 +23539,7 @@ definitions: kubernetes_service_account: "kubernetes_service_account" assumable_iam_role: "assumable_iam_role" - id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -6706,12 +23560,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -6776,7 +23643,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" updated_at: "2000-01-23T04:56:07.000+00:00" created_at: "2000-01-23T04:56:07.000+00:00" state: {} @@ -6908,12 +23777,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -6978,11 +23860,14 @@ definitions: triggered_binding: transform: "transform" 
partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_query: binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 artifact_id: partitions: @@ -6994,12 +23879,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" uri: "uri" artifact_tag: artifact_key: @@ -7013,7 +23911,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_id: partitions: value: @@ -7024,15 +23924,29 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" required: true spec: workflow_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -7237,12 +24151,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + 
input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -7307,11 +24234,14 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_query: binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 artifact_id: partitions: @@ -7323,12 +24253,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" uri: "uri" artifact_tag: artifact_key: @@ -7342,7 +24285,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_id: partitions: value: @@ -7353,12 +24298,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" required: true security_context: run_as: @@ -7539,6 +24497,7 @@ 
definitions: description: "User-provided launch plan definition and configuration values." example: workflow_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -7743,12 +24702,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -7813,11 +24785,14 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_query: binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 artifact_id: partitions: @@ -7829,12 +24804,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" uri: "uri" artifact_tag: artifact_key: @@ -7848,7 +24836,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_id: partitions: value: @@ -7859,12 +24849,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + 
bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" required: true security_context: run_as: @@ -7953,6 +24956,7 @@ definitions: configurations: - launch_plan: "launch_plan" workflow: "workflow" + org: "org" domain: "domain" project: "project" attributes: @@ -8060,6 +25064,7 @@ definitions: task_type: "task_type" - launch_plan: "launch_plan" workflow: "workflow" + org: "org" domain: "domain" project: "project" attributes: @@ -8193,13 +25198,18 @@ definitions: type: "string" launch_plan: type: "string" - description: "Represents a custom set of attributes applied for either a domain;\ - \ a domain and project; or\ndomain, project and workflow name.\nThese are used\ - \ to override system level defaults for kubernetes cluster resource management,\n\ - default execution values, and more all across different levels of specificity." + org: + type: "string" + description: "Optional, org key applied to the resource." + description: "Represents a custom set of attributes applied for either a domain\ + \ (and optional org); a domain and project (and optional org);\nor domain, project\ + \ and workflow name (and optional org).\nThese are used to override system level\ + \ defaults for kubernetes cluster resource management,\ndefault execution values,\ + \ and more all across different levels of specificity." 
example: launch_plan: "launch_plan" workflow: "workflow" + org: "org" domain: "domain" project: "project" attributes: @@ -8473,6 +25483,7 @@ definitions: state: {} resource_type: {} id: + org: "org" domain: "domain" name: "name" project: "project" @@ -8491,11 +25502,15 @@ definitions: title: "User provided value for the resource.\nThe combination of project\ \ + domain + name uniquely identifies the resource.\n+optional - in certain\ \ contexts - like 'List API', 'Launch plans'" + org: + type: "string" + description: "Optional, org key applied to the resource." description: "Encapsulation of fields that identifies a Flyte resource.\nA Flyte\ \ resource can be a task, workflow or launch plan.\nA resource can internally\ \ have multiple versions and is uniquely identified\nby project, domain, and\ \ name." example: + org: "org" domain: "domain" name: "name" project: "project" @@ -8515,10 +25530,12 @@ definitions: description: "Represents a list of NamedEntityIdentifiers." example: entities: - - domain: "domain" + - org: "org" + domain: "domain" name: "name" project: "project" - - domain: "domain" + - org: "org" + domain: "domain" name: "name" project: "project" token: "token" @@ -8543,6 +25560,7 @@ definitions: state: {} resource_type: {} id: + org: "org" domain: "domain" name: "name" project: "project" @@ -8551,6 +25569,7 @@ definitions: state: {} resource_type: {} id: + org: "org" domain: "domain" name: "name" project: "project" @@ -8652,6 +25671,7 @@ definitions: duration: "duration" workflow_node_metadata: executionId: + org: "org" domain: "domain" name: "name" project: "project" @@ -8660,6 +25680,7 @@ definitions: catalog_key: source_task_execution: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -8667,12 +25688,14 @@ definitions: version: "version" node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" node_id: "node_id" retry_attempt: 0 dataset_id: + org: "org" domain: "domain" resource_type: {} 
name: "name" @@ -10222,6 +27245,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -10247,12 +27271,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -11378,6 +28404,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -11403,12 +28430,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -12533,6 +29562,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -12558,12 +29588,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -12572,6 +29604,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -12592,12 +29625,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -12662,7 +29708,9 @@ definitions: triggered_binding: transform: "transform" partition_key: 
"partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -12677,12 +29725,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -12747,7 +29808,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -14233,6 +31296,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -14258,12 +31322,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -15389,6 +32455,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -15414,12 +32481,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -16544,6 +33613,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -16569,12 +33639,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" 
version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -16583,6 +33655,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -16603,12 +33676,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -16673,7 +33759,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -16688,12 +33776,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -16758,7 +33859,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -16876,6 +33979,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -16897,12 +34001,25 @@ definitions: triggered_binding: 
transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -16967,7 +34084,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -16982,12 +34101,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -17052,7 +34184,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -17207,6 +34341,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -17228,12 +34363,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + 
static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -17298,7 +34446,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -17313,12 +34463,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -17383,7 +34546,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -18908,6 +36073,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -18933,12 +36099,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -20064,6 +37232,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -20089,12 +37258,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" 
resource_type: {} name: "name" @@ -21219,6 +38390,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -21244,12 +38416,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -21258,6 +38432,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -21278,12 +38453,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -21348,7 +38536,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -21363,12 +38553,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -21433,7 +38636,9 @@ definitions: triggered_binding: transform: "transform" partition_key: 
"partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -21447,6 +38652,7 @@ definitions: - "ids" dynamic_job_spec_uri: "dynamic_job_spec_uri" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -21482,6 +38688,7 @@ definitions: input_uri: "input_uri" id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -21491,6 +38698,7 @@ definitions: duration: "duration" workflow_node_metadata: executionId: + org: "org" domain: "domain" name: "name" project: "project" @@ -21499,6 +38707,7 @@ definitions: catalog_key: source_task_execution: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -21506,12 +38715,14 @@ definitions: version: "version" node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" node_id: "node_id" retry_attempt: 0 dataset_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -21543,6 +38754,7 @@ definitions: input_uri: "input_uri" id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -21552,6 +38764,7 @@ definitions: duration: "duration" workflow_node_metadata: executionId: + org: "org" domain: "domain" name: "name" project: "project" @@ -21560,6 +38773,7 @@ definitions: catalog_key: source_task_execution: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -21567,12 +38781,14 @@ definitions: version: "version" node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" node_id: "node_id" retry_attempt: 0 dataset_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -21786,9 +39002,13 @@ definitions: $ref: "#/definitions/adminLabels" state: $ref: "#/definitions/ProjectProjectState" + org: + type: "string" + description: "Optional, org key applied to the resource." description: "Top-level namespace used to classify different entities like workflows\ \ and executions." 
example: + org: "org" name: "name" domains: - name: "name" @@ -21809,9 +39029,13 @@ definitions: description: "Unique project id for which this set of attributes will be applied." matching_attributes: $ref: "#/definitions/adminMatchingAttributes" + org: + type: "string" + description: "Optional, org key applied to the project." title: "Defines a set of custom matching attributes at the project level.\nFor\ \ more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" example: + org: "org" project: "project" matching_attributes: quality_of_service: @@ -21925,6 +39149,9 @@ definitions: resource_type: title: "Which type of matchable attributes to delete.\n+required" $ref: "#/definitions/adminMatchableResource" + org: + type: "string" + description: "Optional, org key applied to the project." title: "Request to delete a set matchable project level attribute override.\n\ For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" adminProjectAttributesDeleteResponse: @@ -21939,6 +39166,7 @@ definitions: \ info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" example: attributes: + org: "org" project: "project" matching_attributes: quality_of_service: @@ -22065,9 +39293,13 @@ definitions: description: "Unique domain id for which this set of attributes will be applied." matching_attributes: $ref: "#/definitions/adminMatchingAttributes" + org: + type: "string" + description: "Optional, org key applied to the attributes." 
title: "Defines a set of custom matching attributes which defines resource defaults\ \ for a project and domain.\nFor more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" example: + org: "org" domain: "domain" project: "project" matching_attributes: @@ -22185,6 +39417,9 @@ definitions: resource_type: title: "Which type of matchable attributes to delete.\n+required" $ref: "#/definitions/adminMatchableResource" + org: + type: "string" + description: "Optional, org key applied to the attributes." title: "Request to delete a set matchable project domain attribute override.\n\ For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" adminProjectDomainAttributesDeleteResponse: @@ -22199,6 +39434,7 @@ definitions: \ more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" example: attributes: + org: "org" domain: "domain" project: "project" matching_attributes: @@ -22345,7 +39581,8 @@ definitions: \ for more details" example: projects: - - name: "name" + - org: "org" + name: "name" domains: - name: "name" id: "id" @@ -22357,7 +39594,8 @@ definitions: labels: values: key: "values" - - name: "name" + - org: "org" + name: "name" domains: - name: "name" id: "id" @@ -22490,6 +39728,7 @@ definitions: example: short_description: "short_description" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -22603,6 +39842,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -22624,12 +39864,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: 
"transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -22694,7 +39947,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -22709,12 +39964,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -22779,7 +40047,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -22950,6 +40220,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -22971,12 +40242,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -23041,7 +40325,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + 
bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -23056,12 +40342,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -23126,7 +40425,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -23392,6 +40693,7 @@ definitions: - input_uri: "input_uri" id: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -23399,6 +40701,7 @@ definitions: version: "version" node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -23486,6 +40789,7 @@ definitions: - input_uri: "input_uri" id: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -23493,6 +40797,7 @@ definitions: version: "version" node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -23597,6 +40902,7 @@ definitions: tasks: - short_description: "short_description" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -23710,6 +41016,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -23731,12 +41038,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" 
name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -23801,7 +41121,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -23816,12 +41138,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -23886,7 +41221,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -23938,6 +41275,7 @@ definitions: statement: "statement" - short_description: "short_description" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -24051,6 +41389,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -24072,12 +41411,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + 
static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -24142,7 +41494,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -24157,12 +41511,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -24227,7 +41594,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -24384,6 +41753,7 @@ definitions: example: short_description: "short_description" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -25866,6 +43236,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -25891,12 +43262,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -27022,6 +44395,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -27047,12 +44421,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + 
org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -28177,6 +45553,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -28202,12 +45579,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -28216,6 +45595,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -28236,12 +45616,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -28306,7 +45699,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -28321,12 +45716,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: 
"2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -28391,7 +45799,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -29877,6 +47287,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -29902,12 +47313,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -31033,6 +48446,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -31058,12 +48472,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -32188,6 +49604,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -32213,12 +49630,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -32227,6 +49646,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -32247,12 +49667,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + 
transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -32317,7 +49750,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -32332,12 +49767,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -32402,7 +49850,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -32520,6 +49970,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -32541,12 +49992,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -32611,7 +50075,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + 
bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -32626,12 +50092,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -32696,7 +50175,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -32851,6 +50332,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -32872,12 +50354,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -32942,7 +50437,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -32957,12 +50454,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: 
"2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -33027,7 +50537,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -34552,6 +52064,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -34577,12 +52090,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -35708,6 +53223,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -35733,12 +53249,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -36863,6 +54381,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -36888,12 +54407,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -36902,6 +54423,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -36922,12 +54444,25 @@ definitions: 
triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -36992,7 +54527,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -37007,12 +54544,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -37077,7 +54627,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -37104,11 +54656,15 @@ definitions: description: "Workflow name for which this set of attributes will be applied." matching_attributes: $ref: "#/definitions/adminMatchingAttributes" + org: + type: "string" + description: "Optional, org key applied to the attributes." 
title: "Defines a set of custom matching attributes which defines resource defaults\ \ for a project, domain and workflow.\nFor more info on matchable attributes,\ \ see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" example: workflow: "workflow" + org: "org" domain: "domain" project: "project" matching_attributes: @@ -37229,6 +54785,9 @@ definitions: resource_type: title: "Which type of matchable attributes to delete.\n+required" $ref: "#/definitions/adminMatchableResource" + org: + type: "string" + description: "Optional, org key applied to the attributes." title: "Request to delete a set matchable workflow attribute override.\nFor more\ \ info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration`" adminWorkflowAttributesDeleteResponse: @@ -37243,6 +54802,7 @@ definitions: example: attributes: workflow: "workflow" + org: "org" domain: "domain" project: "project" matching_attributes: @@ -38848,6 +56408,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -38873,12 +56434,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -40004,6 +57567,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -40029,12 +57593,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -41159,6 +58725,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -41184,12 +58751,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} 
name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -41198,6 +58767,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -41218,12 +58788,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -41288,7 +58871,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -41303,12 +58888,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -41373,7 +58971,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -42859,6 +60459,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -42884,12 +60485,14 @@ 
definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -44015,6 +61618,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -44040,12 +61644,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -45170,6 +62776,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -45195,12 +62802,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -45209,6 +62818,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -45229,12 +62839,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -45299,7 +62922,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -45314,12 +62939,25 @@ definitions: triggered_binding: 
transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -45384,7 +63022,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -45502,6 +63142,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -45523,12 +63164,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -45593,7 +63247,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -45608,12 +63264,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: 
"static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -45678,7 +63347,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -45833,6 +63504,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -45854,12 +63526,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -45924,7 +63609,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -45939,12 +63626,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -46009,7 +63709,9 @@ definitions: triggered_binding: transform: "transform" 
partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -47534,6 +65236,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -47559,12 +65262,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -48690,6 +66395,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -48715,12 +66421,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -49845,6 +67553,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -49870,12 +67579,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -49884,6 +67595,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -49904,12 +67616,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: 
"2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -49974,7 +67699,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -49989,12 +67716,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -50059,7 +67799,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -50251,12 +67993,14 @@ definitions: - null - null workflow_id: + org: "org" domain: "domain" name: "name" project: "project" end_time: "2000-01-23T04:56:07.000+00:00" task_id: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -50264,6 +68008,7 @@ definitions: version: "version" node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -50272,6 +68017,7 @@ definitions: operation_id: "operation_id" node_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -50295,6 +68041,7 @@ definitions: workflows: - short_description: "short_description" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -51777,6 +69524,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -51802,12 +69550,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: 
"domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -52933,6 +70683,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -52958,12 +70709,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -54088,6 +71841,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -54113,12 +71867,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -54127,6 +71883,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -54147,12 +71904,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -54217,7 +71987,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -54232,12 +72004,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + 
bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -54302,7 +72087,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -55788,6 +73575,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -55813,12 +73601,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -56944,6 +74734,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -56969,12 +74760,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -58099,6 +75892,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -58124,12 +75918,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -58138,6 +75934,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ 
-58158,12 +75955,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -58228,7 +76038,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -58243,12 +76055,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -58313,7 +76138,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -58431,6 +76258,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -58452,12 +76280,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + 
value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -58522,7 +76363,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -58537,12 +76380,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -58607,7 +76463,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -58762,6 +76620,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -58783,12 +76642,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -58853,7 +76725,9 @@ 
definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -58868,12 +76742,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -58938,7 +76825,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -60463,6 +78352,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -60488,12 +78378,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -61619,6 +79511,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -61644,12 +79537,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -62774,6 +80669,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -62799,12 +80695,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + 
org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -62813,6 +80711,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -62833,12 +80732,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -62903,7 +80815,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -62918,12 +80832,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -62988,7 +80915,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -63003,6 +80932,7 @@ definitions: created_at: "2000-01-23T04:56:07.000+00:00" - short_description: "short_description" 
id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -64485,6 +82415,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -64510,12 +82441,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -65641,6 +83574,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -65666,12 +83600,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -66796,6 +84732,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -66821,12 +84758,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -66835,6 +84774,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -66855,12 +84795,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -66925,7 +84878,9 @@ definitions: 
triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -66940,12 +84895,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -67010,7 +84978,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -68496,6 +86466,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -68521,12 +86492,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -69652,6 +87625,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -69677,12 +87651,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -70807,6 +88783,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -70832,12 +88809,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: 
"domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -70846,6 +88825,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -70866,12 +88846,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -70936,7 +88929,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -70951,12 +88946,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -71021,7 +89029,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -71139,6 +89149,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" 
@@ -71160,12 +89171,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -71230,7 +89254,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -71245,12 +89271,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -71315,7 +89354,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -71470,6 +89511,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -71491,12 +89533,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + 
time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -71561,7 +89616,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -71576,12 +89633,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -71646,7 +89716,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -73171,6 +91243,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -73196,12 +91269,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -74327,6 +92402,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -74352,12 +92428,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: 
"version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -75482,6 +93560,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -75507,12 +93586,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -75521,6 +93602,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -75541,12 +93623,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -75611,7 +93706,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -75626,12 +93723,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -75696,7 +93806,9 @@ definitions: 
triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -75800,13 +93912,17 @@ definitions: format: "int64" partition_key: type: "string" - title: "These two fields are only relevant in the partition value case" + bind_to_time_partition: + type: "boolean" + format: "boolean" transform: type: "string" + title: "This is only relevant in the time partition case" title: "Only valid for triggers" example: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 coreArtifactID: type: "object" @@ -75816,7 +93932,14 @@ definitions: version: type: "string" partitions: + description: "Think of a partition as a tag on an Artifact, except it's a\ + \ key-value pair.\nDifferent partitions naturally have different versions\ + \ (execution ids)." $ref: "#/definitions/corePartitions" + time_partition: + description: "There is no such thing as an empty time partition - if it's\ + \ not set, then there is no time partition." 
+ $ref: "#/definitions/coreTimePartition" example: partitions: value: @@ -75827,12 +93950,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" coreArtifactKey: type: "object" properties: @@ -75870,6 +94006,7 @@ definitions: binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 artifact_id: partitions: @@ -75881,12 +94018,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" uri: "uri" artifact_tag: artifact_key: @@ -75900,7 +94050,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" coreArtifactTag: type: "object" properties: @@ -75920,7 +94072,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" coreBinary: type: "object" properties: @@ -77338,7 +95492,8 @@ definitions: \ artifact was added to the cache\n - CACHE_LOOKUP_FAILURE: Used to indicate\ \ that cache lookup 
failed because of an error\n - CACHE_PUT_FAILURE: Used to\ \ indicate that cache lookup failed because of an error\n - CACHE_SKIPPED: Used\ - \ to indicate the cache lookup was skipped" + \ to indicate the cache lookup was skipped\n - CACHE_EVICTED: Used to indicate\ + \ that the cache was evicted" enum: - "CACHE_DISABLED" - "CACHE_MISS" @@ -77347,6 +95502,7 @@ definitions: - "CACHE_LOOKUP_FAILURE" - "CACHE_PUT_FAILURE" - "CACHE_SKIPPED" + - "CACHE_EVICTED" default: "CACHE_DISABLED" coreCatalogMetadata: type: "object" @@ -77365,6 +95521,7 @@ definitions: example: source_task_execution: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -77372,12 +95529,14 @@ definitions: version: "version" node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" node_id: "node_id" retry_attempt: 0 dataset_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -77738,6 +95897,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -77759,12 +95919,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -77829,7 +96002,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -77844,12 +96019,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + 
time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -77914,7 +96102,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -79451,6 +97641,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -79476,12 +97667,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -80607,6 +98800,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -80632,12 +98826,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -81762,6 +99958,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -81787,12 +99984,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -81801,6 +100000,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -81821,12 +100021,25 @@ 
definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -81891,7 +100104,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -81906,12 +100121,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -81976,7 +100204,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -83489,6 +101719,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -83514,12 +101745,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -84645,6 +102878,7 @@ definitions: alias: "alias" task_node: 
reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -84670,12 +102904,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -85800,6 +104036,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -85825,12 +104062,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -85839,6 +104078,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -85859,12 +104099,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -85929,7 +104182,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -85944,12 +104199,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + 
static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -86014,7 +104282,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -87500,6 +105770,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -87525,12 +105796,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -88656,6 +106929,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -88681,12 +106955,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -89811,6 +108087,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -89836,12 +108113,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -89850,6 +108129,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -89870,12 +108150,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: 
"domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -89940,7 +108233,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -89955,12 +108250,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -90025,7 +108333,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -90143,6 +108453,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -90164,12 +108475,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: 
"2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -90234,7 +108558,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -90249,12 +108575,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -90319,7 +108658,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -90474,6 +108815,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -90495,12 +108837,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -90565,7 +108920,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ 
-90580,12 +108937,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -90650,7 +109020,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -92175,6 +110547,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -92200,12 +110573,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -93331,6 +111706,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -93356,12 +111732,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -94486,6 +112864,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -94511,12 +112890,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -94525,6 
+112906,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -94545,12 +112927,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -94615,7 +113010,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -94630,12 +113027,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -94700,7 +113110,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -95102,8 +113514,12 @@ definitions: version: type: "string" description: "Specific version of the resource." + org: + type: "string" + description: "Optional, org key applied to the resource." description: "Encapsulation of fields that uniquely identifies a Flyte resource." 
example: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -96187,6 +114603,11 @@ definitions: properties: static_value: type: "string" + title: "The string static value is for use in the Partitions object" + time_value: + type: "string" + format: "date-time" + title: "The time value is for use in the TimePartition case" triggered_binding: $ref: "#/definitions/coreArtifactBindingData" input_binding: @@ -96198,7 +114619,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" coreLiteral: type: "object" properties: @@ -97626,6 +116049,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -97651,12 +116075,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -97672,6 +116098,7 @@ definitions: description: "Encapsulation of fields that identify a Flyte node execution entity." 
example: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -98080,12 +116507,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -98150,11 +116590,14 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_query: binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 artifact_id: partitions: @@ -98166,12 +116609,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" uri: "uri" artifact_tag: artifact_key: @@ -98185,7 +116641,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_id: partitions: value: @@ -98196,12 +116654,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" 
artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" required: true coreParameterMap: type: "object" @@ -98340,12 +116811,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -98410,11 +116894,14 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_query: binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 artifact_id: partitions: @@ -98426,12 +116913,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" uri: "uri" artifact_tag: artifact_key: @@ -98445,7 +116945,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: 
true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_id: partitions: value: @@ -98456,12 +116958,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" required: true corePartitions: type: "object" @@ -98479,7 +116994,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" corePrimitive: type: "object" properties: @@ -99015,12 +117532,14 @@ definitions: - null - null workflow_id: + org: "org" domain: "domain" name: "name" project: "project" end_time: "2000-01-23T04:56:07.000+00:00" task_id: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -99028,6 +117547,7 @@ definitions: version: "version" node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -99036,6 +117556,7 @@ definitions: operation_id: "operation_id" node_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -99140,6 +117661,7 @@ definitions: description: "Encapsulation of fields that identify a Flyte task execution entity." example: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -99147,6 +117669,7 @@ definitions: version: "version" node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -99276,6 +117799,7 @@ definitions: description: "Refers to the task that the Node is to execute." 
example: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -99486,6 +118010,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -99507,12 +118032,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -99577,7 +118115,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -99592,12 +118132,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -99662,7 +118215,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -99712,6 +118267,22 @@ definitions: sql: dialect: {} statement: "statement" + coreTimePartition: + type: "object" + properties: + value: + $ref: "#/definitions/coreLabelValue" + example: + 
value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" coreTypeAnnotation: type: "object" properties: @@ -99776,12 +118347,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -99846,7 +118430,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -99861,12 +118447,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -99931,7 +118530,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" coreUnion: type: "object" properties: @@ -100102,12 +118703,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: 
true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -100172,7 +118786,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" coreVariableMap: type: "object" properties: @@ -100196,12 +118812,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -100266,7 +118895,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" coreVoid: type: "object" description: "Used to denote a nil/null/None assignment to a scalar value. The\ @@ -100285,8 +118916,12 @@ definitions: name: type: "string" description: "User or system provided value for the resource." + org: + type: "string" + description: "Optional, org key applied to the resource." title: "Encapsulation of fields that uniquely identifies a Flyte workflow execution" example: + org: "org" domain: "domain" name: "name" project: "project" @@ -100361,12 +118996,14 @@ definitions: description: "Refers to a the workflow the node is to execute." 
example: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -101889,6 +120526,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -101914,12 +120552,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -103045,6 +121685,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -103070,12 +121711,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -104200,6 +122843,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -104225,12 +122869,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -104239,6 +122885,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -104259,12 +122906,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + 
bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -104329,7 +122989,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -104344,12 +123006,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -104414,7 +123089,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" eventEventReason: type: "object" properties: @@ -106200,6 +124877,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -106225,12 +124903,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -107356,6 +126036,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -107381,12 +126062,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -108511,6 +127194,7 @@ definitions: alias: "alias" task_node: 
reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -108536,12 +127220,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -108550,6 +127236,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -108570,12 +127257,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -108640,7 +127340,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -108655,12 +127357,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -108725,7 +127440,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: 
"2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -110211,6 +128928,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -110236,12 +128954,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -111367,6 +130087,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -111392,12 +130113,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -112522,6 +131245,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -112547,12 +131271,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -112561,6 +131287,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -112581,12 +131308,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -112651,7 +131391,9 @@ definitions: 
triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -112666,12 +131408,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -112736,7 +131491,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -112854,6 +131611,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -112875,12 +131633,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -112945,7 +131716,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -112960,12 +131733,25 @@ definitions: triggered_binding: transform: "transform" partition_key: 
"partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -113030,7 +131816,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -113185,6 +131973,7 @@ definitions: enabled: true input_path: "input_path" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -113206,12 +131995,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -113276,7 +132078,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -113291,12 +132095,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + 
triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -113361,7 +132178,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" config: key: "config" security_context: @@ -114886,6 +133705,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -114911,12 +133731,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -116042,6 +134864,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -116067,12 +134890,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -117197,6 +136022,7 @@ definitions: alias: "alias" task_node: reference_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -117222,12 +136048,14 @@ definitions: id: "id" workflow_node: launchplan_ref: + org: "org" domain: "domain" resource_type: {} name: "name" project: "project" version: "version" sub_workflow_ref: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -117236,6 +136064,7 @@ definitions: metadata_defaults: interruptible: true id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -117256,12 +136085,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" 
name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -117326,7 +136168,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" inputs: variables: key: @@ -117341,12 +136185,25 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" artifact_key: domain: "domain" name: "name" project: "project" version: "version" + time_partition: + value: + input_binding: + var: "var" + static_value: "static_value" + triggered_binding: + transform: "transform" + partition_key: "partition_key" + bind_to_time_partition: true + index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" type: schema: columns: @@ -117411,7 +136268,9 @@ definitions: triggered_binding: transform: "transform" partition_key: "partition_key" + bind_to_time_partition: true index: 1 + time_value: "2000-01-23T04:56:07.000+00:00" connections: upstream: key: @@ -117425,6 +136284,7 @@ definitions: - "ids" dynamic_job_spec_uri: "dynamic_job_spec_uri" id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -117460,6 +136320,7 @@ definitions: input_uri: "input_uri" id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -117469,6 +136330,7 @@ definitions: duration: "duration" workflow_node_metadata: executionId: + org: "org" domain: "domain" name: "name" project: "project" @@ -117477,6 +136339,7 @@ definitions: catalog_key: source_task_execution: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -117484,12 +136347,14 @@ definitions: version: "version" 
node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" node_id: "node_id" retry_attempt: 0 dataset_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -117550,6 +136415,7 @@ definitions: input_uri: "input_uri" id: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -117557,6 +136423,7 @@ definitions: version: "version" node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" @@ -117658,6 +136525,7 @@ definitions: catalog_key: source_task_execution: task_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -117665,12 +136533,14 @@ definitions: version: "version" node_execution_id: execution_id: + org: "org" domain: "domain" name: "name" project: "project" node_id: "node_id" retry_attempt: 0 dataset_id: + org: "org" domain: "domain" resource_type: {} name: "name" @@ -117690,6 +136560,7 @@ definitions: title: "Metadata for a WorkflowNode" example: executionId: + org: "org" domain: "domain" name: "name" project: "project" diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/api_admin_service.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/api_admin_service.go index 1ad9e1f583..bbb276b6aa 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/api_admin_service.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/api_admin_service.go @@ -116,23 +116,25 @@ func (a *AdminServiceApiService) CreateExecution(ctx context.Context, body Admin } /* -AdminServiceApiService Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition +AdminServiceApiService Triggers the creation of a :ref:`ref_flyteidl.admin.Execution` * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org key applied to the resource. 
* @param body -@return AdminLaunchPlanCreateResponse +@return AdminExecutionCreateResponse */ -func (a *AdminServiceApiService) CreateLaunchPlan(ctx context.Context, body AdminLaunchPlanCreateRequest) (AdminLaunchPlanCreateResponse, *http.Response, error) { +func (a *AdminServiceApiService) CreateExecution2(ctx context.Context, org string, body AdminExecutionCreateRequest) (AdminExecutionCreateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Post") + localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminLaunchPlanCreateResponse + localVarReturnValue AdminExecutionCreateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans" + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/org/{org}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -188,7 +190,7 @@ func (a *AdminServiceApiService) CreateLaunchPlan(ctx context.Context, body Admi } if localVarHttpResponse.StatusCode == 200 { - var v AdminLaunchPlanCreateResponse + var v AdminExecutionCreateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -205,23 +207,23 @@ func (a *AdminServiceApiService) CreateLaunchPlan(ctx context.Context, body Admi } /* -AdminServiceApiService Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. +AdminServiceApiService Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
* @param body -@return AdminNodeExecutionEventResponse +@return AdminLaunchPlanCreateResponse */ -func (a *AdminServiceApiService) CreateNodeEvent(ctx context.Context, body AdminNodeExecutionEventRequest) (AdminNodeExecutionEventResponse, *http.Response, error) { +func (a *AdminServiceApiService) CreateLaunchPlan(ctx context.Context, body AdminLaunchPlanCreateRequest) (AdminLaunchPlanCreateResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminNodeExecutionEventResponse + localVarReturnValue AdminLaunchPlanCreateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/events/nodes" + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans" localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -277,7 +279,7 @@ func (a *AdminServiceApiService) CreateNodeEvent(ctx context.Context, body Admin } if localVarHttpResponse.StatusCode == 200 { - var v AdminNodeExecutionEventResponse + var v AdminLaunchPlanCreateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -294,23 +296,25 @@ func (a *AdminServiceApiService) CreateNodeEvent(ctx context.Context, body Admin } /* -AdminServiceApiService Create and upload a :ref:`ref_flyteidl.admin.Task` definition +AdminServiceApiService Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. 
* @param body -@return FlyteidladminTaskCreateResponse +@return AdminLaunchPlanCreateResponse */ -func (a *AdminServiceApiService) CreateTask(ctx context.Context, body FlyteidladminTaskCreateRequest) (FlyteidladminTaskCreateResponse, *http.Response, error) { +func (a *AdminServiceApiService) CreateLaunchPlan2(ctx context.Context, idOrg string, body AdminLaunchPlanCreateRequest) (AdminLaunchPlanCreateResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue FlyteidladminTaskCreateResponse + localVarReturnValue AdminLaunchPlanCreateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/tasks" + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/org/{id.org}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -366,7 +370,7 @@ func (a *AdminServiceApiService) CreateTask(ctx context.Context, body Flyteidlad } if localVarHttpResponse.StatusCode == 200 { - var v FlyteidladminTaskCreateResponse + var v AdminLaunchPlanCreateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -383,23 +387,23 @@ func (a *AdminServiceApiService) CreateTask(ctx context.Context, body Flyteidlad } /* -AdminServiceApiService Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. +AdminServiceApiService Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
* @param body -@return AdminTaskExecutionEventResponse +@return AdminNodeExecutionEventResponse */ -func (a *AdminServiceApiService) CreateTaskEvent(ctx context.Context, body AdminTaskExecutionEventRequest) (AdminTaskExecutionEventResponse, *http.Response, error) { +func (a *AdminServiceApiService) CreateNodeEvent(ctx context.Context, body AdminNodeExecutionEventRequest) (AdminNodeExecutionEventResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminTaskExecutionEventResponse + localVarReturnValue AdminNodeExecutionEventResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/events/tasks" + localVarPath := a.client.cfg.BasePath + "/api/v1/events/nodes" localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -455,7 +459,7 @@ func (a *AdminServiceApiService) CreateTaskEvent(ctx context.Context, body Admin } if localVarHttpResponse.StatusCode == 200 { - var v AdminTaskExecutionEventResponse + var v AdminNodeExecutionEventResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -472,23 +476,25 @@ func (a *AdminServiceApiService) CreateTaskEvent(ctx context.Context, body Admin } /* -AdminServiceApiService Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition +AdminServiceApiService Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param eventIdExecutionIdOrg Optional, org key applied to the resource. 
* @param body -@return AdminWorkflowCreateResponse +@return AdminNodeExecutionEventResponse */ -func (a *AdminServiceApiService) CreateWorkflow(ctx context.Context, body AdminWorkflowCreateRequest) (AdminWorkflowCreateResponse, *http.Response, error) { +func (a *AdminServiceApiService) CreateNodeEvent2(ctx context.Context, eventIdExecutionIdOrg string, body AdminNodeExecutionEventRequest) (AdminNodeExecutionEventResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminWorkflowCreateResponse + localVarReturnValue AdminNodeExecutionEventResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/workflows" + localVarPath := a.client.cfg.BasePath + "/api/v1/events/org/{event.id.execution_id.org}/nodes" + localVarPath = strings.Replace(localVarPath, "{"+"event.id.execution_id.org"+"}", fmt.Sprintf("%v", eventIdExecutionIdOrg), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -544,7 +550,7 @@ func (a *AdminServiceApiService) CreateWorkflow(ctx context.Context, body AdminW } if localVarHttpResponse.StatusCode == 200 { - var v AdminWorkflowCreateResponse + var v AdminNodeExecutionEventResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -561,23 +567,23 @@ func (a *AdminServiceApiService) CreateWorkflow(ctx context.Context, body AdminW } /* -AdminServiceApiService Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. +AdminServiceApiService Create and upload a :ref:`ref_flyteidl.admin.Task` definition * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
* @param body -@return AdminWorkflowExecutionEventResponse +@return FlyteidladminTaskCreateResponse */ -func (a *AdminServiceApiService) CreateWorkflowEvent(ctx context.Context, body AdminWorkflowExecutionEventRequest) (AdminWorkflowExecutionEventResponse, *http.Response, error) { +func (a *AdminServiceApiService) CreateTask(ctx context.Context, body FlyteidladminTaskCreateRequest) (FlyteidladminTaskCreateResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminWorkflowExecutionEventResponse + localVarReturnValue FlyteidladminTaskCreateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/events/workflows" + localVarPath := a.client.cfg.BasePath + "/api/v1/tasks" localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -633,7 +639,7 @@ func (a *AdminServiceApiService) CreateWorkflowEvent(ctx context.Context, body A } if localVarHttpResponse.StatusCode == 200 { - var v AdminWorkflowExecutionEventResponse + var v FlyteidladminTaskCreateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -650,25 +656,25 @@ func (a *AdminServiceApiService) CreateWorkflowEvent(ctx context.Context, body A } /* -AdminServiceApiService Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +AdminServiceApiService Create and upload a :ref:`ref_flyteidl.admin.Task` definition * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param project Unique project id which this set of attributes references. +required + * @param idOrg Optional, org key applied to the resource. 
* @param body -@return AdminProjectAttributesDeleteResponse +@return FlyteidladminTaskCreateResponse */ -func (a *AdminServiceApiService) DeleteProjectAttributes(ctx context.Context, project string, body AdminProjectAttributesDeleteRequest) (AdminProjectAttributesDeleteResponse, *http.Response, error) { +func (a *AdminServiceApiService) CreateTask2(ctx context.Context, idOrg string, body FlyteidladminTaskCreateRequest) (FlyteidladminTaskCreateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Delete") + localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminProjectAttributesDeleteResponse + localVarReturnValue FlyteidladminTaskCreateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/project_attributes/{project}" - localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/tasks/org/{id.org}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -724,7 +730,7 @@ func (a *AdminServiceApiService) DeleteProjectAttributes(ctx context.Context, pr } if localVarHttpResponse.StatusCode == 200 { - var v AdminProjectAttributesDeleteResponse + var v FlyteidladminTaskCreateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -741,27 +747,23 @@ func (a *AdminServiceApiService) DeleteProjectAttributes(ctx context.Context, pr } /* -AdminServiceApiService Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +AdminServiceApiService Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. 
* @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param project Unique project id which this set of attributes references. +required - * @param domain Unique domain id which this set of attributes references. +required * @param body -@return AdminProjectDomainAttributesDeleteResponse +@return AdminTaskExecutionEventResponse */ -func (a *AdminServiceApiService) DeleteProjectDomainAttributes(ctx context.Context, project string, domain string, body AdminProjectDomainAttributesDeleteRequest) (AdminProjectDomainAttributesDeleteResponse, *http.Response, error) { +func (a *AdminServiceApiService) CreateTaskEvent(ctx context.Context, body AdminTaskExecutionEventRequest) (AdminTaskExecutionEventResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Delete") + localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminProjectDomainAttributesDeleteResponse + localVarReturnValue AdminTaskExecutionEventResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/{project}/{domain}" - localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) - localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/events/tasks" localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -817,7 +819,7 @@ func (a *AdminServiceApiService) DeleteProjectDomainAttributes(ctx context.Conte } if localVarHttpResponse.StatusCode == 200 { - var v AdminProjectDomainAttributesDeleteResponse + var v AdminTaskExecutionEventResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -834,29 +836,25 
@@ func (a *AdminServiceApiService) DeleteProjectDomainAttributes(ctx context.Conte } /* -AdminServiceApiService Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. +AdminServiceApiService Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param project Unique project id which this set of attributes references. +required - * @param domain Unique domain id which this set of attributes references. +required - * @param workflow Workflow name which this set of attributes references. +required + * @param eventParentNodeExecutionIdExecutionIdOrg Optional, org key applied to the resource. * @param body -@return AdminWorkflowAttributesDeleteResponse +@return AdminTaskExecutionEventResponse */ -func (a *AdminServiceApiService) DeleteWorkflowAttributes(ctx context.Context, project string, domain string, workflow string, body AdminWorkflowAttributesDeleteRequest) (AdminWorkflowAttributesDeleteResponse, *http.Response, error) { +func (a *AdminServiceApiService) CreateTaskEvent2(ctx context.Context, eventParentNodeExecutionIdExecutionIdOrg string, body AdminTaskExecutionEventRequest) (AdminTaskExecutionEventResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Delete") + localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminWorkflowAttributesDeleteResponse + localVarReturnValue AdminTaskExecutionEventResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/workflow_attributes/{project}/{domain}/{workflow}" - localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) - localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), 
-1) - localVarPath = strings.Replace(localVarPath, "{"+"workflow"+"}", fmt.Sprintf("%v", workflow), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/events/org/{event.parent_node_execution_id.execution_id.org}/tasks" + localVarPath = strings.Replace(localVarPath, "{"+"event.parent_node_execution_id.execution_id.org"+"}", fmt.Sprintf("%v", eventParentNodeExecutionIdExecutionIdOrg), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -912,7 +910,7 @@ func (a *AdminServiceApiService) DeleteWorkflowAttributes(ctx context.Context, p } if localVarHttpResponse.StatusCode == 200 { - var v AdminWorkflowAttributesDeleteResponse + var v AdminTaskExecutionEventResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -929,28 +927,23 @@ func (a *AdminServiceApiService) DeleteWorkflowAttributes(ctx context.Context, p } /* -AdminServiceApiService Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. +AdminServiceApiService Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. 
+optional - in certain contexts - like 'List API', 'Launch plans' + * @param body -@return AdminLaunchPlan +@return AdminWorkflowCreateResponse */ -func (a *AdminServiceApiService) GetActiveLaunchPlan(ctx context.Context, idProject string, idDomain string, idName string) (AdminLaunchPlan, *http.Response, error) { +func (a *AdminServiceApiService) CreateWorkflow(ctx context.Context, body AdminWorkflowCreateRequest) (AdminWorkflowCreateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminLaunchPlan + localVarReturnValue AdminWorkflowCreateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}" - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/workflows" localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -973,6 +966,8 @@ func (a *AdminServiceApiService) GetActiveLaunchPlan(ctx context.Context, idProj if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -1004,7 +999,7 @@ func (a *AdminServiceApiService) GetActiveLaunchPlan(ctx context.Context, idProj } if localVarHttpResponse.StatusCode == 200 { - var v AdminLaunchPlan + var v 
AdminWorkflowCreateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -1021,32 +1016,25 @@ func (a *AdminServiceApiService) GetActiveLaunchPlan(ctx context.Context, idProj } /* -AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. +AdminServiceApiService Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idResourceType Identifies the specific type of resource that this identifier corresponds to. - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. - * @param idVersion Specific version of the resource. + * @param idOrg Optional, org key applied to the resource. 
+ * @param body -@return AdminDescriptionEntity +@return AdminWorkflowCreateResponse */ -func (a *AdminServiceApiService) GetDescriptionEntity(ctx context.Context, idResourceType string, idProject string, idDomain string, idName string, idVersion string) (AdminDescriptionEntity, *http.Response, error) { +func (a *AdminServiceApiService) CreateWorkflow2(ctx context.Context, idOrg string, body AdminWorkflowCreateRequest) (AdminWorkflowCreateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminDescriptionEntity + localVarReturnValue AdminWorkflowCreateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}" - localVarPath = strings.Replace(localVarPath, "{"+"id.resource_type"+"}", fmt.Sprintf("%v", idResourceType), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/workflows/org/{id.org}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -1069,6 +1057,8 @@ func (a *AdminServiceApiService) GetDescriptionEntity(ctx context.Context, idRes if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, 
localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -1100,7 +1090,7 @@ func (a *AdminServiceApiService) GetDescriptionEntity(ctx context.Context, idRes } if localVarHttpResponse.StatusCode == 200 { - var v AdminDescriptionEntity + var v AdminWorkflowCreateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -1117,28 +1107,23 @@ func (a *AdminServiceApiService) GetDescriptionEntity(ctx context.Context, idRes } /* -AdminServiceApiService Fetches a :ref:`ref_flyteidl.admin.Execution`. +AdminServiceApiService Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User or system provided value for the resource. 
+ * @param body -@return AdminExecution +@return AdminWorkflowExecutionEventResponse */ -func (a *AdminServiceApiService) GetExecution(ctx context.Context, idProject string, idDomain string, idName string) (AdminExecution, *http.Response, error) { +func (a *AdminServiceApiService) CreateWorkflowEvent(ctx context.Context, body AdminWorkflowExecutionEventRequest) (AdminWorkflowExecutionEventResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminExecution + localVarReturnValue AdminWorkflowExecutionEventResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/executions/{id.project}/{id.domain}/{id.name}" - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/events/workflows" localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -1161,6 +1146,8 @@ func (a *AdminServiceApiService) GetExecution(ctx context.Context, idProject str if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -1192,7 +1179,7 @@ func (a *AdminServiceApiService) GetExecution(ctx context.Context, idProject str } if localVarHttpResponse.StatusCode == 200 { - var v AdminExecution + var v AdminWorkflowExecutionEventResponse err = 
a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -1209,28 +1196,25 @@ func (a *AdminServiceApiService) GetExecution(ctx context.Context, idProject str } /* -AdminServiceApiService Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. +AdminServiceApiService Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User or system provided value for the resource. + * @param eventExecutionIdOrg Optional, org key applied to the resource. + * @param body -@return AdminWorkflowExecutionGetDataResponse +@return AdminWorkflowExecutionEventResponse */ -func (a *AdminServiceApiService) GetExecutionData(ctx context.Context, idProject string, idDomain string, idName string) (AdminWorkflowExecutionGetDataResponse, *http.Response, error) { +func (a *AdminServiceApiService) CreateWorkflowEvent2(ctx context.Context, eventExecutionIdOrg string, body AdminWorkflowExecutionEventRequest) (AdminWorkflowExecutionEventResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminWorkflowExecutionGetDataResponse + localVarReturnValue AdminWorkflowExecutionEventResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/data/executions/{id.project}/{id.domain}/{id.name}" - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = 
strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/events/org/{event.execution_id.org}/workflows" + localVarPath = strings.Replace(localVarPath, "{"+"event.execution_id.org"+"}", fmt.Sprintf("%v", eventExecutionIdOrg), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -1253,6 +1237,8 @@ func (a *AdminServiceApiService) GetExecutionData(ctx context.Context, idProject if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -1284,7 +1270,7 @@ func (a *AdminServiceApiService) GetExecutionData(ctx context.Context, idProject } if localVarHttpResponse.StatusCode == 200 { - var v AdminWorkflowExecutionGetDataResponse + var v AdminWorkflowExecutionEventResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -1301,43 +1287,30 @@ func (a *AdminServiceApiService) GetExecutionData(ctx context.Context, idProject } /* -AdminServiceApiService Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. +AdminServiceApiService Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. 
A domain can be considered as a subset within a specific project. - * @param idName User or system provided value for the resource. - * @param optional nil or *GetExecutionMetricsOpts - Optional Parameters: - * @param "Depth" (optional.Int32) - depth defines the number of Flyte entity levels to traverse when breaking down execution details. + * @param project Unique project id which this set of attributes references. +required + * @param body -@return AdminWorkflowExecutionGetMetricsResponse +@return AdminProjectAttributesDeleteResponse */ - -type GetExecutionMetricsOpts struct { - Depth optional.Int32 -} - -func (a *AdminServiceApiService) GetExecutionMetrics(ctx context.Context, idProject string, idDomain string, idName string, localVarOptionals *GetExecutionMetricsOpts) (AdminWorkflowExecutionGetMetricsResponse, *http.Response, error) { +func (a *AdminServiceApiService) DeleteProjectAttributes(ctx context.Context, project string, body AdminProjectAttributesDeleteRequest) (AdminProjectAttributesDeleteResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Delete") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminWorkflowExecutionGetMetricsResponse + localVarReturnValue AdminProjectAttributesDeleteResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}" - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/project_attributes/{project}" + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) localVarHeaderParams := 
make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.Depth.IsSet() { - localVarQueryParams.Add("depth", parameterToString(localVarOptionals.Depth.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -1355,6 +1328,8 @@ func (a *AdminServiceApiService) GetExecutionMetrics(ctx context.Context, idProj if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -1386,7 +1361,7 @@ func (a *AdminServiceApiService) GetExecutionMetrics(ctx context.Context, idProj } if localVarHttpResponse.StatusCode == 200 { - var v AdminWorkflowExecutionGetMetricsResponse + var v AdminProjectAttributesDeleteResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -1403,45 +1378,32 @@ func (a *AdminServiceApiService) GetExecutionMetrics(ctx context.Context, idProj } /* -AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. +AdminServiceApiService Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. - * @param idVersion Specific version of the resource. 
- * @param optional nil or *GetLaunchPlanOpts - Optional Parameters: - * @param "IdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param org Optional, org key applied to the project. + * @param project Unique project id which this set of attributes references. +required + * @param body -@return AdminLaunchPlan +@return AdminProjectAttributesDeleteResponse */ - -type GetLaunchPlanOpts struct { - IdResourceType optional.String -} - -func (a *AdminServiceApiService) GetLaunchPlan(ctx context.Context, idProject string, idDomain string, idName string, idVersion string, localVarOptionals *GetLaunchPlanOpts) (AdminLaunchPlan, *http.Response, error) { +func (a *AdminServiceApiService) DeleteProjectAttributes2(ctx context.Context, org string, project string, body AdminProjectAttributesDeleteRequest) (AdminProjectAttributesDeleteResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Delete") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminLaunchPlan + localVarReturnValue AdminProjectAttributesDeleteResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}" - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", 
fmt.Sprintf("%v", idName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/org/{org}/{project}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.IdResourceType.IsSet() { - localVarQueryParams.Add("id.resource_type", parameterToString(localVarOptionals.IdResourceType.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -1459,6 +1421,8 @@ func (a *AdminServiceApiService) GetLaunchPlan(ctx context.Context, idProject st if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -1490,7 +1454,7 @@ func (a *AdminServiceApiService) GetLaunchPlan(ctx context.Context, idProject st } if localVarHttpResponse.StatusCode == 200 { - var v AdminLaunchPlan + var v AdminProjectAttributesDeleteResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -1507,30 +1471,27 @@ func (a *AdminServiceApiService) GetLaunchPlan(ctx context.Context, idProject st } /* -AdminServiceApiService Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. +AdminServiceApiService Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. 
* @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param resourceType Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. +required - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' + * @param project Unique project id which this set of attributes references. +required + * @param domain Unique domain id which this set of attributes references. +required + * @param body -@return AdminNamedEntity +@return AdminProjectDomainAttributesDeleteResponse */ -func (a *AdminServiceApiService) GetNamedEntity(ctx context.Context, resourceType string, idProject string, idDomain string, idName string) (AdminNamedEntity, *http.Response, error) { +func (a *AdminServiceApiService) DeleteProjectDomainAttributes(ctx context.Context, project string, domain string, body AdminProjectDomainAttributesDeleteRequest) (AdminProjectDomainAttributesDeleteResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Delete") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminNamedEntity + localVarReturnValue AdminProjectDomainAttributesDeleteResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" - localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - 
localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -1553,6 +1514,8 @@ func (a *AdminServiceApiService) GetNamedEntity(ctx context.Context, resourceTyp if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -1584,7 +1547,7 @@ func (a *AdminServiceApiService) GetNamedEntity(ctx context.Context, resourceTyp } if localVarHttpResponse.StatusCode == 200 { - var v AdminNamedEntity + var v AdminProjectDomainAttributesDeleteResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -1601,30 +1564,29 @@ func (a *AdminServiceApiService) GetNamedEntity(ctx context.Context, resourceTyp } /* -AdminServiceApiService Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. +AdminServiceApiService Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idExecutionIdProject Name of the project the resource belongs to. 
- * @param idExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idExecutionIdName User or system provided value for the resource. - * @param idNodeId + * @param org Optional, org key applied to the attributes. + * @param project Unique project id which this set of attributes references. +required + * @param domain Unique domain id which this set of attributes references. +required + * @param body -@return FlyteidladminNodeExecution +@return AdminProjectDomainAttributesDeleteResponse */ -func (a *AdminServiceApiService) GetNodeExecution(ctx context.Context, idExecutionIdProject string, idExecutionIdDomain string, idExecutionIdName string, idNodeId string) (FlyteidladminNodeExecution, *http.Response, error) { +func (a *AdminServiceApiService) DeleteProjectDomainAttributes2(ctx context.Context, org string, project string, domain string, body AdminProjectDomainAttributesDeleteRequest) (AdminProjectDomainAttributesDeleteResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Delete") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue FlyteidladminNodeExecution + localVarReturnValue AdminProjectDomainAttributesDeleteResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" - localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.project"+"}", fmt.Sprintf("%v", idExecutionIdProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.domain"+"}", fmt.Sprintf("%v", idExecutionIdDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.name"+"}", fmt.Sprintf("%v", idExecutionIdName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.node_id"+"}", fmt.Sprintf("%v", 
idNodeId), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/org/{org}/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -1647,6 +1609,8 @@ func (a *AdminServiceApiService) GetNodeExecution(ctx context.Context, idExecuti if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -1678,7 +1642,7 @@ func (a *AdminServiceApiService) GetNodeExecution(ctx context.Context, idExecuti } if localVarHttpResponse.StatusCode == 200 { - var v FlyteidladminNodeExecution + var v AdminProjectDomainAttributesDeleteResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -1695,30 +1659,29 @@ func (a *AdminServiceApiService) GetNodeExecution(ctx context.Context, idExecuti } /* -AdminServiceApiService Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. +AdminServiceApiService Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idExecutionIdProject Name of the project the resource belongs to. - * @param idExecutionIdDomain Name of the domain the resource belongs to. 
A domain can be considered as a subset within a specific project. - * @param idExecutionIdName User or system provided value for the resource. - * @param idNodeId + * @param project Unique project id which this set of attributes references. +required + * @param domain Unique domain id which this set of attributes references. +required + * @param workflow Workflow name which this set of attributes references. +required + * @param body -@return AdminNodeExecutionGetDataResponse +@return AdminWorkflowAttributesDeleteResponse */ -func (a *AdminServiceApiService) GetNodeExecutionData(ctx context.Context, idExecutionIdProject string, idExecutionIdDomain string, idExecutionIdName string, idNodeId string) (AdminNodeExecutionGetDataResponse, *http.Response, error) { +func (a *AdminServiceApiService) DeleteWorkflowAttributes(ctx context.Context, project string, domain string, workflow string, body AdminWorkflowAttributesDeleteRequest) (AdminWorkflowAttributesDeleteResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Delete") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminNodeExecutionGetDataResponse + localVarReturnValue AdminWorkflowAttributesDeleteResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" - localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.project"+"}", fmt.Sprintf("%v", idExecutionIdProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.domain"+"}", fmt.Sprintf("%v", idExecutionIdDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.name"+"}", fmt.Sprintf("%v", idExecutionIdName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.node_id"+"}", fmt.Sprintf("%v", idNodeId), -1) + localVarPath := 
a.client.cfg.BasePath + "/api/v1/workflow_attributes/{project}/{domain}/{workflow}" + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"workflow"+"}", fmt.Sprintf("%v", workflow), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -1741,6 +1704,8 @@ func (a *AdminServiceApiService) GetNodeExecutionData(ctx context.Context, idExe if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -1772,7 +1737,7 @@ func (a *AdminServiceApiService) GetNodeExecutionData(ctx context.Context, idExe } if localVarHttpResponse.StatusCode == 200 { - var v AdminNodeExecutionGetDataResponse + var v AdminWorkflowAttributesDeleteResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -1789,39 +1754,36 @@ func (a *AdminServiceApiService) GetNodeExecutionData(ctx context.Context, idExe } /* -AdminServiceApiService Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +AdminServiceApiService Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org key applied to the attributes. * @param project Unique project id which this set of attributes references. 
+required - * @param optional nil or *GetProjectAttributesOpts - Optional Parameters: - * @param "ResourceType" (optional.String) - Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + * @param domain Unique domain id which this set of attributes references. +required + * @param workflow Workflow name which this set of attributes references. 
+required + * @param body -@return AdminProjectAttributesGetResponse +@return AdminWorkflowAttributesDeleteResponse */ - -type GetProjectAttributesOpts struct { - ResourceType optional.String -} - -func (a *AdminServiceApiService) GetProjectAttributes(ctx context.Context, project string, localVarOptionals *GetProjectAttributesOpts) (AdminProjectAttributesGetResponse, *http.Response, error) { +func (a *AdminServiceApiService) DeleteWorkflowAttributes2(ctx context.Context, org string, project string, domain string, workflow string, body AdminWorkflowAttributesDeleteRequest) (AdminWorkflowAttributesDeleteResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Delete") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminProjectAttributesGetResponse + localVarReturnValue AdminWorkflowAttributesDeleteResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/project_attributes/{project}" + localVarPath := a.client.cfg.BasePath + "/api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"workflow"+"}", fmt.Sprintf("%v", workflow), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.ResourceType.IsSet() { - localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -1839,6 +1801,8 @@ func (a 
*AdminServiceApiService) GetProjectAttributes(ctx context.Context, proje if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -1870,7 +1834,7 @@ func (a *AdminServiceApiService) GetProjectAttributes(ctx context.Context, proje } if localVarHttpResponse.StatusCode == 200 { - var v AdminProjectAttributesGetResponse + var v AdminWorkflowAttributesDeleteResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -1887,40 +1851,42 @@ func (a *AdminServiceApiService) GetProjectAttributes(ctx context.Context, proje } /* -AdminServiceApiService Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +AdminServiceApiService Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param project Unique project id which this set of attributes references. +required - * @param domain Unique domain id which this set of attributes references. +required - * @param optional nil or *GetProjectDomainAttributesOpts - Optional Parameters: - * @param "ResourceType" (optional.String) - Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. 
- EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' + * @param optional nil or *GetActiveLaunchPlanOpts - Optional Parameters: + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. 
-@return AdminProjectDomainAttributesGetResponse +@return AdminLaunchPlan */ -type GetProjectDomainAttributesOpts struct { - ResourceType optional.String +type GetActiveLaunchPlanOpts struct { + IdOrg optional.String } -func (a *AdminServiceApiService) GetProjectDomainAttributes(ctx context.Context, project string, domain string, localVarOptionals *GetProjectDomainAttributesOpts) (AdminProjectDomainAttributesGetResponse, *http.Response, error) { +func (a *AdminServiceApiService) GetActiveLaunchPlan(ctx context.Context, idProject string, idDomain string, idName string, localVarOptionals *GetActiveLaunchPlanOpts) (AdminLaunchPlan, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminProjectDomainAttributesGetResponse + localVarReturnValue AdminLaunchPlan ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/{project}/{domain}" - localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) - localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.ResourceType.IsSet() { - localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + 
localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -1970,7 +1936,7 @@ func (a *AdminServiceApiService) GetProjectDomainAttributes(ctx context.Context, } if localVarHttpResponse.StatusCode == 200 { - var v AdminProjectDomainAttributesGetResponse + var v AdminLaunchPlan err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -1987,45 +1953,35 @@ func (a *AdminServiceApiService) GetProjectDomainAttributes(ctx context.Context, } /* -AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.Task` definition. +AdminServiceApiService Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. * @param idProject Name of the project the resource belongs to. * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. - * @param idVersion Specific version of the resource. - * @param optional nil or *GetTaskOpts - Optional Parameters: - * @param "IdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param idName User provided value for the resource. 
The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' -@return AdminTask +@return AdminLaunchPlan */ - -type GetTaskOpts struct { - IdResourceType optional.String -} - -func (a *AdminServiceApiService) GetTask(ctx context.Context, idProject string, idDomain string, idName string, idVersion string, localVarOptionals *GetTaskOpts) (AdminTask, *http.Response, error) { +func (a *AdminServiceApiService) GetActiveLaunchPlan2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string) (AdminLaunchPlan, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminTask + localVarReturnValue AdminLaunchPlan ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}" + localVarPath := a.client.cfg.BasePath + "/api/v1/active_launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.IdResourceType.IsSet() { - localVarQueryParams.Add("id.resource_type", parameterToString(localVarOptionals.IdResourceType.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} 
@@ -2074,7 +2030,7 @@ func (a *AdminServiceApiService) GetTask(ctx context.Context, idProject string, } if localVarHttpResponse.StatusCode == 200 { - var v AdminTask + var v AdminLaunchPlan err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -2091,54 +2047,46 @@ func (a *AdminServiceApiService) GetTask(ctx context.Context, idProject string, } /* -AdminServiceApiService Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. +AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idNodeExecutionIdExecutionIdProject Name of the project the resource belongs to. - * @param idNodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idNodeExecutionIdExecutionIdName User or system provided value for the resource. - * @param idNodeExecutionIdNodeId - * @param idTaskIdProject Name of the project the resource belongs to. - * @param idTaskIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idTaskIdName User provided value for the resource. - * @param idTaskIdVersion Specific version of the resource. - * @param idRetryAttempt - * @param optional nil or *GetTaskExecutionOpts - Optional Parameters: - * @param "IdTaskIdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param idResourceType Identifies the specific type of resource that this identifier corresponds to. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. + * @param idVersion Specific version of the resource. + * @param optional nil or *GetDescriptionEntityOpts - Optional Parameters: + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. -@return FlyteidladminTaskExecution +@return AdminDescriptionEntity */ -type GetTaskExecutionOpts struct { - IdTaskIdResourceType optional.String +type GetDescriptionEntityOpts struct { + IdOrg optional.String } -func (a *AdminServiceApiService) GetTaskExecution(ctx context.Context, idNodeExecutionIdExecutionIdProject string, idNodeExecutionIdExecutionIdDomain string, idNodeExecutionIdExecutionIdName string, idNodeExecutionIdNodeId string, idTaskIdProject string, idTaskIdDomain string, idTaskIdName string, idTaskIdVersion string, idRetryAttempt int64, localVarOptionals *GetTaskExecutionOpts) (FlyteidladminTaskExecution, *http.Response, error) { +func (a *AdminServiceApiService) GetDescriptionEntity(ctx context.Context, idResourceType string, idProject string, idDomain string, idName string, idVersion string, localVarOptionals *GetDescriptionEntityOpts) (AdminDescriptionEntity, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue FlyteidladminTaskExecution + localVarReturnValue AdminDescriptionEntity ) // create path and map variables - localVarPath := a.client.cfg.BasePath + 
"/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" - localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.project"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.node_id"+"}", fmt.Sprintf("%v", idNodeExecutionIdNodeId), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.project"+"}", fmt.Sprintf("%v", idTaskIdProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.domain"+"}", fmt.Sprintf("%v", idTaskIdDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.name"+"}", fmt.Sprintf("%v", idTaskIdName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.version"+"}", fmt.Sprintf("%v", idTaskIdVersion), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.retry_attempt"+"}", fmt.Sprintf("%v", idRetryAttempt), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}" + localVarPath = strings.Replace(localVarPath, "{"+"id.resource_type"+"}", fmt.Sprintf("%v", idResourceType), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", 
fmt.Sprintf("%v", idName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.IdTaskIdResourceType.IsSet() { - localVarQueryParams.Add("id.task_id.resource_type", parameterToString(localVarOptionals.IdTaskIdResourceType.Value(), "")) + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -2188,7 +2136,7 @@ func (a *AdminServiceApiService) GetTaskExecution(ctx context.Context, idNodeExe } if localVarHttpResponse.StatusCode == 200 { - var v FlyteidladminTaskExecution + var v AdminDescriptionEntity err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -2205,55 +2153,39 @@ func (a *AdminServiceApiService) GetTaskExecution(ctx context.Context, idNodeExe } /* -AdminServiceApiService Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. +AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idNodeExecutionIdExecutionIdProject Name of the project the resource belongs to. - * @param idNodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idNodeExecutionIdExecutionIdName User or system provided value for the resource. - * @param idNodeExecutionIdNodeId - * @param idTaskIdProject Name of the project the resource belongs to. 
- * @param idTaskIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idTaskIdName User provided value for the resource. - * @param idTaskIdVersion Specific version of the resource. - * @param idRetryAttempt - * @param optional nil or *GetTaskExecutionDataOpts - Optional Parameters: - * @param "IdTaskIdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param idOrg Optional, org key applied to the resource. + * @param idResourceType Identifies the specific type of resource that this identifier corresponds to. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. + * @param idVersion Specific version of the resource. 
-@return AdminTaskExecutionGetDataResponse +@return AdminDescriptionEntity */ - -type GetTaskExecutionDataOpts struct { - IdTaskIdResourceType optional.String -} - -func (a *AdminServiceApiService) GetTaskExecutionData(ctx context.Context, idNodeExecutionIdExecutionIdProject string, idNodeExecutionIdExecutionIdDomain string, idNodeExecutionIdExecutionIdName string, idNodeExecutionIdNodeId string, idTaskIdProject string, idTaskIdDomain string, idTaskIdName string, idTaskIdVersion string, idRetryAttempt int64, localVarOptionals *GetTaskExecutionDataOpts) (AdminTaskExecutionGetDataResponse, *http.Response, error) { +func (a *AdminServiceApiService) GetDescriptionEntity2(ctx context.Context, idOrg string, idResourceType string, idProject string, idDomain string, idName string, idVersion string) (AdminDescriptionEntity, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminTaskExecutionGetDataResponse + localVarReturnValue AdminDescriptionEntity ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" - localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.project"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdName), -1) - localVarPath = strings.Replace(localVarPath, 
"{"+"id.node_execution_id.node_id"+"}", fmt.Sprintf("%v", idNodeExecutionIdNodeId), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.project"+"}", fmt.Sprintf("%v", idTaskIdProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.domain"+"}", fmt.Sprintf("%v", idTaskIdDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.name"+"}", fmt.Sprintf("%v", idTaskIdName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.version"+"}", fmt.Sprintf("%v", idTaskIdVersion), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.retry_attempt"+"}", fmt.Sprintf("%v", idRetryAttempt), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/description_entities/org/{id.org}/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.resource_type"+"}", fmt.Sprintf("%v", idResourceType), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.IdTaskIdResourceType.IsSet() { - localVarQueryParams.Add("id.task_id.resource_type", parameterToString(localVarOptionals.IdTaskIdResourceType.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -2302,7 +2234,7 @@ func (a *AdminServiceApiService) GetTaskExecutionData(ctx context.Context, idNod } if localVarHttpResponse.StatusCode == 
200 { - var v AdminTaskExecutionGetDataResponse + var v AdminDescriptionEntity err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -2319,27 +2251,45 @@ func (a *AdminServiceApiService) GetTaskExecutionData(ctx context.Context, idNod } /* -AdminServiceApiService +AdminServiceApiService Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idExecutionIdProject Name of the project the resource belongs to. + * @param idExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idExecutionIdName User or system provided value for the resource. + * @param idNodeId + * @param optional nil or *GetDynamicNodeWorkflowOpts - Optional Parameters: + * @param "IdExecutionIdOrg" (optional.String) - Optional, org key applied to the resource. 
-@return AdminGetVersionResponse +@return AdminDynamicNodeWorkflowResponse */ -func (a *AdminServiceApiService) GetVersion(ctx context.Context) (AdminGetVersionResponse, *http.Response, error) { + +type GetDynamicNodeWorkflowOpts struct { + IdExecutionIdOrg optional.String +} + +func (a *AdminServiceApiService) GetDynamicNodeWorkflow(ctx context.Context, idExecutionIdProject string, idExecutionIdDomain string, idExecutionIdName string, idNodeId string, localVarOptionals *GetDynamicNodeWorkflowOpts) (AdminDynamicNodeWorkflowResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminGetVersionResponse + localVarReturnValue AdminDynamicNodeWorkflowResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/version" + localVarPath := a.client.cfg.BasePath + "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow" + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.project"+"}", fmt.Sprintf("%v", idExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.domain"+"}", fmt.Sprintf("%v", idExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.name"+"}", fmt.Sprintf("%v", idExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_id"+"}", fmt.Sprintf("%v", idNodeId), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} + if localVarOptionals != nil && localVarOptionals.IdExecutionIdOrg.IsSet() { + localVarQueryParams.Add("id.execution_id.org", parameterToString(localVarOptionals.IdExecutionIdOrg.Value(), "")) + } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -2388,7 +2338,7 @@ func (a 
*AdminServiceApiService) GetVersion(ctx context.Context) (AdminGetVersio } if localVarHttpResponse.StatusCode == 200 { - var v AdminGetVersionResponse + var v AdminDynamicNodeWorkflowResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -2405,45 +2355,37 @@ func (a *AdminServiceApiService) GetVersion(ctx context.Context) (AdminGetVersio } /* -AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. +AdminServiceApiService Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. - * @param idVersion Specific version of the resource. - * @param optional nil or *GetWorkflowOpts - Optional Parameters: - * @param "IdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param idExecutionIdOrg Optional, org key applied to the resource. + * @param idExecutionIdProject Name of the project the resource belongs to. + * @param idExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idExecutionIdName User or system provided value for the resource. 
+ * @param idNodeId -@return AdminWorkflow +@return AdminDynamicNodeWorkflowResponse */ - -type GetWorkflowOpts struct { - IdResourceType optional.String -} - -func (a *AdminServiceApiService) GetWorkflow(ctx context.Context, idProject string, idDomain string, idName string, idVersion string, localVarOptionals *GetWorkflowOpts) (AdminWorkflow, *http.Response, error) { +func (a *AdminServiceApiService) GetDynamicNodeWorkflow2(ctx context.Context, idExecutionIdOrg string, idExecutionIdProject string, idExecutionIdDomain string, idExecutionIdName string, idNodeId string) (AdminDynamicNodeWorkflowResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminWorkflow + localVarReturnValue AdminDynamicNodeWorkflowResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}" - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow" + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.org"+"}", fmt.Sprintf("%v", idExecutionIdOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.project"+"}", fmt.Sprintf("%v", idExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.domain"+"}", fmt.Sprintf("%v", idExecutionIdDomain), -1) + localVarPath = 
strings.Replace(localVarPath, "{"+"id.execution_id.name"+"}", fmt.Sprintf("%v", idExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_id"+"}", fmt.Sprintf("%v", idNodeId), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.IdResourceType.IsSet() { - localVarQueryParams.Add("id.resource_type", parameterToString(localVarOptionals.IdResourceType.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -2492,7 +2434,7 @@ func (a *AdminServiceApiService) GetWorkflow(ctx context.Context, idProject stri } if localVarHttpResponse.StatusCode == 200 { - var v AdminWorkflow + var v AdminDynamicNodeWorkflowResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -2509,42 +2451,42 @@ func (a *AdminServiceApiService) GetWorkflow(ctx context.Context, idProject stri } /* -AdminServiceApiService Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. +AdminServiceApiService Fetches a :ref:`ref_flyteidl.admin.Execution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param project Unique project id which this set of attributes references. +required - * @param domain Unique domain id which this set of attributes references. +required - * @param workflow Workflow name which this set of attributes references. +required - * @param optional nil or *GetWorkflowAttributesOpts - Optional Parameters: - * @param "ResourceType" (optional.String) - Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. 
- CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User or system provided value for the resource. + * @param optional nil or *GetExecutionOpts - Optional Parameters: + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. 
-@return AdminWorkflowAttributesGetResponse +@return AdminExecution */ -type GetWorkflowAttributesOpts struct { - ResourceType optional.String +type GetExecutionOpts struct { + IdOrg optional.String } -func (a *AdminServiceApiService) GetWorkflowAttributes(ctx context.Context, project string, domain string, workflow string, localVarOptionals *GetWorkflowAttributesOpts) (AdminWorkflowAttributesGetResponse, *http.Response, error) { +func (a *AdminServiceApiService) GetExecution(ctx context.Context, idProject string, idDomain string, idName string, localVarOptionals *GetExecutionOpts) (AdminExecution, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminWorkflowAttributesGetResponse + localVarReturnValue AdminExecution ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/workflow_attributes/{project}/{domain}/{workflow}" - localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) - localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"workflow"+"}", fmt.Sprintf("%v", workflow), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.ResourceType.IsSet() { - localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) + if 
localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -2594,7 +2536,7 @@ func (a *AdminServiceApiService) GetWorkflowAttributes(ctx context.Context, proj } if localVarHttpResponse.StatusCode == 200 { - var v AdminWorkflowAttributesGetResponse + var v AdminExecution err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -2611,56 +2553,35 @@ func (a *AdminServiceApiService) GetWorkflowAttributes(ctx context.Context, proj } /* -AdminServiceApiService List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. +AdminServiceApiService Fetches a :ref:`ref_flyteidl.admin.Execution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param project Name of the project that contains the identifiers. +required. - * @param domain Name of the domain the identifiers belongs to within the project. +required. - * @param optional nil or *ListActiveLaunchPlansOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idOrg Optional, org key applied to the resource. + * @param idProject Name of the project the resource belongs to. 
+ * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User or system provided value for the resource. -@return AdminLaunchPlanList +@return AdminExecution */ - -type ListActiveLaunchPlansOpts struct { - Limit optional.Int64 - Token optional.String - SortByKey optional.String - SortByDirection optional.String -} - -func (a *AdminServiceApiService) ListActiveLaunchPlans(ctx context.Context, project string, domain string, localVarOptionals *ListActiveLaunchPlansOpts) (AdminLaunchPlanList, *http.Response, error) { +func (a *AdminServiceApiService) GetExecution2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string) (AdminExecution, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminLaunchPlanList + localVarReturnValue AdminExecution ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/active_launch_plans/{project}/{domain}" - localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) - localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && 
localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -2709,7 +2630,7 @@ func (a *AdminServiceApiService) ListActiveLaunchPlans(ctx context.Context, proj } if localVarHttpResponse.StatusCode == 200 { - var v AdminLaunchPlanList + var v AdminExecution err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -2726,42 +2647,32 @@ func (a *AdminServiceApiService) ListActiveLaunchPlans(ctx context.Context, proj } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. +AdminServiceApiService Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param resourceType Identifies the specific type of resource that this identifier corresponds to. * @param idProject Name of the project the resource belongs to. * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. 
+optional - in certain contexts - like 'List API', 'Launch plans' - * @param optional nil or *ListDescriptionEntitiesOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idName User or system provided value for the resource. + * @param optional nil or *GetExecutionDataOpts - Optional Parameters: + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. 
-@return AdminDescriptionEntityList +@return AdminWorkflowExecutionGetDataResponse */ -type ListDescriptionEntitiesOpts struct { - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String +type GetExecutionDataOpts struct { + IdOrg optional.String } -func (a *AdminServiceApiService) ListDescriptionEntities(ctx context.Context, resourceType string, idProject string, idDomain string, idName string, localVarOptionals *ListDescriptionEntitiesOpts) (AdminDescriptionEntityList, *http.Response, error) { +func (a *AdminServiceApiService) GetExecutionData(ctx context.Context, idProject string, idDomain string, idName string, localVarOptionals *GetExecutionDataOpts) (AdminWorkflowExecutionGetDataResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminDescriptionEntityList + localVarReturnValue AdminWorkflowExecutionGetDataResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" - localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/data/executions/{id.project}/{id.domain}/{id.name}" localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) @@ -2770,20 +2681,8 @@ func (a *AdminServiceApiService) ListDescriptionEntities(ctx context.Context, re localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", 
parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -2833,7 +2732,7 @@ func (a *AdminServiceApiService) ListDescriptionEntities(ctx context.Context, re } if localVarHttpResponse.StatusCode == 200 { - var v AdminDescriptionEntityList + var v AdminWorkflowExecutionGetDataResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -2850,68 +2749,35 @@ func (a *AdminServiceApiService) ListDescriptionEntities(ctx context.Context, re } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. +AdminServiceApiService Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param resourceType Identifies the specific type of resource that this identifier corresponds to. + * @param idOrg Optional, org key applied to the resource. 
* @param idProject Name of the project the resource belongs to. * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param optional nil or *ListDescriptionEntities2Opts - Optional Parameters: - * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idName User or system provided value for the resource. 
-@return AdminDescriptionEntityList +@return AdminWorkflowExecutionGetDataResponse */ - -type ListDescriptionEntities2Opts struct { - IdName optional.String - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String -} - -func (a *AdminServiceApiService) ListDescriptionEntities2(ctx context.Context, resourceType string, idProject string, idDomain string, localVarOptionals *ListDescriptionEntities2Opts) (AdminDescriptionEntityList, *http.Response, error) { +func (a *AdminServiceApiService) GetExecutionData2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string) (AdminWorkflowExecutionGetDataResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminDescriptionEntityList + localVarReturnValue AdminWorkflowExecutionGetDataResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}" - localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/data/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { - localVarQueryParams.Add("id.name", 
parameterToString(localVarOptionals.IdName.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -2960,7 +2826,7 @@ func (a *AdminServiceApiService) ListDescriptionEntities2(ctx context.Context, r } if localVarHttpResponse.StatusCode == 200 { - var v AdminDescriptionEntityList + var v AdminWorkflowExecutionGetDataResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -2977,65 +2843,47 @@ func (a *AdminServiceApiService) ListDescriptionEntities2(ctx context.Context, r } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Execution`. +AdminServiceApiService Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). * @param idProject Name of the project the resource belongs to. * @param idDomain Name of the domain the resource belongs to. 
A domain can be considered as a subset within a specific project. - * @param optional nil or *ListExecutionsOpts - Optional Parameters: - * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idName User or system provided value for the resource. + * @param optional nil or *GetExecutionMetricsOpts - Optional Parameters: + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Depth" (optional.Int32) - depth defines the number of Flyte entity levels to traverse when breaking down execution details. 
-@return AdminExecutionList +@return AdminWorkflowExecutionGetMetricsResponse */ -type ListExecutionsOpts struct { - IdName optional.String - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String -} +type GetExecutionMetricsOpts struct { + IdOrg optional.String + Depth optional.Int32 +} -func (a *AdminServiceApiService) ListExecutions(ctx context.Context, idProject string, idDomain string, localVarOptionals *ListExecutionsOpts) (AdminExecutionList, *http.Response, error) { +func (a *AdminServiceApiService) GetExecutionMetrics(ctx context.Context, idProject string, idDomain string, idName string, localVarOptionals *GetExecutionMetricsOpts) (AdminWorkflowExecutionGetMetricsResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminExecutionList + localVarReturnValue AdminWorkflowExecutionGetMetricsResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/executions/{id.project}/{id.domain}" + localVarPath := a.client.cfg.BasePath + "/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}" localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { - localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } 
- if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + if localVarOptionals != nil && localVarOptionals.Depth.IsSet() { + localVarQueryParams.Add("depth", parameterToString(localVarOptionals.Depth.Value(), "")) } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -3085,7 +2933,7 @@ func (a *AdminServiceApiService) ListExecutions(ctx context.Context, idProject s } if localVarHttpResponse.StatusCode == 200 { - var v AdminExecutionList + var v AdminWorkflowExecutionGetMetricsResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -3102,60 +2950,44 @@ func (a *AdminServiceApiService) ListExecutions(ctx context.Context, idProject s } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. +AdminServiceApiService Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param project Name of the project that contains the identifiers. 
+required - * @param domain Name of the domain the identifiers belongs to within the project. +required - * @param optional nil or *ListLaunchPlanIdsOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. + * @param idOrg Optional, org key applied to the resource. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User or system provided value for the resource. + * @param optional nil or *GetExecutionMetrics2Opts - Optional Parameters: + * @param "Depth" (optional.Int32) - depth defines the number of Flyte entity levels to traverse when breaking down execution details. 
-@return AdminNamedEntityIdentifierList +@return AdminWorkflowExecutionGetMetricsResponse */ -type ListLaunchPlanIdsOpts struct { - Limit optional.Int64 - Token optional.String - SortByKey optional.String - SortByDirection optional.String - Filters optional.String +type GetExecutionMetrics2Opts struct { + Depth optional.Int32 } -func (a *AdminServiceApiService) ListLaunchPlanIds(ctx context.Context, project string, domain string, localVarOptionals *ListLaunchPlanIdsOpts) (AdminNamedEntityIdentifierList, *http.Response, error) { +func (a *AdminServiceApiService) GetExecutionMetrics2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string, localVarOptionals *GetExecutionMetrics2Opts) (AdminWorkflowExecutionGetMetricsResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminNamedEntityIdentifierList + localVarReturnValue AdminWorkflowExecutionGetMetricsResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plan_ids/{project}/{domain}" - localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) - localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/metrics/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if 
localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + if localVarOptionals != nil && localVarOptionals.Depth.IsSet() { + localVarQueryParams.Add("depth", parameterToString(localVarOptionals.Depth.Value(), "")) } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -3205,7 +3037,7 @@ func (a *AdminServiceApiService) ListLaunchPlanIds(ctx context.Context, project } if localVarHttpResponse.StatusCode == 200 { - var v AdminNamedEntityIdentifierList + var v AdminWorkflowExecutionGetMetricsResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -3222,62 +3054,49 @@ func (a *AdminServiceApiService) ListLaunchPlanIds(ctx context.Context, project } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. +AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). * @param idProject Name of the project the resource belongs to. 
* @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' - * @param optional nil or *ListLaunchPlansOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idName User provided value for the resource. + * @param idVersion Specific version of the resource. + * @param optional nil or *GetLaunchPlanOpts - Optional Parameters: + * @param "IdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. 
-@return AdminLaunchPlanList +@return AdminLaunchPlan */ -type ListLaunchPlansOpts struct { - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String +type GetLaunchPlanOpts struct { + IdResourceType optional.String + IdOrg optional.String } -func (a *AdminServiceApiService) ListLaunchPlans(ctx context.Context, idProject string, idDomain string, idName string, localVarOptionals *ListLaunchPlansOpts) (AdminLaunchPlanList, *http.Response, error) { +func (a *AdminServiceApiService) GetLaunchPlan(ctx context.Context, idProject string, idDomain string, idName string, idVersion string, localVarOptionals *GetLaunchPlanOpts) (AdminLaunchPlan, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminLaunchPlanList + localVarReturnValue AdminLaunchPlan ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}" + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}" localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", 
parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + if localVarOptionals != nil && localVarOptionals.IdResourceType.IsSet() { + localVarQueryParams.Add("id.resource_type", parameterToString(localVarOptionals.IdResourceType.Value(), "")) } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -3327,7 +3146,7 @@ func (a *AdminServiceApiService) ListLaunchPlans(ctx context.Context, idProject } if localVarHttpResponse.StatusCode == 200 { - var v AdminLaunchPlanList + var v AdminLaunchPlan err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -3344,65 +3163,46 @@ func (a *AdminServiceApiService) ListLaunchPlans(ctx context.Context, idProject } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. +AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. * @param idProject Name of the project the resource belongs to. * @param idDomain Name of the domain the resource belongs to. 
A domain can be considered as a subset within a specific project. - * @param optional nil or *ListLaunchPlans2Opts - Optional Parameters: - * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idName User provided value for the resource. + * @param idVersion Specific version of the resource. + * @param optional nil or *GetLaunchPlan2Opts - Optional Parameters: + * @param "IdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects -@return AdminLaunchPlanList +@return AdminLaunchPlan */ -type ListLaunchPlans2Opts struct { - IdName optional.String - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String +type GetLaunchPlan2Opts struct { + IdResourceType optional.String } -func (a *AdminServiceApiService) ListLaunchPlans2(ctx context.Context, idProject string, idDomain string, localVarOptionals *ListLaunchPlans2Opts) (AdminLaunchPlanList, *http.Response, error) { +func (a *AdminServiceApiService) GetLaunchPlan2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string, idVersion string, localVarOptionals *GetLaunchPlan2Opts) (AdminLaunchPlan, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminLaunchPlanList + localVarReturnValue AdminLaunchPlan ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/{id.project}/{id.domain}" + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && 
localVarOptionals.IdName.IsSet() { - localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + if localVarOptionals != nil && localVarOptionals.IdResourceType.IsSet() { + localVarQueryParams.Add("id.resource_type", parameterToString(localVarOptionals.IdResourceType.Value(), "")) } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -3452,7 +3252,7 @@ func (a *AdminServiceApiService) ListLaunchPlans2(ctx context.Context, idProject } if localVarHttpResponse.StatusCode == 200 { - var v AdminLaunchPlanList + var v AdminLaunchPlan err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -3469,36 +3269,44 @@ func (a *AdminServiceApiService) ListLaunchPlans2(ctx context.Context, idProject } /* -AdminServiceApiService Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. +AdminServiceApiService Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. 
* @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param optional nil or *ListMatchableAttributesOpts - Optional Parameters: - * @param "ResourceType" (optional.String) - +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + * @param resourceType Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. +required + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' + * @param optional nil or *GetNamedEntityOpts - Optional Parameters: + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. 
-@return AdminListMatchableAttributesResponse +@return AdminNamedEntity */ -type ListMatchableAttributesOpts struct { - ResourceType optional.String +type GetNamedEntityOpts struct { + IdOrg optional.String } -func (a *AdminServiceApiService) ListMatchableAttributes(ctx context.Context, localVarOptionals *ListMatchableAttributesOpts) (AdminListMatchableAttributesResponse, *http.Response, error) { +func (a *AdminServiceApiService) GetNamedEntity(ctx context.Context, resourceType string, idProject string, idDomain string, idName string, localVarOptionals *GetNamedEntityOpts) (AdminNamedEntity, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminListMatchableAttributesResponse + localVarReturnValue AdminNamedEntity ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/matchable_attributes" + localVarPath := a.client.cfg.BasePath + "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.ResourceType.IsSet() { - localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) } // to determine the Content-Type header 
localVarHttpContentTypes := []string{"application/json"} @@ -3548,7 +3356,7 @@ func (a *AdminServiceApiService) ListMatchableAttributes(ctx context.Context, lo } if localVarHttpResponse.StatusCode == 200 { - var v AdminListMatchableAttributesResponse + var v AdminNamedEntity err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -3565,63 +3373,37 @@ func (a *AdminServiceApiService) ListMatchableAttributes(ctx context.Context, lo } /* -AdminServiceApiService Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. +AdminServiceApiService Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param resourceType Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required - * @param project Name of the project that contains the identifiers. +required - * @param domain Name of the domain the identifiers belongs to within the project. - * @param optional nil or *ListNamedEntitiesOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. - * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. + * @param idOrg Optional, org key applied to the resource. + * @param resourceType Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. 
+required + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' -@return AdminNamedEntityList +@return AdminNamedEntity */ - -type ListNamedEntitiesOpts struct { - Limit optional.Int64 - Token optional.String - SortByKey optional.String - SortByDirection optional.String - Filters optional.String -} - -func (a *AdminServiceApiService) ListNamedEntities(ctx context.Context, resourceType string, project string, domain string, localVarOptionals *ListNamedEntitiesOpts) (AdminNamedEntityList, *http.Response, error) { +func (a *AdminServiceApiService) GetNamedEntity2(ctx context.Context, idOrg string, resourceType string, idProject string, idDomain string, idName string) (AdminNamedEntity, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminNamedEntityList + localVarReturnValue AdminNamedEntity ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/named_entities/{resource_type}/{project}/{domain}" + localVarPath := a.client.cfg.BasePath + "/api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) - localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) - localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + localVarPath = strings.Replace(localVarPath, 
"{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -3670,7 +3452,7 @@ func (a *AdminServiceApiService) ListNamedEntities(ctx context.Context, resource } if localVarHttpResponse.StatusCode == 200 { - var v AdminNamedEntityList + var v AdminNamedEntity err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -3687,67 +3469,44 @@ func (a *AdminServiceApiService) ListNamedEntities(ctx context.Context, resource } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. +AdminServiceApiService Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. 
* @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param workflowExecutionIdProject Name of the project the resource belongs to. - * @param workflowExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param workflowExecutionIdName User or system provided value for the resource. - * @param optional nil or *ListNodeExecutionsOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - * @param "UniqueParentId" (optional.String) - Unique identifier of the parent node in the execution +optional. + * @param idExecutionIdProject Name of the project the resource belongs to. + * @param idExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idExecutionIdName User or system provided value for the resource. + * @param idNodeId + * @param optional nil or *GetNodeExecutionOpts - Optional Parameters: + * @param "IdExecutionIdOrg" (optional.String) - Optional, org key applied to the resource. 
-@return AdminNodeExecutionList +@return FlyteidladminNodeExecution */ -type ListNodeExecutionsOpts struct { - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String - UniqueParentId optional.String +type GetNodeExecutionOpts struct { + IdExecutionIdOrg optional.String } -func (a *AdminServiceApiService) ListNodeExecutions(ctx context.Context, workflowExecutionIdProject string, workflowExecutionIdDomain string, workflowExecutionIdName string, localVarOptionals *ListNodeExecutionsOpts) (AdminNodeExecutionList, *http.Response, error) { +func (a *AdminServiceApiService) GetNodeExecution(ctx context.Context, idExecutionIdProject string, idExecutionIdDomain string, idExecutionIdName string, idNodeId string, localVarOptionals *GetNodeExecutionOpts) (FlyteidladminNodeExecution, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminNodeExecutionList + localVarReturnValue FlyteidladminNodeExecution ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}" - localVarPath = strings.Replace(localVarPath, "{"+"workflow_execution_id.project"+"}", fmt.Sprintf("%v", workflowExecutionIdProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"workflow_execution_id.domain"+"}", fmt.Sprintf("%v", workflowExecutionIdDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"workflow_execution_id.name"+"}", fmt.Sprintf("%v", workflowExecutionIdName), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.project"+"}", fmt.Sprintf("%v", idExecutionIdProject), -1) + 
localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.domain"+"}", fmt.Sprintf("%v", idExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.name"+"}", fmt.Sprintf("%v", idExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_id"+"}", fmt.Sprintf("%v", idNodeId), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.UniqueParentId.IsSet() { - localVarQueryParams.Add("unique_parent_id", parameterToString(localVarOptionals.UniqueParentId.Value(), "")) + if localVarOptionals != nil && localVarOptionals.IdExecutionIdOrg.IsSet() { + localVarQueryParams.Add("id.execution_id.org", parameterToString(localVarOptionals.IdExecutionIdOrg.Value(), "")) } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -3797,7 +3556,7 @@ func (a *AdminServiceApiService) ListNodeExecutions(ctx context.Context, workflo } if localVarHttpResponse.StatusCode == 200 { - var v AdminNodeExecutionList + var v 
FlyteidladminNodeExecution err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -3814,80 +3573,37 @@ func (a *AdminServiceApiService) ListNodeExecutions(ctx context.Context, workflo } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. +AdminServiceApiService Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param taskExecutionIdNodeExecutionIdExecutionIdProject Name of the project the resource belongs to. - * @param taskExecutionIdNodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param taskExecutionIdNodeExecutionIdExecutionIdName User or system provided value for the resource. - * @param taskExecutionIdNodeExecutionIdNodeId - * @param taskExecutionIdTaskIdProject Name of the project the resource belongs to. - * @param taskExecutionIdTaskIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param taskExecutionIdTaskIdName User provided value for the resource. - * @param taskExecutionIdTaskIdVersion Specific version of the resource. - * @param taskExecutionIdRetryAttempt - * @param optional nil or *ListNodeExecutionsForTaskOpts - Optional Parameters: - * @param "TaskExecutionIdTaskIdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idExecutionIdOrg Optional, org key applied to the resource. + * @param idExecutionIdProject Name of the project the resource belongs to. + * @param idExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idExecutionIdName User or system provided value for the resource. 
+ * @param idNodeId -@return AdminNodeExecutionList +@return FlyteidladminNodeExecution */ - -type ListNodeExecutionsForTaskOpts struct { - TaskExecutionIdTaskIdResourceType optional.String - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String -} - -func (a *AdminServiceApiService) ListNodeExecutionsForTask(ctx context.Context, taskExecutionIdNodeExecutionIdExecutionIdProject string, taskExecutionIdNodeExecutionIdExecutionIdDomain string, taskExecutionIdNodeExecutionIdExecutionIdName string, taskExecutionIdNodeExecutionIdNodeId string, taskExecutionIdTaskIdProject string, taskExecutionIdTaskIdDomain string, taskExecutionIdTaskIdName string, taskExecutionIdTaskIdVersion string, taskExecutionIdRetryAttempt int64, localVarOptionals *ListNodeExecutionsForTaskOpts) (AdminNodeExecutionList, *http.Response, error) { +func (a *AdminServiceApiService) GetNodeExecution2(ctx context.Context, idExecutionIdOrg string, idExecutionIdProject string, idExecutionIdDomain string, idExecutionIdName string, idNodeId string) (FlyteidladminNodeExecution, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminNodeExecutionList + localVarReturnValue FlyteidladminNodeExecution ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}" - localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.execution_id.project"+"}", 
fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdExecutionIdProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdExecutionIdDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdExecutionIdName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.node_id"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdNodeId), -1) - localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.project"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.domain"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.name"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.version"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdVersion), -1) - localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.retry_attempt"+"}", fmt.Sprintf("%v", taskExecutionIdRetryAttempt), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.org"+"}", fmt.Sprintf("%v", idExecutionIdOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.project"+"}", fmt.Sprintf("%v", idExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.domain"+"}", fmt.Sprintf("%v", idExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.name"+"}", fmt.Sprintf("%v", idExecutionIdName), -1) + 
localVarPath = strings.Replace(localVarPath, "{"+"id.node_id"+"}", fmt.Sprintf("%v", idNodeId), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.TaskExecutionIdTaskIdResourceType.IsSet() { - localVarQueryParams.Add("task_execution_id.task_id.resource_type", parameterToString(localVarOptionals.TaskExecutionIdTaskIdResourceType.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -3936,7 +3652,7 @@ func (a *AdminServiceApiService) ListNodeExecutionsForTask(ctx context.Context, } if localVarHttpResponse.StatusCode == 200 { - var v AdminNodeExecutionList + var v FlyteidladminNodeExecution err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -3953,56 +3669,44 @@ func (a *AdminServiceApiService) ListNodeExecutionsForTask(ctx context.Context, } /* -AdminServiceApiService Fetches a list of :ref:`ref_flyteidl.admin.Project` +AdminServiceApiService Fetches input 
and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param optional nil or *ListProjectsOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of projects to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idExecutionIdProject Name of the project the resource belongs to. + * @param idExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idExecutionIdName User or system provided value for the resource. + * @param idNodeId + * @param optional nil or *GetNodeExecutionDataOpts - Optional Parameters: + * @param "IdExecutionIdOrg" (optional.String) - Optional, org key applied to the resource. 
-@return AdminProjects +@return AdminNodeExecutionGetDataResponse */ -type ListProjectsOpts struct { - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String +type GetNodeExecutionDataOpts struct { + IdExecutionIdOrg optional.String } -func (a *AdminServiceApiService) ListProjects(ctx context.Context, localVarOptionals *ListProjectsOpts) (AdminProjects, *http.Response, error) { +func (a *AdminServiceApiService) GetNodeExecutionData(ctx context.Context, idExecutionIdProject string, idExecutionIdDomain string, idExecutionIdName string, idNodeId string, localVarOptionals *GetNodeExecutionDataOpts) (AdminNodeExecutionGetDataResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminProjects + localVarReturnValue AdminNodeExecutionGetDataResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/projects" + localVarPath := a.client.cfg.BasePath + "/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.project"+"}", fmt.Sprintf("%v", idExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.domain"+"}", fmt.Sprintf("%v", idExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.name"+"}", fmt.Sprintf("%v", idExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_id"+"}", fmt.Sprintf("%v", idNodeId), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != 
nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + if localVarOptionals != nil && localVarOptionals.IdExecutionIdOrg.IsSet() { + localVarQueryParams.Add("id.execution_id.org", parameterToString(localVarOptionals.IdExecutionIdOrg.Value(), "")) } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -4052,7 +3756,7 @@ func (a *AdminServiceApiService) ListProjects(ctx context.Context, localVarOptio } if localVarHttpResponse.StatusCode == 200 { - var v AdminProjects + var v AdminNodeExecutionGetDataResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -4069,65 +3773,37 @@ func (a *AdminServiceApiService) ListProjects(ctx context.Context, localVarOptio } /* -AdminServiceApiService Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. +AdminServiceApiService Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param nodeExecutionIdExecutionIdProject Name of the project the resource belongs to. - * @param nodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. 
- * @param nodeExecutionIdExecutionIdName User or system provided value for the resource. - * @param nodeExecutionIdNodeId - * @param optional nil or *ListTaskExecutionsOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idExecutionIdOrg Optional, org key applied to the resource. + * @param idExecutionIdProject Name of the project the resource belongs to. + * @param idExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idExecutionIdName User or system provided value for the resource. 
+ * @param idNodeId -@return AdminTaskExecutionList +@return AdminNodeExecutionGetDataResponse */ - -type ListTaskExecutionsOpts struct { - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String -} - -func (a *AdminServiceApiService) ListTaskExecutions(ctx context.Context, nodeExecutionIdExecutionIdProject string, nodeExecutionIdExecutionIdDomain string, nodeExecutionIdExecutionIdName string, nodeExecutionIdNodeId string, localVarOptionals *ListTaskExecutionsOpts) (AdminTaskExecutionList, *http.Response, error) { +func (a *AdminServiceApiService) GetNodeExecutionData2(ctx context.Context, idExecutionIdOrg string, idExecutionIdProject string, idExecutionIdDomain string, idExecutionIdName string, idNodeId string) (AdminNodeExecutionGetDataResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Get") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminTaskExecutionList + localVarReturnValue AdminNodeExecutionGetDataResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}" - localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.execution_id.project"+"}", fmt.Sprintf("%v", nodeExecutionIdExecutionIdProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", nodeExecutionIdExecutionIdDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", nodeExecutionIdExecutionIdName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.node_id"+"}", fmt.Sprintf("%v", nodeExecutionIdNodeId), -1) + localVarPath := a.client.cfg.BasePath + 
"/api/v1/data/org/{id.execution_id.org}/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.org"+"}", fmt.Sprintf("%v", idExecutionIdOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.project"+"}", fmt.Sprintf("%v", idExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.domain"+"}", fmt.Sprintf("%v", idExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.execution_id.name"+"}", fmt.Sprintf("%v", idExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_id"+"}", fmt.Sprintf("%v", idNodeId), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -4176,7 +3852,6858 @@ func (a *AdminServiceApiService) ListTaskExecutions(ctx context.Context, nodeExe } if localVarHttpResponse.StatusCode == 200 { - var v AdminTaskExecutionList 
+ var v AdminNodeExecutionGetDataResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param project Unique project id which this set of attributes references. +required + * @param optional nil or *GetProjectAttributesOpts - Optional Parameters: + * @param "ResourceType" (optional.String) - Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + * @param "Org" (optional.String) - Optional, org key applied to the project. 
+ +@return AdminProjectAttributesGetResponse +*/ + +type GetProjectAttributesOpts struct { + ResourceType optional.String + Org optional.String +} + +func (a *AdminServiceApiService) GetProjectAttributes(ctx context.Context, project string, localVarOptionals *GetProjectAttributesOpts) (AdminProjectAttributesGetResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminProjectAttributesGetResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/project_attributes/{project}" + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.ResourceType.IsSet() { + localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Org.IsSet() { + localVarQueryParams.Add("org", parameterToString(localVarOptionals.Org.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, 
localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminProjectAttributesGetResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org key applied to the project. + * @param project Unique project id which this set of attributes references. 
+required + * @param optional nil or *GetProjectAttributes2Opts - Optional Parameters: + * @param "ResourceType" (optional.String) - Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + +@return AdminProjectAttributesGetResponse +*/ + +type GetProjectAttributes2Opts struct { + ResourceType optional.String +} + +func (a *AdminServiceApiService) GetProjectAttributes2(ctx context.Context, org string, project string, localVarOptionals *GetProjectAttributes2Opts) (AdminProjectAttributesGetResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminProjectAttributesGetResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/org/{org}/{project}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && 
localVarOptionals.ResourceType.IsSet() { + localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminProjectAttributesGetResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param project Unique project id which this set of attributes references. +required + * @param domain Unique domain id which this set of attributes references. +required + * @param optional nil or *GetProjectDomainAttributesOpts - Optional Parameters: + * @param "ResourceType" (optional.String) - Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. 
- PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + * @param "Org" (optional.String) - Optional, org key applied to the attributes. + +@return AdminProjectDomainAttributesGetResponse +*/ + +type GetProjectDomainAttributesOpts struct { + ResourceType optional.String + Org optional.String +} + +func (a *AdminServiceApiService) GetProjectDomainAttributes(ctx context.Context, project string, domain string, localVarOptionals *GetProjectDomainAttributesOpts) (AdminProjectDomainAttributesGetResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminProjectDomainAttributesGetResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.ResourceType.IsSet() { + localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Org.IsSet() { + localVarQueryParams.Add("org", parameterToString(localVarOptionals.Org.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if 
localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminProjectDomainAttributesGetResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org key applied to the attributes. + * @param project Unique project id which this set of attributes references. +required + * @param domain Unique domain id which this set of attributes references. +required + * @param optional nil or *GetProjectDomainAttributes2Opts - Optional Parameters: + * @param "ResourceType" (optional.String) - Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. 
- PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + +@return AdminProjectDomainAttributesGetResponse +*/ + +type GetProjectDomainAttributes2Opts struct { + ResourceType optional.String +} + +func (a *AdminServiceApiService) GetProjectDomainAttributes2(ctx context.Context, org string, project string, domain string, localVarOptionals *GetProjectDomainAttributes2Opts) (AdminProjectDomainAttributesGetResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminProjectDomainAttributesGetResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/org/{org}/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.ResourceType.IsSet() { + localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + 
localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminProjectDomainAttributesGetResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.Task` definition. 
+ * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. + * @param idVersion Specific version of the resource. + * @param optional nil or *GetTaskOpts - Optional Parameters: + * @param "IdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. 
+ +@return AdminTask +*/ + +type GetTaskOpts struct { + IdResourceType optional.String + IdOrg optional.String +} + +func (a *AdminServiceApiService) GetTask(ctx context.Context, idProject string, idDomain string, idName string, idVersion string, localVarOptionals *GetTaskOpts) (AdminTask, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminTask + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdResourceType.IsSet() { + localVarQueryParams.Add("id.resource_type", parameterToString(localVarOptionals.IdResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := 
selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminTask + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.Task` definition. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. 
+ * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. + * @param idVersion Specific version of the resource. + * @param optional nil or *GetTask2Opts - Optional Parameters: + * @param "IdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + +@return AdminTask +*/ + +type GetTask2Opts struct { + IdResourceType optional.String +} + +func (a *AdminServiceApiService) GetTask2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string, idVersion string, localVarOptionals *GetTask2Opts) (AdminTask, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminTask + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) 
+ + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdResourceType.IsSet() { + localVarQueryParams.Add("id.resource_type", parameterToString(localVarOptionals.IdResourceType.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminTask + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idNodeExecutionIdExecutionIdProject Name of the project the resource belongs to. + * @param idNodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idNodeExecutionIdExecutionIdName User or system provided value for the resource. + * @param idNodeExecutionIdNodeId + * @param idTaskIdProject Name of the project the resource belongs to. + * @param idTaskIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idTaskIdName User provided value for the resource. + * @param idTaskIdVersion Specific version of the resource. + * @param idRetryAttempt + * @param optional nil or *GetTaskExecutionOpts - Optional Parameters: + * @param "IdTaskIdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. 
- DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param "IdTaskIdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "IdNodeExecutionIdExecutionIdOrg" (optional.String) - Optional, org key applied to the resource. + +@return FlyteidladminTaskExecution +*/ + +type GetTaskExecutionOpts struct { + IdTaskIdResourceType optional.String + IdTaskIdOrg optional.String + IdNodeExecutionIdExecutionIdOrg optional.String +} + +func (a *AdminServiceApiService) GetTaskExecution(ctx context.Context, idNodeExecutionIdExecutionIdProject string, idNodeExecutionIdExecutionIdDomain string, idNodeExecutionIdExecutionIdName string, idNodeExecutionIdNodeId string, idTaskIdProject string, idTaskIdDomain string, idTaskIdName string, idTaskIdVersion string, idRetryAttempt int64, localVarOptionals *GetTaskExecutionOpts) (FlyteidladminTaskExecution, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue FlyteidladminTaskExecution + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.project"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, 
"{"+"id.node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.node_id"+"}", fmt.Sprintf("%v", idNodeExecutionIdNodeId), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.project"+"}", fmt.Sprintf("%v", idTaskIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.domain"+"}", fmt.Sprintf("%v", idTaskIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.name"+"}", fmt.Sprintf("%v", idTaskIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.version"+"}", fmt.Sprintf("%v", idTaskIdVersion), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.retry_attempt"+"}", fmt.Sprintf("%v", idRetryAttempt), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdTaskIdResourceType.IsSet() { + localVarQueryParams.Add("id.task_id.resource_type", parameterToString(localVarOptionals.IdTaskIdResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdTaskIdOrg.IsSet() { + localVarQueryParams.Add("id.task_id.org", parameterToString(localVarOptionals.IdTaskIdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdNodeExecutionIdExecutionIdOrg.IsSet() { + localVarQueryParams.Add("id.node_execution_id.execution_id.org", parameterToString(localVarOptionals.IdNodeExecutionIdExecutionIdOrg.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + 
localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v FlyteidladminTaskExecution + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. 
+ * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idNodeExecutionIdExecutionIdOrg Optional, org key applied to the resource. + * @param idNodeExecutionIdExecutionIdProject Name of the project the resource belongs to. + * @param idNodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idNodeExecutionIdExecutionIdName User or system provided value for the resource. + * @param idNodeExecutionIdNodeId + * @param idTaskIdProject Name of the project the resource belongs to. + * @param idTaskIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idTaskIdName User provided value for the resource. + * @param idTaskIdVersion Specific version of the resource. + * @param idRetryAttempt + * @param optional nil or *GetTaskExecution2Opts - Optional Parameters: + * @param "IdTaskIdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param "IdTaskIdOrg" (optional.String) - Optional, org key applied to the resource. 
+ +@return FlyteidladminTaskExecution +*/ + +type GetTaskExecution2Opts struct { + IdTaskIdResourceType optional.String + IdTaskIdOrg optional.String +} + +func (a *AdminServiceApiService) GetTaskExecution2(ctx context.Context, idNodeExecutionIdExecutionIdOrg string, idNodeExecutionIdExecutionIdProject string, idNodeExecutionIdExecutionIdDomain string, idNodeExecutionIdExecutionIdName string, idNodeExecutionIdNodeId string, idTaskIdProject string, idTaskIdDomain string, idTaskIdName string, idTaskIdVersion string, idRetryAttempt int64, localVarOptionals *GetTaskExecution2Opts) (FlyteidladminTaskExecution, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue FlyteidladminTaskExecution + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/task_executions/org/{id.node_execution_id.execution_id.org}/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.org"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.project"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.node_id"+"}", fmt.Sprintf("%v", idNodeExecutionIdNodeId), -1) + 
localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.project"+"}", fmt.Sprintf("%v", idTaskIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.domain"+"}", fmt.Sprintf("%v", idTaskIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.name"+"}", fmt.Sprintf("%v", idTaskIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.version"+"}", fmt.Sprintf("%v", idTaskIdVersion), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.retry_attempt"+"}", fmt.Sprintf("%v", idRetryAttempt), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdTaskIdResourceType.IsSet() { + localVarQueryParams.Add("id.task_id.resource_type", parameterToString(localVarOptionals.IdTaskIdResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdTaskIdOrg.IsSet() { + localVarQueryParams.Add("id.task_id.org", parameterToString(localVarOptionals.IdTaskIdOrg.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := 
a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v FlyteidladminTaskExecution + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idNodeExecutionIdExecutionIdProject Name of the project the resource belongs to. + * @param idNodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idNodeExecutionIdExecutionIdName User or system provided value for the resource. + * @param idNodeExecutionIdNodeId + * @param idTaskIdProject Name of the project the resource belongs to. 
+ * @param idTaskIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idTaskIdName User provided value for the resource. + * @param idTaskIdVersion Specific version of the resource. + * @param idRetryAttempt + * @param optional nil or *GetTaskExecutionDataOpts - Optional Parameters: + * @param "IdTaskIdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param "IdTaskIdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "IdNodeExecutionIdExecutionIdOrg" (optional.String) - Optional, org key applied to the resource. 
+ +@return AdminTaskExecutionGetDataResponse +*/ + +type GetTaskExecutionDataOpts struct { + IdTaskIdResourceType optional.String + IdTaskIdOrg optional.String + IdNodeExecutionIdExecutionIdOrg optional.String +} + +func (a *AdminServiceApiService) GetTaskExecutionData(ctx context.Context, idNodeExecutionIdExecutionIdProject string, idNodeExecutionIdExecutionIdDomain string, idNodeExecutionIdExecutionIdName string, idNodeExecutionIdNodeId string, idTaskIdProject string, idTaskIdDomain string, idTaskIdName string, idTaskIdVersion string, idRetryAttempt int64, localVarOptionals *GetTaskExecutionDataOpts) (AdminTaskExecutionGetDataResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminTaskExecutionGetDataResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.project"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.node_id"+"}", fmt.Sprintf("%v", idNodeExecutionIdNodeId), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.project"+"}", fmt.Sprintf("%v", idTaskIdProject), -1) + localVarPath = 
strings.Replace(localVarPath, "{"+"id.task_id.domain"+"}", fmt.Sprintf("%v", idTaskIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.name"+"}", fmt.Sprintf("%v", idTaskIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.version"+"}", fmt.Sprintf("%v", idTaskIdVersion), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.retry_attempt"+"}", fmt.Sprintf("%v", idRetryAttempt), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdTaskIdResourceType.IsSet() { + localVarQueryParams.Add("id.task_id.resource_type", parameterToString(localVarOptionals.IdTaskIdResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdTaskIdOrg.IsSet() { + localVarQueryParams.Add("id.task_id.org", parameterToString(localVarOptionals.IdTaskIdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdNodeExecutionIdExecutionIdOrg.IsSet() { + localVarQueryParams.Add("id.node_execution_id.execution_id.org", parameterToString(localVarOptionals.IdNodeExecutionIdExecutionIdOrg.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, 
localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminTaskExecutionGetDataResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idNodeExecutionIdExecutionIdOrg Optional, org key applied to the resource. + * @param idNodeExecutionIdExecutionIdProject Name of the project the resource belongs to. + * @param idNodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. 
+ * @param idNodeExecutionIdExecutionIdName User or system provided value for the resource. + * @param idNodeExecutionIdNodeId + * @param idTaskIdProject Name of the project the resource belongs to. + * @param idTaskIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idTaskIdName User provided value for the resource. + * @param idTaskIdVersion Specific version of the resource. + * @param idRetryAttempt + * @param optional nil or *GetTaskExecutionData2Opts - Optional Parameters: + * @param "IdTaskIdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param "IdTaskIdOrg" (optional.String) - Optional, org key applied to the resource. 
+ +@return AdminTaskExecutionGetDataResponse +*/ + +type GetTaskExecutionData2Opts struct { + IdTaskIdResourceType optional.String + IdTaskIdOrg optional.String +} + +func (a *AdminServiceApiService) GetTaskExecutionData2(ctx context.Context, idNodeExecutionIdExecutionIdOrg string, idNodeExecutionIdExecutionIdProject string, idNodeExecutionIdExecutionIdDomain string, idNodeExecutionIdExecutionIdName string, idNodeExecutionIdNodeId string, idTaskIdProject string, idTaskIdDomain string, idTaskIdName string, idTaskIdVersion string, idRetryAttempt int64, localVarOptionals *GetTaskExecutionData2Opts) (AdminTaskExecutionGetDataResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminTaskExecutionGetDataResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/data/org/{id.node_execution_id.execution_id.org}/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.org"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.project"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", idNodeExecutionIdExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.node_execution_id.node_id"+"}", 
fmt.Sprintf("%v", idNodeExecutionIdNodeId), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.project"+"}", fmt.Sprintf("%v", idTaskIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.domain"+"}", fmt.Sprintf("%v", idTaskIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.name"+"}", fmt.Sprintf("%v", idTaskIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.task_id.version"+"}", fmt.Sprintf("%v", idTaskIdVersion), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.retry_attempt"+"}", fmt.Sprintf("%v", idRetryAttempt), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdTaskIdResourceType.IsSet() { + localVarQueryParams.Add("id.task_id.resource_type", parameterToString(localVarOptionals.IdTaskIdResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdTaskIdOrg.IsSet() { + localVarQueryParams.Add("id.task_id.org", parameterToString(localVarOptionals.IdTaskIdOrg.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return 
localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminTaskExecutionGetDataResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ +@return AdminGetVersionResponse +*/ +func (a *AdminServiceApiService) GetVersion(ctx context.Context) (AdminGetVersionResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminGetVersionResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/version" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminGetVersionResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. + * @param idVersion Specific version of the resource. + * @param optional nil or *GetWorkflowOpts - Optional Parameters: + * @param "IdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. + +@return AdminWorkflow +*/ + +type GetWorkflowOpts struct { + IdResourceType optional.String + IdOrg optional.String +} + +func (a *AdminServiceApiService) GetWorkflow(ctx context.Context, idProject string, idDomain string, idName string, idVersion string, localVarOptionals *GetWorkflowOpts) (AdminWorkflow, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminWorkflow + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdResourceType.IsSet() { + localVarQueryParams.Add("id.resource_type", parameterToString(localVarOptionals.IdResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" 
{ + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminWorkflow + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. 
+ * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. + * @param idVersion Specific version of the resource. + * @param optional nil or *GetWorkflow2Opts - Optional Parameters: + * @param "IdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + +@return AdminWorkflow +*/ + +type GetWorkflow2Opts struct { + IdResourceType optional.String +} + +func (a *AdminServiceApiService) GetWorkflow2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string, idVersion string, localVarOptionals *GetWorkflow2Opts) (AdminWorkflow, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminWorkflow + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, 
"{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdResourceType.IsSet() { + localVarQueryParams.Add("id.resource_type", parameterToString(localVarOptionals.IdResourceType.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminWorkflow + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param project Unique project id which this set of attributes references. +required + * @param domain Unique domain id which this set of attributes references. +required + * @param workflow Workflow name which this set of attributes references. +required + * @param optional nil or *GetWorkflowAttributesOpts - Optional Parameters: + * @param "ResourceType" (optional.String) - Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. 
- PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + * @param "Org" (optional.String) - Optional, org key applied to the attributes. + +@return AdminWorkflowAttributesGetResponse +*/ + +type GetWorkflowAttributesOpts struct { + ResourceType optional.String + Org optional.String +} + +func (a *AdminServiceApiService) GetWorkflowAttributes(ctx context.Context, project string, domain string, workflow string, localVarOptionals *GetWorkflowAttributesOpts) (AdminWorkflowAttributesGetResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminWorkflowAttributesGetResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/workflow_attributes/{project}/{domain}/{workflow}" + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"workflow"+"}", fmt.Sprintf("%v", workflow), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.ResourceType.IsSet() { + localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Org.IsSet() { + localVarQueryParams.Add("org", parameterToString(localVarOptionals.Org.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type 
header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminWorkflowAttributesGetResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org key applied to the attributes. + * @param project Unique project id which this set of attributes references. +required + * @param domain Unique domain id which this set of attributes references. +required + * @param workflow Workflow name which this set of attributes references. +required + * @param optional nil or *GetWorkflowAttributes2Opts - Optional Parameters: + * @param "ResourceType" (optional.String) - Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. 
- EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + +@return AdminWorkflowAttributesGetResponse +*/ + +type GetWorkflowAttributes2Opts struct { + ResourceType optional.String +} + +func (a *AdminServiceApiService) GetWorkflowAttributes2(ctx context.Context, org string, project string, domain string, workflow string, localVarOptionals *GetWorkflowAttributes2Opts) (AdminWorkflowAttributesGetResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminWorkflowAttributesGetResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"workflow"+"}", fmt.Sprintf("%v", workflow), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.ResourceType.IsSet() { + localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) + } + // to determine the Content-Type header + 
localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminWorkflowAttributesGetResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param project Name of the project that contains the identifiers. +required. + * @param domain Name of the domain the identifiers belongs to within the project. +required. + * @param optional nil or *ListActiveLaunchPlansOpts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param "Org" (optional.String) - Optional, org key applied to the resource. 
+ +@return AdminLaunchPlanList +*/ + +type ListActiveLaunchPlansOpts struct { + Limit optional.Int64 + Token optional.String + SortByKey optional.String + SortByDirection optional.String + Org optional.String +} + +func (a *AdminServiceApiService) ListActiveLaunchPlans(ctx context.Context, project string, domain string, localVarOptionals *ListActiveLaunchPlansOpts) (AdminLaunchPlanList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminLaunchPlanList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/active_launch_plans/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Org.IsSet() { + localVarQueryParams.Add("org", parameterToString(localVarOptionals.Org.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := 
[]string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminLaunchPlanList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org key applied to the resource. + * @param project Name of the project that contains the identifiers. +required. + * @param domain Name of the domain the identifiers belongs to within the project. +required. + * @param optional nil or *ListActiveLaunchPlans2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
+ +@return AdminLaunchPlanList +*/ + +type ListActiveLaunchPlans2Opts struct { + Limit optional.Int64 + Token optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListActiveLaunchPlans2(ctx context.Context, org string, project string, domain string, localVarOptionals *ListActiveLaunchPlans2Opts) (AdminLaunchPlanList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminLaunchPlanList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/active_launch_plans/org/{org}/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := 
selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminLaunchPlanList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param resourceType Identifies the specific type of resource that this identifier corresponds to. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' + * @param optional nil or *ListDescriptionEntitiesOpts - Optional Parameters: + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. 
+ * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminDescriptionEntityList +*/ + +type ListDescriptionEntitiesOpts struct { + IdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListDescriptionEntities(ctx context.Context, resourceType string, idProject string, idDomain string, idName string, localVarOptionals *ListDescriptionEntitiesOpts) (AdminDescriptionEntityList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminDescriptionEntityList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) + } + if 
localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if 
err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminDescriptionEntityList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. + * @param resourceType Identifies the specific type of resource that this identifier corresponds to. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. 
+optional - in certain contexts - like 'List API', 'Launch plans' + * @param optional nil or *ListDescriptionEntities2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminDescriptionEntityList +*/ + +type ListDescriptionEntities2Opts struct { + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListDescriptionEntities2(ctx context.Context, idOrg string, resourceType string, idProject string, idDomain string, idName string, localVarOptionals *ListDescriptionEntities2Opts) (AdminDescriptionEntityList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminDescriptionEntityList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", 
idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, 
localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminDescriptionEntityList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param resourceType Identifies the specific type of resource that this identifier corresponds to. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. 
+ * @param optional nil or *ListDescriptionEntities3Opts - Optional Parameters: + * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
+ +@return AdminDescriptionEntityList +*/ + +type ListDescriptionEntities3Opts struct { + IdName optional.String + IdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListDescriptionEntities3(ctx context.Context, resourceType string, idProject string, idDomain string, localVarOptionals *ListDescriptionEntities3Opts) (AdminDescriptionEntityList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminDescriptionEntityList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { + localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && 
localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminDescriptionEntityList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. + * @param resourceType Identifies the specific type of resource that this identifier corresponds to. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param optional nil or *ListDescriptionEntities4Opts - Optional Parameters: + * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. 
+optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminDescriptionEntityList +*/ + +type ListDescriptionEntities4Opts struct { + IdName optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListDescriptionEntities4(ctx context.Context, idOrg string, resourceType string, idProject string, idDomain string, localVarOptionals *ListDescriptionEntities4Opts) (AdminDescriptionEntityList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminDescriptionEntityList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { + localVarQueryParams.Add("id.name", 
parameterToString(localVarOptionals.IdName.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := 
ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminDescriptionEntityList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Execution`. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param optional nil or *ListExecutionsOpts - Optional Parameters: + * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. 
+required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminExecutionList +*/ + +type ListExecutionsOpts struct { + IdName optional.String + IdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListExecutions(ctx context.Context, idProject string, idDomain string, localVarOptionals *ListExecutionsOpts) (AdminExecutionList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminExecutionList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/{id.project}/{id.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { + localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", 
parameterToString(localVarOptionals.IdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := 
ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminExecutionList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Execution`. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param optional nil or *ListExecutions2Opts - Optional Parameters: + * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. 
+ * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminExecutionList +*/ + +type ListExecutions2Opts struct { + IdName optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListExecutions2(ctx context.Context, idOrg string, idProject string, idDomain string, localVarOptionals *ListExecutions2Opts) (AdminExecutionList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminExecutionList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/org/{id.org}/{id.project}/{id.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { + localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) + } + if localVarOptionals != nil && 
localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return 
localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminExecutionList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param project Name of the project that contains the identifiers. +required + * @param domain Name of the domain the identifiers belongs to within the project. +required + * @param optional nil or *ListLaunchPlanIdsOpts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. 
+optional. - DESCENDING: By default, fields are sorted in descending order. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. + * @param "Org" (optional.String) - Optional, org key applied to the resource. + +@return AdminNamedEntityIdentifierList +*/ + +type ListLaunchPlanIdsOpts struct { + Limit optional.Int64 + Token optional.String + SortByKey optional.String + SortByDirection optional.String + Filters optional.String + Org optional.String +} + +func (a *AdminServiceApiService) ListLaunchPlanIds(ctx context.Context, project string, domain string, localVarOptionals *ListLaunchPlanIdsOpts) (AdminNamedEntityIdentifierList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNamedEntityIdentifierList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plan_ids/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", 
parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Org.IsSet() { + localVarQueryParams.Add("org", parameterToString(localVarOptionals.Org.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNamedEntityIdentifierList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org key applied to the resource. + * @param project Name of the project that contains the identifiers. +required + * @param domain Name of the domain the identifiers belongs to within the project. +required + * @param optional nil or *ListLaunchPlanIds2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
+ * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. + +@return AdminNamedEntityIdentifierList +*/ + +type ListLaunchPlanIds2Opts struct { + Limit optional.Int64 + Token optional.String + SortByKey optional.String + SortByDirection optional.String + Filters optional.String +} + +func (a *AdminServiceApiService) ListLaunchPlanIds2(ctx context.Context, org string, project string, domain string, localVarOptionals *ListLaunchPlanIds2Opts) (AdminNamedEntityIdentifierList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNamedEntityIdentifierList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plan_ids/org/{org}/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if 
localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNamedEntityIdentifierList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' + * @param optional nil or *ListLaunchPlansOpts - Optional Parameters: + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. 
More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminLaunchPlanList +*/ + +type ListLaunchPlansOpts struct { + IdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListLaunchPlans(ctx context.Context, idProject string, idDomain string, idName string, localVarOptionals *ListLaunchPlansOpts) (AdminLaunchPlanList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminLaunchPlanList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", 
parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminLaunchPlanList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' + * @param optional nil or *ListLaunchPlans2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. 
More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminLaunchPlanList +*/ + +type ListLaunchPlans2Opts struct { + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListLaunchPlans2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string, localVarOptionals *ListLaunchPlans2Opts) (AdminLaunchPlanList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminLaunchPlanList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil 
&& localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminLaunchPlanList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param optional nil or *ListLaunchPlans3Opts - Optional Parameters: + * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. 
More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminLaunchPlanList +*/ + +type ListLaunchPlans3Opts struct { + IdName optional.String + IdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListLaunchPlans3(ctx context.Context, idProject string, idDomain string, localVarOptionals *ListLaunchPlans3Opts) (AdminLaunchPlanList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminLaunchPlanList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/{id.project}/{id.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { + localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && 
localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode 
error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminLaunchPlanList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param optional nil or *ListLaunchPlans4Opts - Optional Parameters: + * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. 
More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminLaunchPlanList +*/ + +type ListLaunchPlans4Opts struct { + IdName optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListLaunchPlans4(ctx context.Context, idOrg string, idProject string, idDomain string, localVarOptionals *ListLaunchPlans4Opts) (AdminLaunchPlanList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminLaunchPlanList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { + localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", 
parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminLaunchPlanList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param optional nil or *ListMatchableAttributesOpts - Optional Parameters: + * @param "ResourceType" (optional.String) - +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. 
+ * @param "Org" (optional.String) - Optional, org filter applied to list project requests. + +@return AdminListMatchableAttributesResponse +*/ + +type ListMatchableAttributesOpts struct { + ResourceType optional.String + Org optional.String +} + +func (a *AdminServiceApiService) ListMatchableAttributes(ctx context.Context, localVarOptionals *ListMatchableAttributesOpts) (AdminListMatchableAttributesResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminListMatchableAttributesResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/matchable_attributes" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.ResourceType.IsSet() { + localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Org.IsSet() { + localVarQueryParams.Add("org", parameterToString(localVarOptionals.Org.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, 
localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminListMatchableAttributesResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org filter applied to list project requests. + * @param optional nil or *ListMatchableAttributes2Opts - Optional Parameters: + * @param "ResourceType" (optional.String) - +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. 
- CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + +@return AdminListMatchableAttributesResponse +*/ + +type ListMatchableAttributes2Opts struct { + ResourceType optional.String +} + +func (a *AdminServiceApiService) ListMatchableAttributes2(ctx context.Context, org string, localVarOptionals *ListMatchableAttributes2Opts) (AdminListMatchableAttributesResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminListMatchableAttributesResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/matchable_attributes/org/{org}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.ResourceType.IsSet() { + localVarQueryParams.Add("resource_type", parameterToString(localVarOptionals.ResourceType.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + 
localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminListMatchableAttributesResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param resourceType Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required + * @param project Name of the project that contains the identifiers. +required + * @param domain Name of the domain the identifiers belongs to within the project. + * @param optional nil or *ListNamedEntitiesOpts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
+ * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. + * @param "Org" (optional.String) - Optional, org key applied to the resource. + +@return AdminNamedEntityList +*/ + +type ListNamedEntitiesOpts struct { + Limit optional.Int64 + Token optional.String + SortByKey optional.String + SortByDirection optional.String + Filters optional.String + Org optional.String +} + +func (a *AdminServiceApiService) ListNamedEntities(ctx context.Context, resourceType string, project string, domain string, localVarOptionals *ListNamedEntitiesOpts) (AdminNamedEntityList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNamedEntityList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/named_entities/{resource_type}/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + 
localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Org.IsSet() { + localVarQueryParams.Add("org", parameterToString(localVarOptionals.Org.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNamedEntityList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org key applied to the resource. + * @param resourceType Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required + * @param project Name of the project that contains the identifiers. +required + * @param domain Name of the domain the identifiers belongs to within the project. + * @param optional nil or *ListNamedEntities2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. 
- DESCENDING: By default, fields are sorted in descending order. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. + +@return AdminNamedEntityList +*/ + +type ListNamedEntities2Opts struct { + Limit optional.Int64 + Token optional.String + SortByKey optional.String + SortByDirection optional.String + Filters optional.String +} + +func (a *AdminServiceApiService) ListNamedEntities2(ctx context.Context, org string, resourceType string, project string, domain string, localVarOptionals *ListNamedEntities2Opts) (AdminNamedEntityList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNamedEntityList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/named_entities/org/{org}/{resource_type}/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && 
localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNamedEntityList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param workflowExecutionIdProject Name of the project the resource belongs to. + * @param workflowExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param workflowExecutionIdName User or system provided value for the resource. + * @param optional nil or *ListNodeExecutionsOpts - Optional Parameters: + * @param "WorkflowExecutionIdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. 
+ * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param "UniqueParentId" (optional.String) - Unique identifier of the parent node in the execution +optional. + +@return AdminNodeExecutionList +*/ + +type ListNodeExecutionsOpts struct { + WorkflowExecutionIdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String + UniqueParentId optional.String +} + +func (a *AdminServiceApiService) ListNodeExecutions(ctx context.Context, workflowExecutionIdProject string, workflowExecutionIdDomain string, workflowExecutionIdName string, localVarOptionals *ListNodeExecutionsOpts) (AdminNodeExecutionList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNodeExecutionList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"workflow_execution_id.project"+"}", fmt.Sprintf("%v", workflowExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"workflow_execution_id.domain"+"}", fmt.Sprintf("%v", workflowExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"workflow_execution_id.name"+"}", fmt.Sprintf("%v", workflowExecutionIdName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.WorkflowExecutionIdOrg.IsSet() { + localVarQueryParams.Add("workflow_execution_id.org", parameterToString(localVarOptionals.WorkflowExecutionIdOrg.Value(), "")) + } 
+ if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.UniqueParentId.IsSet() { + localVarQueryParams.Add("unique_parent_id", parameterToString(localVarOptionals.UniqueParentId.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || 
localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNodeExecutionList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param workflowExecutionIdOrg Optional, org key applied to the resource. + * @param workflowExecutionIdProject Name of the project the resource belongs to. + * @param workflowExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param workflowExecutionIdName User or system provided value for the resource. 
+ * @param optional nil or *ListNodeExecutions2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param "UniqueParentId" (optional.String) - Unique identifier of the parent node in the execution +optional. + +@return AdminNodeExecutionList +*/ + +type ListNodeExecutions2Opts struct { + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String + UniqueParentId optional.String +} + +func (a *AdminServiceApiService) ListNodeExecutions2(ctx context.Context, workflowExecutionIdOrg string, workflowExecutionIdProject string, workflowExecutionIdDomain string, workflowExecutionIdName string, localVarOptionals *ListNodeExecutions2Opts) (AdminNodeExecutionList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNodeExecutionList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/node_executions/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"workflow_execution_id.org"+"}", fmt.Sprintf("%v", workflowExecutionIdOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"workflow_execution_id.project"+"}", fmt.Sprintf("%v", workflowExecutionIdProject), -1) 
+ localVarPath = strings.Replace(localVarPath, "{"+"workflow_execution_id.domain"+"}", fmt.Sprintf("%v", workflowExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"workflow_execution_id.name"+"}", fmt.Sprintf("%v", workflowExecutionIdName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.UniqueParentId.IsSet() { + localVarQueryParams.Add("unique_parent_id", parameterToString(localVarOptionals.UniqueParentId.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept 
!= "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNodeExecutionList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ * @param taskExecutionIdNodeExecutionIdExecutionIdProject Name of the project the resource belongs to. + * @param taskExecutionIdNodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param taskExecutionIdNodeExecutionIdExecutionIdName User or system provided value for the resource. + * @param taskExecutionIdNodeExecutionIdNodeId + * @param taskExecutionIdTaskIdProject Name of the project the resource belongs to. + * @param taskExecutionIdTaskIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param taskExecutionIdTaskIdName User provided value for the resource. + * @param taskExecutionIdTaskIdVersion Specific version of the resource. + * @param taskExecutionIdRetryAttempt + * @param optional nil or *ListNodeExecutionsForTaskOpts - Optional Parameters: + * @param "TaskExecutionIdTaskIdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param "TaskExecutionIdTaskIdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "TaskExecutionIdNodeExecutionIdExecutionIdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional. 
+ * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminNodeExecutionList +*/ + +type ListNodeExecutionsForTaskOpts struct { + TaskExecutionIdTaskIdResourceType optional.String + TaskExecutionIdTaskIdOrg optional.String + TaskExecutionIdNodeExecutionIdExecutionIdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListNodeExecutionsForTask(ctx context.Context, taskExecutionIdNodeExecutionIdExecutionIdProject string, taskExecutionIdNodeExecutionIdExecutionIdDomain string, taskExecutionIdNodeExecutionIdExecutionIdName string, taskExecutionIdNodeExecutionIdNodeId string, taskExecutionIdTaskIdProject string, taskExecutionIdTaskIdDomain string, taskExecutionIdTaskIdName string, taskExecutionIdTaskIdVersion string, taskExecutionIdRetryAttempt int64, localVarOptionals *ListNodeExecutionsForTaskOpts) (AdminNodeExecutionList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNodeExecutionList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + 
"/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}" + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.execution_id.project"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.node_id"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdNodeId), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.project"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.domain"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.name"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.version"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdVersion), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.retry_attempt"+"}", fmt.Sprintf("%v", taskExecutionIdRetryAttempt), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if 
localVarOptionals != nil && localVarOptionals.TaskExecutionIdTaskIdResourceType.IsSet() { + localVarQueryParams.Add("task_execution_id.task_id.resource_type", parameterToString(localVarOptionals.TaskExecutionIdTaskIdResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.TaskExecutionIdTaskIdOrg.IsSet() { + localVarQueryParams.Add("task_execution_id.task_id.org", parameterToString(localVarOptionals.TaskExecutionIdTaskIdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.TaskExecutionIdNodeExecutionIdExecutionIdOrg.IsSet() { + localVarQueryParams.Add("task_execution_id.node_execution_id.execution_id.org", parameterToString(localVarOptionals.TaskExecutionIdNodeExecutionIdExecutionIdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + 
localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNodeExecutionList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. 
+ * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param taskExecutionIdNodeExecutionIdExecutionIdOrg Optional, org key applied to the resource. + * @param taskExecutionIdNodeExecutionIdExecutionIdProject Name of the project the resource belongs to. + * @param taskExecutionIdNodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param taskExecutionIdNodeExecutionIdExecutionIdName User or system provided value for the resource. + * @param taskExecutionIdNodeExecutionIdNodeId + * @param taskExecutionIdTaskIdProject Name of the project the resource belongs to. + * @param taskExecutionIdTaskIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param taskExecutionIdTaskIdName User provided value for the resource. + * @param taskExecutionIdTaskIdVersion Specific version of the resource. + * @param taskExecutionIdRetryAttempt + * @param optional nil or *ListNodeExecutionsForTask2Opts - Optional Parameters: + * @param "TaskExecutionIdTaskIdResourceType" (optional.String) - Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + * @param "TaskExecutionIdTaskIdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. 
+ * @param "Token" (optional.String) - In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminNodeExecutionList +*/ + +type ListNodeExecutionsForTask2Opts struct { + TaskExecutionIdTaskIdResourceType optional.String + TaskExecutionIdTaskIdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListNodeExecutionsForTask2(ctx context.Context, taskExecutionIdNodeExecutionIdExecutionIdOrg string, taskExecutionIdNodeExecutionIdExecutionIdProject string, taskExecutionIdNodeExecutionIdExecutionIdDomain string, taskExecutionIdNodeExecutionIdExecutionIdName string, taskExecutionIdNodeExecutionIdNodeId string, taskExecutionIdTaskIdProject string, taskExecutionIdTaskIdDomain string, taskExecutionIdTaskIdName string, taskExecutionIdTaskIdVersion string, taskExecutionIdRetryAttempt int64, localVarOptionals *ListNodeExecutionsForTask2Opts) (AdminNodeExecutionList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNodeExecutionList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + 
"/api/v1/children/org/{task_execution_id.node_execution_id.execution_id.org}/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}" + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.execution_id.org"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdExecutionIdOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.execution_id.project"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.node_execution_id.node_id"+"}", fmt.Sprintf("%v", taskExecutionIdNodeExecutionIdNodeId), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.project"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.domain"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.name"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"task_execution_id.task_id.version"+"}", fmt.Sprintf("%v", taskExecutionIdTaskIdVersion), -1) + localVarPath = 
strings.Replace(localVarPath, "{"+"task_execution_id.retry_attempt"+"}", fmt.Sprintf("%v", taskExecutionIdRetryAttempt), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.TaskExecutionIdTaskIdResourceType.IsSet() { + localVarQueryParams.Add("task_execution_id.task_id.resource_type", parameterToString(localVarOptionals.TaskExecutionIdTaskIdResourceType.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.TaskExecutionIdTaskIdOrg.IsSet() { + localVarQueryParams.Add("task_execution_id.task_id.org", parameterToString(localVarOptionals.TaskExecutionIdTaskIdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := 
[]string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNodeExecutionList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches a list of :ref:`ref_flyteidl.admin.Project` + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. 
Passed from http.Request or context.Background(). + * @param optional nil or *ListProjectsOpts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of projects to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param "Org" (optional.String) - Optional, org filter applied to list project requests. + +@return AdminProjects +*/ + +type ListProjectsOpts struct { + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String + Org optional.String +} + +func (a *AdminServiceApiService) ListProjects(ctx context.Context, localVarOptionals *ListProjectsOpts) (AdminProjects, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminProjects + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/projects" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), 
"")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Org.IsSet() { + localVarQueryParams.Add("org", parameterToString(localVarOptionals.Org.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the 
data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminProjects + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches a list of :ref:`ref_flyteidl.admin.Project` + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org filter applied to list project requests. + * @param optional nil or *ListProjects2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of projects to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
+ +@return AdminProjects +*/ + +type ListProjects2Opts struct { + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListProjects2(ctx context.Context, org string, localVarOptionals *ListProjects2Opts) (AdminProjects, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminProjects + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/projects/org/{org}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + 
localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminProjects + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. 
+ * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param nodeExecutionIdExecutionIdProject Name of the project the resource belongs to. + * @param nodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param nodeExecutionIdExecutionIdName User or system provided value for the resource. + * @param nodeExecutionIdNodeId + * @param optional nil or *ListTaskExecutionsOpts - Optional Parameters: + * @param "NodeExecutionIdExecutionIdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
+ +@return AdminTaskExecutionList +*/ + +type ListTaskExecutionsOpts struct { + NodeExecutionIdExecutionIdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListTaskExecutions(ctx context.Context, nodeExecutionIdExecutionIdProject string, nodeExecutionIdExecutionIdDomain string, nodeExecutionIdExecutionIdName string, nodeExecutionIdNodeId string, localVarOptionals *ListTaskExecutionsOpts) (AdminTaskExecutionList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminTaskExecutionList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}" + localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.execution_id.project"+"}", fmt.Sprintf("%v", nodeExecutionIdExecutionIdProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", nodeExecutionIdExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", nodeExecutionIdExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.node_id"+"}", fmt.Sprintf("%v", nodeExecutionIdNodeId), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.NodeExecutionIdExecutionIdOrg.IsSet() { + localVarQueryParams.Add("node_execution_id.execution_id.org", parameterToString(localVarOptionals.NodeExecutionIdExecutionIdOrg.Value(), "")) + } + if 
localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if 
err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminTaskExecutionList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param nodeExecutionIdExecutionIdOrg Optional, org key applied to the resource. + * @param nodeExecutionIdExecutionIdProject Name of the project the resource belongs to. + * @param nodeExecutionIdExecutionIdDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param nodeExecutionIdExecutionIdName User or system provided value for the resource. + * @param nodeExecutionIdNodeId + * @param optional nil or *ListTaskExecutions2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. 
+ * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminTaskExecutionList +*/ + +type ListTaskExecutions2Opts struct { + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListTaskExecutions2(ctx context.Context, nodeExecutionIdExecutionIdOrg string, nodeExecutionIdExecutionIdProject string, nodeExecutionIdExecutionIdDomain string, nodeExecutionIdExecutionIdName string, nodeExecutionIdNodeId string, localVarOptionals *ListTaskExecutions2Opts) (AdminTaskExecutionList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminTaskExecutionList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/task_executions/org/{node_execution_id.execution_id.org}/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}" + localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.execution_id.org"+"}", fmt.Sprintf("%v", nodeExecutionIdExecutionIdOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.execution_id.project"+"}", fmt.Sprintf("%v", nodeExecutionIdExecutionIdProject), -1) + localVarPath = 
strings.Replace(localVarPath, "{"+"node_execution_id.execution_id.domain"+"}", fmt.Sprintf("%v", nodeExecutionIdExecutionIdDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.execution_id.name"+"}", fmt.Sprintf("%v", nodeExecutionIdExecutionIdName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"node_execution_id.node_id"+"}", fmt.Sprintf("%v", nodeExecutionIdNodeId), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = 
localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminTaskExecutionList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param project Name of the project that contains the identifiers. +required + * @param domain Name of the domain the identifiers belongs to within the project. 
+required + * @param optional nil or *ListTaskIdsOpts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. + * @param "Org" (optional.String) - Optional, org key applied to the resource. + +@return AdminNamedEntityIdentifierList +*/ + +type ListTaskIdsOpts struct { + Limit optional.Int64 + Token optional.String + SortByKey optional.String + SortByDirection optional.String + Filters optional.String + Org optional.String +} + +func (a *AdminServiceApiService) ListTaskIds(ctx context.Context, project string, domain string, localVarOptionals *ListTaskIdsOpts) (AdminNamedEntityIdentifierList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNamedEntityIdentifierList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/task_ids/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", 
parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Org.IsSet() { + localVarQueryParams.Add("org", parameterToString(localVarOptionals.Org.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := 
ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNamedEntityIdentifierList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org key applied to the resource. + * @param project Name of the project that contains the identifiers. +required + * @param domain Name of the domain the identifiers belongs to within the project. +required + * @param optional nil or *ListTaskIds2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. 
+ * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. + +@return AdminNamedEntityIdentifierList +*/ + +type ListTaskIds2Opts struct { + Limit optional.Int64 + Token optional.String + SortByKey optional.String + SortByDirection optional.String + Filters optional.String +} + +func (a *AdminServiceApiService) ListTaskIds2(ctx context.Context, org string, project string, domain string, localVarOptionals *ListTaskIds2Opts) (AdminNamedEntityIdentifierList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNamedEntityIdentifierList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/tasks/org/{org}/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", 
parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNamedEntityIdentifierList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' + * @param optional nil or *ListTasksOpts - Optional Parameters: + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. 
More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminTaskList +*/ + +type ListTasksOpts struct { + IdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListTasks(ctx context.Context, idProject string, idDomain string, idName string, localVarOptionals *ListTasksOpts) (AdminTaskList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminTaskList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/tasks/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if 
localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminTaskList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' + * @param optional nil or *ListTasks2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. 
+ * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminTaskList +*/ + +type ListTasks2Opts struct { + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListTasks2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string, localVarOptionals *ListTasks2Opts) (AdminTaskList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminTaskList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", 
parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminTaskList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param optional nil or *ListTasks3Opts - Optional Parameters: + * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. 
More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminTaskList +*/ + +type ListTasks3Opts struct { + IdName optional.String + IdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListTasks3(ctx context.Context, idProject string, idDomain string, localVarOptionals *ListTasks3Opts) (AdminTaskList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminTaskList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/tasks/{id.project}/{id.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { + localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", 
parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminTaskList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param optional nil or *ListTasks4Opts - Optional Parameters: + * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. 
More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminTaskList +*/ + +type ListTasks4Opts struct { + IdName optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListTasks4(ctx context.Context, idOrg string, idProject string, idDomain string, localVarOptionals *ListTasks4Opts) (AdminTaskList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminTaskList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { + localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if 
localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminTaskList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param project Name of the project that contains the identifiers. +required + * @param domain Name of the domain the identifiers belongs to within the project. +required + * @param optional nil or *ListWorkflowIdsOpts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. 
+ * @param "Org" (optional.String) - Optional, org key applied to the resource. + +@return AdminNamedEntityIdentifierList +*/ + +type ListWorkflowIdsOpts struct { + Limit optional.Int64 + Token optional.String + SortByKey optional.String + SortByDirection optional.String + Filters optional.String + Org optional.String +} + +func (a *AdminServiceApiService) ListWorkflowIds(ctx context.Context, project string, domain string, localVarOptionals *ListWorkflowIdsOpts) (AdminNamedEntityIdentifierList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNamedEntityIdentifierList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/workflow_ids/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", 
parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Org.IsSet() { + localVarQueryParams.Add("org", parameterToString(localVarOptionals.Org.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNamedEntityIdentifierList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param org Optional, org key applied to the resource. + * @param project Name of the project that contains the identifiers. +required + * @param domain Name of the domain the identifiers belongs to within the project. +required + * @param optional nil or *ListWorkflowIds2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
+ * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. + +@return AdminNamedEntityIdentifierList +*/ + +type ListWorkflowIds2Opts struct { + Limit optional.Int64 + Token optional.String + SortByKey optional.String + SortByDirection optional.String + Filters optional.String +} + +func (a *AdminServiceApiService) ListWorkflowIds2(ctx context.Context, org string, project string, domain string, localVarOptionals *ListWorkflowIds2Opts) (AdminNamedEntityIdentifierList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminNamedEntityIdentifierList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/workflows/org/{org}/{project}/{domain}" + localVarPath = strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + if localVarOptionals != nil && 
localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminNamedEntityIdentifierList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' + * @param optional nil or *ListWorkflowsOpts - Optional Parameters: + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. 
More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminWorkflowList +*/ + +type ListWorkflowsOpts struct { + IdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListWorkflows(ctx context.Context, idProject string, idDomain string, idName string, localVarOptionals *ListWorkflowsOpts) (AdminWorkflowList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminWorkflowList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/workflows/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", 
parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminWorkflowList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' + * @param optional nil or *ListWorkflows2Opts - Optional Parameters: + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. 
+ * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminWorkflowList +*/ + +type ListWorkflows2Opts struct { + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListWorkflows2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string, localVarOptionals *ListWorkflows2Opts) (AdminWorkflowList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminWorkflowList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + 
localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminWorkflowList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param optional nil or *ListWorkflows3Opts - Optional Parameters: + * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + * @param "IdOrg" (optional.String) - Optional, org key applied to the resource. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. 
More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminWorkflowList +*/ + +type ListWorkflows3Opts struct { + IdName optional.String + IdOrg optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListWorkflows3(ctx context.Context, idProject string, idDomain string, localVarOptionals *ListWorkflows3Opts) (AdminWorkflowList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminWorkflowList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/workflows/{id.project}/{id.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { + localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.IdOrg.IsSet() { + localVarQueryParams.Add("id.org", parameterToString(localVarOptionals.IdOrg.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + 
localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminWorkflowList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param optional nil or *ListWorkflows4Opts - Optional Parameters: + * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. + * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + * @param "Filters" (optional.String) - Indicates a list of filters passed as string. 
More info on constructing filters : <Link> +optional. + * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. + * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + +@return AdminWorkflowList +*/ + +type ListWorkflows4Opts struct { + IdName optional.String + Limit optional.Int64 + Token optional.String + Filters optional.String + SortByKey optional.String + SortByDirection optional.String +} + +func (a *AdminServiceApiService) ListWorkflows4(ctx context.Context, idOrg string, idProject string, idDomain string, localVarOptionals *ListWorkflows4Opts) (AdminWorkflowList, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminWorkflowList + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { + localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { + localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Token.IsSet() { + localVarQueryParams.Add("token", 
parameterToString(localVarOptionals.Token.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { + localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { + localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) + } + if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { + localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) + } + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. 
+ err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminWorkflowList + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ * @param body + +@return AdminExecutionCreateResponse +*/ +func (a *AdminServiceApiService) RecoverExecution(ctx context.Context, body AdminExecutionRecoverRequest) (AdminExecutionCreateResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminExecutionCreateResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/recover" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, 
otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminExecutionCreateResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. 
+ * @param body + +@return AdminExecutionCreateResponse +*/ +func (a *AdminServiceApiService) RecoverExecution2(ctx context.Context, idOrg string, body AdminExecutionRecoverRequest) (AdminExecutionCreateResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminExecutionCreateResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/org/{id.org}/recover" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return 
localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminExecutionCreateResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ * @param body + +@return AdminProjectRegisterResponse +*/ +func (a *AdminServiceApiService) RegisterProject(ctx context.Context, body AdminProjectRegisterRequest) (AdminProjectRegisterResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminProjectRegisterResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/projects" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass 
on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminProjectRegisterResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param projectOrg Optional, org key applied to the resource. 
+ * @param body + +@return AdminProjectRegisterResponse +*/ +func (a *AdminServiceApiService) RegisterProject2(ctx context.Context, projectOrg string, body AdminProjectRegisterRequest) (AdminProjectRegisterResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminProjectRegisterResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/projects/org/{project.org}" + localVarPath = strings.Replace(localVarPath, "{"+"project.org"+"}", fmt.Sprintf("%v", projectOrg), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return 
localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminProjectRegisterResponse + err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHttpResponse, newErr + } + newErr.model = v + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, newErr + } + + return localVarReturnValue, localVarHttpResponse, nil +} + +/* +AdminServiceApiService Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` + * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
+ * @param body + +@return AdminExecutionCreateResponse +*/ +func (a *AdminServiceApiService) RelaunchExecution(ctx context.Context, body AdminExecutionRelaunchRequest) (AdminExecutionCreateResponse, *http.Response, error) { + var ( + localVarHttpMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + localVarReturnValue AdminExecutionCreateResponse + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/relaunch" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + // to determine the Content-Type header + localVarHttpContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHttpContentType := selectHeaderContentType(localVarHttpContentTypes) + if localVarHttpContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHttpContentType + } + + // to determine the Accept header + localVarHttpHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHttpHeaderAccept := selectHeaderAccept(localVarHttpHeaderAccepts) + if localVarHttpHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHttpHeaderAccept + } + // body params + localVarPostBody = &body + r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHttpResponse, err := a.client.callAPI(r) + if err != nil || localVarHttpResponse == nil { + return localVarReturnValue, localVarHttpResponse, err + } + + localVarBody, err := ioutil.ReadAll(localVarHttpResponse.Body) + localVarHttpResponse.Body.Close() + if err != nil { + return localVarReturnValue, localVarHttpResponse, err + } + + if localVarHttpResponse.StatusCode < 300 { + // If we succeed, return the data, 
otherwise pass on to decode error. + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); + if err == nil { + return localVarReturnValue, localVarHttpResponse, err + } + } + + if localVarHttpResponse.StatusCode >= 300 { + newErr := GenericSwaggerError{ + body: localVarBody, + error: localVarHttpResponse.Status, + } + + if localVarHttpResponse.StatusCode == 200 { + var v AdminExecutionCreateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -4193,61 +10720,30 @@ func (a *AdminServiceApiService) ListTaskExecutions(ctx context.Context, nodeExe } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. +AdminServiceApiService Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param project Name of the project that contains the identifiers. +required - * @param domain Name of the domain the identifiers belongs to within the project. +required - * @param optional nil or *ListTaskIdsOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. 
+ * @param idOrg Optional, org key applied to the resource. + * @param body -@return AdminNamedEntityIdentifierList +@return AdminExecutionCreateResponse */ - -type ListTaskIdsOpts struct { - Limit optional.Int64 - Token optional.String - SortByKey optional.String - SortByDirection optional.String - Filters optional.String -} - -func (a *AdminServiceApiService) ListTaskIds(ctx context.Context, project string, domain string, localVarOptionals *ListTaskIdsOpts) (AdminNamedEntityIdentifierList, *http.Response, error) { +func (a *AdminServiceApiService) RelaunchExecution2(ctx context.Context, idOrg string, body AdminExecutionRelaunchRequest) (AdminExecutionCreateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Post") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminNamedEntityIdentifierList + localVarReturnValue AdminExecutionCreateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/task_ids/{project}/{domain}" - localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) - localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/org/{id.org}/relaunch" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - 
localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -4265,6 +10761,8 @@ func (a *AdminServiceApiService) ListTaskIds(ctx context.Context, project string if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -4296,7 +10794,7 @@ func (a *AdminServiceApiService) ListTaskIds(ctx context.Context, project string } if localVarHttpResponse.StatusCode == 200 { - var v AdminNamedEntityIdentifierList + var v AdminExecutionCreateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -4313,40 +10811,26 @@ func (a *AdminServiceApiService) ListTaskIds(ctx context.Context, project string } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. +AdminServiceApiService Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). * @param idProject Name of the project the resource belongs to. * @param idDomain Name of the domain the resource belongs to. 
A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' - * @param optional nil or *ListTasksOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idName User or system provided value for the resource. 
+ * @param body -@return AdminTaskList +@return AdminExecutionTerminateResponse */ - -type ListTasksOpts struct { - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String -} - -func (a *AdminServiceApiService) ListTasks(ctx context.Context, idProject string, idDomain string, idName string, localVarOptionals *ListTasksOpts) (AdminTaskList, *http.Response, error) { +func (a *AdminServiceApiService) TerminateExecution(ctx context.Context, idProject string, idDomain string, idName string, body AdminExecutionTerminateRequest) (AdminExecutionTerminateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Delete") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminTaskList + localVarReturnValue AdminExecutionTerminateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/tasks/{id.project}/{id.domain}/{id.name}" + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/{id.project}/{id.domain}/{id.name}" localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) @@ -4355,21 +10839,6 @@ func (a *AdminServiceApiService) ListTasks(ctx context.Context, idProject string localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && 
localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -4387,6 +10856,8 @@ func (a *AdminServiceApiService) ListTasks(ctx context.Context, idProject string if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -4418,7 +10889,7 @@ func (a *AdminServiceApiService) ListTasks(ctx context.Context, idProject string } if localVarHttpResponse.StatusCode == 200 { - var v AdminTaskList + var v AdminExecutionTerminateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -4435,66 +10906,36 @@ func (a *AdminServiceApiService) ListTasks(ctx context.Context, idProject string } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. +AdminServiceApiService Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. * @param idProject Name of the project the resource belongs to. 
* @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param optional nil or *ListTasks2Opts - Optional Parameters: - * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idName User or system provided value for the resource. 
+ * @param body -@return AdminTaskList +@return AdminExecutionTerminateResponse */ - -type ListTasks2Opts struct { - IdName optional.String - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String -} - -func (a *AdminServiceApiService) ListTasks2(ctx context.Context, idProject string, idDomain string, localVarOptionals *ListTasks2Opts) (AdminTaskList, *http.Response, error) { +func (a *AdminServiceApiService) TerminateExecution2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string, body AdminExecutionTerminateRequest) (AdminExecutionTerminateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Delete") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminTaskList + localVarReturnValue AdminExecutionTerminateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/tasks/{id.project}/{id.domain}" + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { - localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", 
parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -4512,6 +10953,8 @@ func (a *AdminServiceApiService) ListTasks2(ctx context.Context, idProject strin if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -4543,7 +10986,7 @@ func (a *AdminServiceApiService) ListTasks2(ctx context.Context, idProject strin } if localVarHttpResponse.StatusCode == 200 { - var v AdminTaskList + var v AdminExecutionTerminateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -4560,61 +11003,34 @@ func (a *AdminServiceApiService) ListTasks2(ctx context.Context, idProject strin } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. 
+AdminServiceApiService Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param project Name of the project that contains the identifiers. +required - * @param domain Name of the domain the identifiers belongs to within the project. +required - * @param optional nil or *ListWorkflowIdsOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. +optional. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User or system provided value for the resource. 
+ * @param body -@return AdminNamedEntityIdentifierList +@return AdminExecutionUpdateResponse */ - -type ListWorkflowIdsOpts struct { - Limit optional.Int64 - Token optional.String - SortByKey optional.String - SortByDirection optional.String - Filters optional.String -} - -func (a *AdminServiceApiService) ListWorkflowIds(ctx context.Context, project string, domain string, localVarOptionals *ListWorkflowIdsOpts) (AdminNamedEntityIdentifierList, *http.Response, error) { +func (a *AdminServiceApiService) UpdateExecution(ctx context.Context, idProject string, idDomain string, idName string, body AdminExecutionUpdateRequest) (AdminExecutionUpdateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminNamedEntityIdentifierList + localVarReturnValue AdminExecutionUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/workflow_ids/{project}/{domain}" - localVarPath = strings.Replace(localVarPath, "{"+"project"+"}", fmt.Sprintf("%v", project), -1) - localVarPath = strings.Replace(localVarPath, "{"+"domain"+"}", fmt.Sprintf("%v", domain), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && 
localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -4632,6 +11048,8 @@ func (a *AdminServiceApiService) ListWorkflowIds(ctx context.Context, project st if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -4663,7 +11081,7 @@ func (a *AdminServiceApiService) ListWorkflowIds(ctx context.Context, project st } if localVarHttpResponse.StatusCode == 200 { - var v AdminNamedEntityIdentifierList + var v AdminExecutionUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -4680,63 +11098,36 @@ func (a *AdminServiceApiService) ListWorkflowIds(ctx context.Context, project st } /* -AdminServiceApiService Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. +AdminServiceApiService Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. 
* @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. * @param idProject Name of the project the resource belongs to. * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' - * @param optional nil or *ListWorkflowsOpts - Optional Parameters: - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idName User or system provided value for the resource. 
+ * @param body -@return AdminWorkflowList +@return AdminExecutionUpdateResponse */ - -type ListWorkflowsOpts struct { - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String -} - -func (a *AdminServiceApiService) ListWorkflows(ctx context.Context, idProject string, idDomain string, idName string, localVarOptionals *ListWorkflowsOpts) (AdminWorkflowList, *http.Response, error) { +func (a *AdminServiceApiService) UpdateExecution2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string, body AdminExecutionUpdateRequest) (AdminExecutionUpdateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminWorkflowList + localVarReturnValue AdminExecutionUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/workflows/{id.project}/{id.domain}/{id.name}" + localVarPath := a.client.cfg.BasePath + "/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) - - localVarHeaderParams := make(map[string]string) - localVarQueryParams := url.Values{} - localVarFormParams := url.Values{} - - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", 
parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -4754,6 +11145,8 @@ func (a *AdminServiceApiService) ListWorkflows(ctx context.Context, idProject st if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -4785,7 +11178,7 @@ func (a *AdminServiceApiService) ListWorkflows(ctx context.Context, idProject st } if localVarHttpResponse.StatusCode == 200 { - var v AdminWorkflowList + var v AdminExecutionUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -4802,66 +11195,36 @@ func (a *AdminServiceApiService) ListWorkflows(ctx context.Context, idProject st } /* -AdminServiceApiService Fetch a 
list of :ref:`ref_flyteidl.admin.Workflow` definitions. +AdminServiceApiService Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). * @param idProject Name of the project the resource belongs to. * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param optional nil or *ListWorkflows2Opts - Optional Parameters: - * @param "IdName" (optional.String) - User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - * @param "Limit" (optional.Int64) - Indicates the number of resources to be returned. +required. - * @param "Token" (optional.String) - In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - * @param "Filters" (optional.String) - Indicates a list of filters passed as string. More info on constructing filters : <Link> +optional. - * @param "SortByKey" (optional.String) - Indicates an attribute to sort the response values. +required. - * @param "SortByDirection" (optional.String) - Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + * @param idName User provided value for the resource. + * @param idVersion Specific version of the resource. 
+ * @param body -@return AdminWorkflowList +@return AdminLaunchPlanUpdateResponse */ - -type ListWorkflows2Opts struct { - IdName optional.String - Limit optional.Int64 - Token optional.String - Filters optional.String - SortByKey optional.String - SortByDirection optional.String -} - -func (a *AdminServiceApiService) ListWorkflows2(ctx context.Context, idProject string, idDomain string, localVarOptionals *ListWorkflows2Opts) (AdminWorkflowList, *http.Response, error) { +func (a *AdminServiceApiService) UpdateLaunchPlan(ctx context.Context, idProject string, idDomain string, idName string, idVersion string, body AdminLaunchPlanUpdateRequest) (AdminLaunchPlanUpdateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Get") + localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminWorkflowList + localVarReturnValue AdminLaunchPlanUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/workflows/{id.project}/{id.domain}" + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}" localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} localVarFormParams := url.Values{} - if localVarOptionals != nil && localVarOptionals.IdName.IsSet() { - localVarQueryParams.Add("id.name", parameterToString(localVarOptionals.IdName.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Limit.IsSet() { - localVarQueryParams.Add("limit", 
parameterToString(localVarOptionals.Limit.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Token.IsSet() { - localVarQueryParams.Add("token", parameterToString(localVarOptionals.Token.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.Filters.IsSet() { - localVarQueryParams.Add("filters", parameterToString(localVarOptionals.Filters.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByKey.IsSet() { - localVarQueryParams.Add("sort_by.key", parameterToString(localVarOptionals.SortByKey.Value(), "")) - } - if localVarOptionals != nil && localVarOptionals.SortByDirection.IsSet() { - localVarQueryParams.Add("sort_by.direction", parameterToString(localVarOptionals.SortByDirection.Value(), "")) - } // to determine the Content-Type header localVarHttpContentTypes := []string{"application/json"} @@ -4879,6 +11242,8 @@ func (a *AdminServiceApiService) ListWorkflows2(ctx context.Context, idProject s if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } + // body params + localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil { return localVarReturnValue, nil, err @@ -4910,7 +11275,7 @@ func (a *AdminServiceApiService) ListWorkflows2(ctx context.Context, idProject s } if localVarHttpResponse.StatusCode == 200 { - var v AdminWorkflowList + var v AdminLaunchPlanUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -4927,23 +11292,32 @@ func (a *AdminServiceApiService) ListWorkflows2(ctx context.Context, idProject s } /* -AdminServiceApiService Recreates a previously-run workflow execution that will only start executing from the last known failure point. 
In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. +AdminServiceApiService Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param body + * @param idOrg Optional, org key applied to the resource. + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. + * @param idVersion Specific version of the resource. 
-@return AdminExecutionCreateResponse +@return AdminLaunchPlanUpdateResponse */ -func (a *AdminServiceApiService) RecoverExecution(ctx context.Context, body AdminExecutionRecoverRequest) (AdminExecutionCreateResponse, *http.Response, error) { +func (a *AdminServiceApiService) UpdateLaunchPlan2(ctx context.Context, idOrg string, idProject string, idDomain string, idName string, idVersion string) (AdminLaunchPlanUpdateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Post") + localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminExecutionCreateResponse + localVarReturnValue AdminLaunchPlanUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/executions/recover" + localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -4966,8 +11340,6 @@ func (a *AdminServiceApiService) RecoverExecution(ctx context.Context, body Admi if localVarHttpHeaderAccept != "" { localVarHeaderParams["Accept"] = localVarHttpHeaderAccept } - // body params - localVarPostBody = &body r, err := a.client.prepareRequest(ctx, localVarPath, localVarHttpMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) if err != nil 
{ return localVarReturnValue, nil, err @@ -4999,7 +11371,7 @@ func (a *AdminServiceApiService) RecoverExecution(ctx context.Context, body Admi } if localVarHttpResponse.StatusCode == 200 { - var v AdminExecutionCreateResponse + var v AdminLaunchPlanUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -5016,23 +11388,31 @@ func (a *AdminServiceApiService) RecoverExecution(ctx context.Context, body Admi } /* -AdminServiceApiService Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. +AdminServiceApiService Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param resourceType Resource type of the metadata to update +required + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. 
+optional - in certain contexts - like 'List API', 'Launch plans' * @param body -@return AdminProjectRegisterResponse +@return AdminNamedEntityUpdateResponse */ -func (a *AdminServiceApiService) RegisterProject(ctx context.Context, body AdminProjectRegisterRequest) (AdminProjectRegisterResponse, *http.Response, error) { +func (a *AdminServiceApiService) UpdateNamedEntity(ctx context.Context, resourceType string, idProject string, idDomain string, idName string, body AdminNamedEntityUpdateRequest) (AdminNamedEntityUpdateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Post") + localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminProjectRegisterResponse + localVarReturnValue AdminNamedEntityUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/projects" + localVarPath := a.client.cfg.BasePath + "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -5088,7 +11468,7 @@ func (a *AdminServiceApiService) RegisterProject(ctx context.Context, body Admin } if localVarHttpResponse.StatusCode == 200 { - var v AdminProjectRegisterResponse + var v AdminNamedEntityUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -5105,23 +11485,33 @@ func (a *AdminServiceApiService) RegisterProject(ctx 
context.Context, body Admin } /* -AdminServiceApiService Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` +AdminServiceApiService Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param idOrg Optional, org key applied to the resource. + * @param resourceType Resource type of the metadata to update +required + * @param idProject Name of the project the resource belongs to. + * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. + * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' * @param body -@return AdminExecutionCreateResponse +@return AdminNamedEntityUpdateResponse */ -func (a *AdminServiceApiService) RelaunchExecution(ctx context.Context, body AdminExecutionRelaunchRequest) (AdminExecutionCreateResponse, *http.Response, error) { +func (a *AdminServiceApiService) UpdateNamedEntity2(ctx context.Context, idOrg string, resourceType string, idProject string, idDomain string, idName string, body AdminNamedEntityUpdateRequest) (AdminNamedEntityUpdateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Post") + localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminExecutionCreateResponse + localVarReturnValue AdminNamedEntityUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/executions/relaunch" + localVarPath := a.client.cfg.BasePath + "/api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}" + localVarPath = strings.Replace(localVarPath, "{"+"id.org"+"}", 
fmt.Sprintf("%v", idOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -5177,7 +11567,7 @@ func (a *AdminServiceApiService) RelaunchExecution(ctx context.Context, body Adm } if localVarHttpResponse.StatusCode == 200 { - var v AdminExecutionCreateResponse + var v AdminNamedEntityUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -5194,29 +11584,25 @@ func (a *AdminServiceApiService) RelaunchExecution(ctx context.Context, body Adm } /* -AdminServiceApiService Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. +AdminServiceApiService Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User or system provided value for the resource. + * @param id Globally unique project name. 
* @param body -@return AdminExecutionTerminateResponse +@return AdminProjectUpdateResponse */ -func (a *AdminServiceApiService) TerminateExecution(ctx context.Context, idProject string, idDomain string, idName string, body AdminExecutionTerminateRequest) (AdminExecutionTerminateResponse, *http.Response, error) { +func (a *AdminServiceApiService) UpdateProject(ctx context.Context, id string, body AdminProject) (AdminProjectUpdateResponse, *http.Response, error) { var ( - localVarHttpMethod = strings.ToUpper("Delete") + localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminExecutionTerminateResponse + localVarReturnValue AdminProjectUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/executions/{id.project}/{id.domain}/{id.name}" - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/projects/{id}" + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", fmt.Sprintf("%v", id), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -5272,7 +11658,7 @@ func (a *AdminServiceApiService) TerminateExecution(ctx context.Context, idProje } if localVarHttpResponse.StatusCode == 200 { - var v AdminExecutionTerminateResponse + var v AdminProjectUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -5289,29 +11675,27 @@ func (a *AdminServiceApiService) TerminateExecution(ctx context.Context, idProje } /* -AdminServiceApiService Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. 
+AdminServiceApiService Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User or system provided value for the resource. + * @param org Optional, org key applied to the resource. + * @param id Globally unique project name. * @param body -@return AdminExecutionUpdateResponse +@return AdminProjectUpdateResponse */ -func (a *AdminServiceApiService) UpdateExecution(ctx context.Context, idProject string, idDomain string, idName string, body AdminExecutionUpdateRequest) (AdminExecutionUpdateResponse, *http.Response, error) { +func (a *AdminServiceApiService) UpdateProject2(ctx context.Context, org string, id string, body AdminProject) (AdminProjectUpdateResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminExecutionUpdateResponse + localVarReturnValue AdminProjectUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/executions/{id.project}/{id.domain}/{id.name}" - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/projects/org/{org}/{id}" + localVarPath = 
strings.Replace(localVarPath, "{"+"org"+"}", fmt.Sprintf("%v", org), -1) + localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", fmt.Sprintf("%v", id), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -5367,7 +11751,7 @@ func (a *AdminServiceApiService) UpdateExecution(ctx context.Context, idProject } if localVarHttpResponse.StatusCode == 200 { - var v AdminExecutionUpdateResponse + var v AdminProjectUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -5384,31 +11768,25 @@ func (a *AdminServiceApiService) UpdateExecution(ctx context.Context, idProject } /* -AdminServiceApiService Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. +AdminServiceApiService Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. - * @param idVersion Specific version of the resource. + * @param attributesProject Unique project id for which this set of attributes will be applied. 
* @param body -@return AdminLaunchPlanUpdateResponse +@return AdminProjectAttributesUpdateResponse */ -func (a *AdminServiceApiService) UpdateLaunchPlan(ctx context.Context, idProject string, idDomain string, idName string, idVersion string, body AdminLaunchPlanUpdateRequest) (AdminLaunchPlanUpdateResponse, *http.Response, error) { +func (a *AdminServiceApiService) UpdateProjectAttributes(ctx context.Context, attributesProject string, body AdminProjectAttributesUpdateRequest) (AdminProjectAttributesUpdateResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminLaunchPlanUpdateResponse + localVarReturnValue AdminProjectAttributesUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}" - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.version"+"}", fmt.Sprintf("%v", idVersion), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/project_attributes/{attributes.project}" + localVarPath = strings.Replace(localVarPath, "{"+"attributes.project"+"}", fmt.Sprintf("%v", attributesProject), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -5464,7 +11842,7 @@ func (a *AdminServiceApiService) UpdateLaunchPlan(ctx context.Context, idProject } if localVarHttpResponse.StatusCode == 200 { - var v AdminLaunchPlanUpdateResponse + var v AdminProjectAttributesUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ 
-5481,31 +11859,27 @@ func (a *AdminServiceApiService) UpdateLaunchPlan(ctx context.Context, idProject } /* -AdminServiceApiService Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. +AdminServiceApiService Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param resourceType Resource type of the metadata to update +required - * @param idProject Name of the project the resource belongs to. - * @param idDomain Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. - * @param idName User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' + * @param attributesOrg Optional, org key applied to the project. + * @param attributesProject Unique project id for which this set of attributes will be applied. 
* @param body -@return AdminNamedEntityUpdateResponse +@return AdminProjectAttributesUpdateResponse */ -func (a *AdminServiceApiService) UpdateNamedEntity(ctx context.Context, resourceType string, idProject string, idDomain string, idName string, body AdminNamedEntityUpdateRequest) (AdminNamedEntityUpdateResponse, *http.Response, error) { +func (a *AdminServiceApiService) UpdateProjectAttributes2(ctx context.Context, attributesOrg string, attributesProject string, body AdminProjectAttributesUpdateRequest) (AdminProjectAttributesUpdateResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminNamedEntityUpdateResponse + localVarReturnValue AdminProjectAttributesUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" - localVarPath = strings.Replace(localVarPath, "{"+"resource_type"+"}", fmt.Sprintf("%v", resourceType), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.project"+"}", fmt.Sprintf("%v", idProject), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.domain"+"}", fmt.Sprintf("%v", idDomain), -1) - localVarPath = strings.Replace(localVarPath, "{"+"id.name"+"}", fmt.Sprintf("%v", idName), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}" + localVarPath = strings.Replace(localVarPath, "{"+"attributes.org"+"}", fmt.Sprintf("%v", attributesOrg), -1) + localVarPath = strings.Replace(localVarPath, "{"+"attributes.project"+"}", fmt.Sprintf("%v", attributesProject), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -5561,7 +11935,7 @@ func (a *AdminServiceApiService) UpdateNamedEntity(ctx context.Context, resource } if localVarHttpResponse.StatusCode == 200 { - var v 
AdminNamedEntityUpdateResponse + var v AdminProjectAttributesUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -5578,25 +11952,27 @@ func (a *AdminServiceApiService) UpdateNamedEntity(ctx context.Context, resource } /* -AdminServiceApiService Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. +AdminServiceApiService Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). - * @param id Globally unique project name. + * @param attributesProject Unique project id for which this set of attributes will be applied. + * @param attributesDomain Unique domain id for which this set of attributes will be applied. 
* @param body -@return AdminProjectUpdateResponse +@return AdminProjectDomainAttributesUpdateResponse */ -func (a *AdminServiceApiService) UpdateProject(ctx context.Context, id string, body AdminProject) (AdminProjectUpdateResponse, *http.Response, error) { +func (a *AdminServiceApiService) UpdateProjectDomainAttributes(ctx context.Context, attributesProject string, attributesDomain string, body AdminProjectDomainAttributesUpdateRequest) (AdminProjectDomainAttributesUpdateResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminProjectUpdateResponse + localVarReturnValue AdminProjectDomainAttributesUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/projects/{id}" - localVarPath = strings.Replace(localVarPath, "{"+"id"+"}", fmt.Sprintf("%v", id), -1) + localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"attributes.project"+"}", fmt.Sprintf("%v", attributesProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"attributes.domain"+"}", fmt.Sprintf("%v", attributesDomain), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -5652,7 +12028,7 @@ func (a *AdminServiceApiService) UpdateProject(ctx context.Context, id string, b } if localVarHttpResponse.StatusCode == 200 { - var v AdminProjectUpdateResponse + var v AdminProjectDomainAttributesUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -5669,25 +12045,29 @@ func (a *AdminServiceApiService) UpdateProject(ctx context.Context, id string, b } /* -AdminServiceApiService Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level 
+AdminServiceApiService Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param attributesOrg Optional, org key applied to the attributes. * @param attributesProject Unique project id for which this set of attributes will be applied. + * @param attributesDomain Unique domain id for which this set of attributes will be applied. * @param body -@return AdminProjectAttributesUpdateResponse +@return AdminProjectDomainAttributesUpdateResponse */ -func (a *AdminServiceApiService) UpdateProjectAttributes(ctx context.Context, attributesProject string, body AdminProjectAttributesUpdateRequest) (AdminProjectAttributesUpdateResponse, *http.Response, error) { +func (a *AdminServiceApiService) UpdateProjectDomainAttributes2(ctx context.Context, attributesOrg string, attributesProject string, attributesDomain string, body AdminProjectDomainAttributesUpdateRequest) (AdminProjectDomainAttributesUpdateResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminProjectAttributesUpdateResponse + localVarReturnValue AdminProjectDomainAttributesUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/project_attributes/{attributes.project}" + localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}" + localVarPath = strings.Replace(localVarPath, "{"+"attributes.org"+"}", fmt.Sprintf("%v", attributesOrg), -1) localVarPath = strings.Replace(localVarPath, "{"+"attributes.project"+"}", fmt.Sprintf("%v", attributesProject), -1) + localVarPath = strings.Replace(localVarPath, "{"+"attributes.domain"+"}", fmt.Sprintf("%v", 
attributesDomain), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -5743,7 +12123,7 @@ func (a *AdminServiceApiService) UpdateProjectAttributes(ctx context.Context, at } if localVarHttpResponse.StatusCode == 200 { - var v AdminProjectAttributesUpdateResponse + var v AdminProjectDomainAttributesUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ -5760,27 +12140,29 @@ func (a *AdminServiceApiService) UpdateProjectAttributes(ctx context.Context, at } /* -AdminServiceApiService Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +AdminServiceApiService Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). * @param attributesProject Unique project id for which this set of attributes will be applied. * @param attributesDomain Unique domain id for which this set of attributes will be applied. + * @param attributesWorkflow Workflow name for which this set of attributes will be applied. 
* @param body -@return AdminProjectDomainAttributesUpdateResponse +@return AdminWorkflowAttributesUpdateResponse */ -func (a *AdminServiceApiService) UpdateProjectDomainAttributes(ctx context.Context, attributesProject string, attributesDomain string, body AdminProjectDomainAttributesUpdateRequest) (AdminProjectDomainAttributesUpdateResponse, *http.Response, error) { +func (a *AdminServiceApiService) UpdateWorkflowAttributes(ctx context.Context, attributesProject string, attributesDomain string, attributesWorkflow string, body AdminWorkflowAttributesUpdateRequest) (AdminWorkflowAttributesUpdateResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} localVarFileName string localVarFileBytes []byte - localVarReturnValue AdminProjectDomainAttributesUpdateResponse + localVarReturnValue AdminWorkflowAttributesUpdateResponse ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}" + localVarPath := a.client.cfg.BasePath + "/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}" localVarPath = strings.Replace(localVarPath, "{"+"attributes.project"+"}", fmt.Sprintf("%v", attributesProject), -1) localVarPath = strings.Replace(localVarPath, "{"+"attributes.domain"+"}", fmt.Sprintf("%v", attributesDomain), -1) + localVarPath = strings.Replace(localVarPath, "{"+"attributes.workflow"+"}", fmt.Sprintf("%v", attributesWorkflow), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -5836,7 +12218,7 @@ func (a *AdminServiceApiService) UpdateProjectDomainAttributes(ctx context.Conte } if localVarHttpResponse.StatusCode == 200 { - var v AdminProjectDomainAttributesUpdateResponse + var v AdminWorkflowAttributesUpdateResponse err = a.client.decode(&v, localVarBody, localVarHttpResponse.Header.Get("Content-Type")); if err != nil { newErr.error = err.Error() @@ 
-5855,6 +12237,7 @@ func (a *AdminServiceApiService) UpdateProjectDomainAttributes(ctx context.Conte /* AdminServiceApiService Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. * @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + * @param attributesOrg Optional, org key applied to the attributes. * @param attributesProject Unique project id for which this set of attributes will be applied. * @param attributesDomain Unique domain id for which this set of attributes will be applied. * @param attributesWorkflow Workflow name for which this set of attributes will be applied. @@ -5862,7 +12245,7 @@ AdminServiceApiService Creates or updates custom :ref:`ref_flyteidl.admin.M @return AdminWorkflowAttributesUpdateResponse */ -func (a *AdminServiceApiService) UpdateWorkflowAttributes(ctx context.Context, attributesProject string, attributesDomain string, attributesWorkflow string, body AdminWorkflowAttributesUpdateRequest) (AdminWorkflowAttributesUpdateResponse, *http.Response, error) { +func (a *AdminServiceApiService) UpdateWorkflowAttributes2(ctx context.Context, attributesOrg string, attributesProject string, attributesDomain string, attributesWorkflow string, body AdminWorkflowAttributesUpdateRequest) (AdminWorkflowAttributesUpdateResponse, *http.Response, error) { var ( localVarHttpMethod = strings.ToUpper("Put") localVarPostBody interface{} @@ -5872,7 +12255,8 @@ func (a *AdminServiceApiService) UpdateWorkflowAttributes(ctx context.Context, a ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}" + localVarPath := a.client.cfg.BasePath + "/api/v1/workflow_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}/{attributes.workflow}" + localVarPath = 
strings.Replace(localVarPath, "{"+"attributes.org"+"}", fmt.Sprintf("%v", attributesOrg), -1) localVarPath = strings.Replace(localVarPath, "{"+"attributes.project"+"}", fmt.Sprintf("%v", attributesProject), -1) localVarPath = strings.Replace(localVarPath, "{"+"attributes.domain"+"}", fmt.Sprintf("%v", attributesDomain), -1) localVarPath = strings.Replace(localVarPath, "{"+"attributes.workflow"+"}", fmt.Sprintf("%v", attributesWorkflow), -1) diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_dynamic_node_workflow_response.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_dynamic_node_workflow_response.go new file mode 100644 index 0000000000..a431f23d17 --- /dev/null +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_dynamic_node_workflow_response.go @@ -0,0 +1,14 @@ +/* + * flyteidl/service/admin.proto + * + * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) + * + * API version: version not set + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package flyteadmin + +type AdminDynamicNodeWorkflowResponse struct { + CompiledWorkflow *CoreCompiledWorkflowClosure `json:"compiled_workflow,omitempty"` +} diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_execution_create_request.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_execution_create_request.go index b8da1d4fca..c7eff0aded 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_execution_create_request.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_execution_create_request.go @@ -16,4 +16,6 @@ type AdminExecutionCreateRequest struct { Name string `json:"name,omitempty"` Spec *AdminExecutionSpec `json:"spec,omitempty"` Inputs *CoreLiteralMap `json:"inputs,omitempty"` + // Optional, org key applied to the resource. 
+ Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_matchable_attributes_configuration.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_matchable_attributes_configuration.go index df6a99884c..a9381abc9b 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_matchable_attributes_configuration.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_matchable_attributes_configuration.go @@ -9,11 +9,13 @@ package flyteadmin -// Represents a custom set of attributes applied for either a domain; a domain and project; or domain, project and workflow name. These are used to override system level defaults for kubernetes cluster resource management, default execution values, and more all across different levels of specificity. +// Represents a custom set of attributes applied for either a domain (and optional org); a domain and project (and optional org); or domain, project and workflow name (and optional org). These are used to override system level defaults for kubernetes cluster resource management, default execution values, and more all across different levels of specificity. type AdminMatchableAttributesConfiguration struct { Attributes *AdminMatchingAttributes `json:"attributes,omitempty"` Domain string `json:"domain,omitempty"` Project string `json:"project,omitempty"` Workflow string `json:"workflow,omitempty"` LaunchPlan string `json:"launch_plan,omitempty"` + // Optional, org key applied to the resource. 
+ Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_named_entity_identifier.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_named_entity_identifier.go index 9fa96c57b2..ac55ee330b 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_named_entity_identifier.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_named_entity_identifier.go @@ -16,4 +16,6 @@ type AdminNamedEntityIdentifier struct { // Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. Domain string `json:"domain,omitempty"` Name string `json:"name,omitempty"` + // Optional, org key applied to the resource. + Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project.go index 59da87f6cc..fafa42e708 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project.go @@ -20,4 +20,6 @@ type AdminProject struct { // Leverage Labels from flyteidl.admin.common.proto to tag projects with ownership information. Labels *AdminLabels `json:"labels,omitempty"` State *ProjectProjectState `json:"state,omitempty"` + // Optional, org key applied to the resource. + Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_attributes.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_attributes.go index a719e4737b..7772a1b781 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_attributes.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_attributes.go @@ -13,4 +13,6 @@ type AdminProjectAttributes struct { // Unique project id for which this set of attributes will be applied. 
Project string `json:"project,omitempty"` MatchingAttributes *AdminMatchingAttributes `json:"matching_attributes,omitempty"` + // Optional, org key applied to the project. + Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_attributes_delete_request.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_attributes_delete_request.go index 3081254e92..c94bdf8d78 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_attributes_delete_request.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_attributes_delete_request.go @@ -12,4 +12,6 @@ package flyteadmin type AdminProjectAttributesDeleteRequest struct { Project string `json:"project,omitempty"` ResourceType *AdminMatchableResource `json:"resource_type,omitempty"` + // Optional, org key applied to the project. + Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_domain_attributes.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_domain_attributes.go index 8cc141bc42..7f25ab536c 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_domain_attributes.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_domain_attributes.go @@ -15,4 +15,6 @@ type AdminProjectDomainAttributes struct { // Unique domain id for which this set of attributes will be applied. Domain string `json:"domain,omitempty"` MatchingAttributes *AdminMatchingAttributes `json:"matching_attributes,omitempty"` + // Optional, org key applied to the attributes. 
+ Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_domain_attributes_delete_request.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_domain_attributes_delete_request.go index 00b251417d..6431b0ffc9 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_domain_attributes_delete_request.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_project_domain_attributes_delete_request.go @@ -13,4 +13,6 @@ type AdminProjectDomainAttributesDeleteRequest struct { Project string `json:"project,omitempty"` Domain string `json:"domain,omitempty"` ResourceType *AdminMatchableResource `json:"resource_type,omitempty"` + // Optional, org key applied to the attributes. + Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_workflow_attributes.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_workflow_attributes.go index dba5c88346..f8fdf30748 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_workflow_attributes.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_workflow_attributes.go @@ -17,4 +17,6 @@ type AdminWorkflowAttributes struct { // Workflow name for which this set of attributes will be applied. Workflow string `json:"workflow,omitempty"` MatchingAttributes *AdminMatchingAttributes `json:"matching_attributes,omitempty"` + // Optional, org key applied to the attributes. 
+ Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_workflow_attributes_delete_request.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_workflow_attributes_delete_request.go index 31bca658db..26df12fc94 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_workflow_attributes_delete_request.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_admin_workflow_attributes_delete_request.go @@ -14,4 +14,6 @@ type AdminWorkflowAttributesDeleteRequest struct { Domain string `json:"domain,omitempty"` Workflow string `json:"workflow,omitempty"` ResourceType *AdminMatchableResource `json:"resource_type,omitempty"` + // Optional, org key applied to the attributes. + Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_artifact_binding_data.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_artifact_binding_data.go index fe0083167e..e9df809c3b 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_artifact_binding_data.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_artifact_binding_data.go @@ -12,5 +12,6 @@ package flyteadmin type CoreArtifactBindingData struct { Index int64 `json:"index,omitempty"` PartitionKey string `json:"partition_key,omitempty"` + BindToTimePartition bool `json:"bind_to_time_partition,omitempty"` Transform string `json:"transform,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_artifact_id.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_artifact_id.go index 13c054e25a..701466b9c3 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_artifact_id.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_artifact_id.go @@ -12,5 +12,8 @@ package flyteadmin type CoreArtifactId struct { ArtifactKey *CoreArtifactKey `json:"artifact_key,omitempty"` Version string 
`json:"version,omitempty"` + // Think of a partition as a tag on an Artifact, except it's a key-value pair. Different partitions naturally have different versions (execution ids). Partitions *CorePartitions `json:"partitions,omitempty"` + // There is no such thing as an empty time partition - if it's not set, then there is no time partition. + TimePartition *CoreTimePartition `json:"time_partition,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_catalog_cache_status.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_catalog_cache_status.go index 6e50baf228..855a733fbc 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_catalog_cache_status.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_catalog_cache_status.go @@ -8,7 +8,7 @@ */ package flyteadmin -// CoreCatalogCacheStatus : - CACHE_DISABLED: Used to indicate that caching was disabled - CACHE_MISS: Used to indicate that the cache lookup resulted in no matches - CACHE_HIT: used to indicate that the associated artifact was a result of a previous execution - CACHE_POPULATED: used to indicate that the resultant artifact was added to the cache - CACHE_LOOKUP_FAILURE: Used to indicate that cache lookup failed because of an error - CACHE_PUT_FAILURE: Used to indicate that cache lookup failed because of an error - CACHE_SKIPPED: Used to indicate the cache lookup was skipped +// CoreCatalogCacheStatus : - CACHE_DISABLED: Used to indicate that caching was disabled - CACHE_MISS: Used to indicate that the cache lookup resulted in no matches - CACHE_HIT: used to indicate that the associated artifact was a result of a previous execution - CACHE_POPULATED: used to indicate that the resultant artifact was added to the cache - CACHE_LOOKUP_FAILURE: Used to indicate that cache lookup failed because of an error - CACHE_PUT_FAILURE: Used to indicate that cache lookup failed because of an error - CACHE_SKIPPED: Used to indicate the cache lookup was 
skipped - CACHE_EVICTED: Used to indicate that the cache was evicted type CoreCatalogCacheStatus string // List of coreCatalogCacheStatus @@ -20,4 +20,5 @@ const ( CoreCatalogCacheStatusLOOKUP_FAILURE CoreCatalogCacheStatus = "CACHE_LOOKUP_FAILURE" CoreCatalogCacheStatusPUT_FAILURE CoreCatalogCacheStatus = "CACHE_PUT_FAILURE" CoreCatalogCacheStatusSKIPPED CoreCatalogCacheStatus = "CACHE_SKIPPED" + CoreCatalogCacheStatusEVICTED CoreCatalogCacheStatus = "CACHE_EVICTED" ) diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_identifier.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_identifier.go index 36a08549cb..33cc281aa9 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_identifier.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_identifier.go @@ -21,4 +21,6 @@ type CoreIdentifier struct { Name string `json:"name,omitempty"` // Specific version of the resource. Version string `json:"version,omitempty"` + // Optional, org key applied to the resource. 
+ Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_label_value.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_label_value.go index 772c27188e..12e4783cfd 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_label_value.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_label_value.go @@ -9,8 +9,13 @@ package flyteadmin +import ( + "time" +) + type CoreLabelValue struct { StaticValue string `json:"static_value,omitempty"` + TimeValue time.Time `json:"time_value,omitempty"` TriggeredBinding *CoreArtifactBindingData `json:"triggered_binding,omitempty"` InputBinding *CoreInputBindingData `json:"input_binding,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_time_partition.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_time_partition.go new file mode 100644 index 0000000000..ea4002a6f0 --- /dev/null +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_time_partition.go @@ -0,0 +1,14 @@ +/* + * flyteidl/service/admin.proto + * + * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) + * + * API version: version not set + * Generated by: Swagger Codegen (https://github.com/swagger-api/swagger-codegen.git) + */ + +package flyteadmin + +type CoreTimePartition struct { + Value *CoreLabelValue `json:"value,omitempty"` +} diff --git a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_workflow_execution_identifier.go b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_workflow_execution_identifier.go index cf61bea6ca..92f6945956 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_workflow_execution_identifier.go +++ b/flyteidl/gen/pb-go/flyteidl/service/flyteadmin/model_core_workflow_execution_identifier.go @@ -16,4 +16,6 @@ type CoreWorkflowExecutionIdentifier struct { Domain string `json:"domain,omitempty"` // User or system 
provided value for the resource. Name string `json:"name,omitempty"` + // Optional, org key applied to the resource. + Org string `json:"org,omitempty"` } diff --git a/flyteidl/gen/pb-go/flyteidl/service/openapi.go b/flyteidl/gen/pb-go/flyteidl/service/openapi.go index 84081d9498..b7b2aef4e0 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/openapi.go +++ b/flyteidl/gen/pb-go/flyteidl/service/openapi.go @@ -78,7 +78,7 @@ func (fi bindataFileInfo) Sys() interface{} { return nil } -var _adminSwaggerJson = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xec\xfd\x7b\x73\xeb\xb8\x95\x2f\x0c\xff\x3f\x9f\x02\x67\xcf\xa9\xea\xee\x44\xb6\x3b\xc9\x4c\xde\x94\xa7\x4e\xbd\x8f\xda\xd6\xde\xad\xd3\xbe\xc5\x97\xee\xe9\x67\x94\x52\x43\x24\x24\x21\x26\x01\x06\x00\xed\xad\x4e\xe5\xbb\x3f\x85\x85\x0b\x41\x8a\x94\xa8\x8b\x6d\x79\x37\x67\xaa\xd2\xde\x22\x89\xeb\xc2\xc2\xba\xfe\xd6\x3f\xff\x0d\xa1\x0f\xf2\x19\xcf\x66\x44\x7c\x38\x45\x1f\xfe\x78\xfc\xed\x87\x9e\xfe\x8d\xb2\x29\xff\x70\x8a\xf4\x73\x84\x3e\x28\xaa\x12\xa2\x9f\x4f\x93\x85\x22\x34\x4e\x4e\x24\x11\x4f\x34\x22\x27\x38\x4e\x29\x3b\xce\x04\x57\x1c\x3e\x44\xe8\xc3\x13\x11\x92\x72\xa6\x5f\xb7\x7f\x22\xc6\x15\x92\x44\x7d\xf8\x37\x84\xfe\x05\xcd\xcb\x68\x4e\x52\x22\x3f\x9c\xa2\xff\x31\x1f\xcd\x95\xca\x5c\x03\xfa\x6f\xa9\xdf\xfd\x1b\xbc\x1b\x71\x26\xf3\xd2\xcb\x38\xcb\x12\x1a\x61\x45\x39\x3b\xf9\xbb\xe4\xac\x78\x37\x13\x3c\xce\xa3\x96\xef\x62\x35\x97\xc5\x1c\x4f\x70\x46\x4f\x9e\xfe\x70\x82\x23\x45\x9f\xc8\x38\xc1\x39\x8b\xe6\xe3\x2c\xc1\x4c\x9e\xfc\x93\xc6\x7a\x8e\x7f\x27\x91\xfa\x17\xfc\x23\xe6\x29\xa6\xcc\xfc\xcd\x70\x4a\xfe\xe5\xdb\x41\xe8\xc3\x8c\xa8\xe0\x9f\x7a\xb6\x79\x9a\x62\xb1\xd0\x2b\xf2\x91\xa8\x68\x8e\xd4\x9c\x20\xd3\x0f\x72\x4b\xc4\xa7\x08\xa3\x53\x41\xa6\xa7\xbf\x08\x32\x1d\xbb\x85\x3e\x36\x0b\x7c\x01\xa3\xb9\x49\x30\xfb\xe5\xd8\x2e\x13\xb4\xcc\x33\x22\x60\x6e\xc3\x58\xb7\xfe\x89\xa8\x3e\x34\x5b\xbc\x1f\xbe\x2d\x88\xcc\x38\x93\x44\x96\x86\x87\xd0\x87\x3f\x7e\xfb\x6d\xe5\x27\x84\x3e\xc4\x44\x46\x82\x66\xca\xee\x65\x1f\xc9
\x3c\x8a\x88\x94\xd3\x3c\x41\xae\xa5\x70\x30\x66\xaa\x7a\x63\xf1\x52\x63\x08\x7d\xf8\xdf\x82\x4c\x75\x3b\xff\x7e\x12\x93\x29\x65\x54\xb7\x2b\x0d\xfd\x04\xa3\x2d\x7d\xf5\xaf\x7f\xab\xfb\xfb\x5f\xc1\x8c\x32\x2c\x70\x4a\x14\x11\xc5\x8e\x9b\xff\xab\xcc\x45\xef\x91\xee\xbc\xd8\xc7\xea\xc0\x2b\xb3\xbd\xc2\x29\xd1\x7b\xa2\x77\xca\x7e\x01\x7f\x0b\x22\x79\x2e\x22\x82\x26\x24\xe1\x6c\x26\x91\xe2\x4b\x6b\x40\xa1\x05\x4d\x5e\xd5\x27\x82\xfc\x23\xa7\x82\xe8\xbd\x52\x22\x27\x95\xa7\x6a\x91\xc1\x20\xa5\x12\x94\xcd\xc2\xa5\xf8\x57\xaf\xd5\xd4\x0c\x55\x6e\x30\x33\xf3\x41\xe3\xc4\x46\xac\xef\x5e\x89\x30\x43\x13\x82\xf4\x59\xa4\x31\x11\x24\x46\x58\x22\x8c\x64\x3e\x91\x44\xa1\x67\xaa\xe6\x94\xe9\x7f\x67\x24\xa2\x53\x1a\xb9\x35\x3b\x9c\xb5\x81\x3f\x57\xaf\xcc\x83\x24\x42\x0f\xfc\x89\xc6\x24\x46\x4f\x38\xc9\x09\x9a\x72\x51\x5a\x9e\xe3\x11\xbb\x9f\xeb\x75\x48\x27\x94\xc1\xc9\xd3\x6b\xe9\x28\xe4\xf7\x6e\xb9\x7e\x8f\x74\x7f\x28\x67\xf4\x1f\x39\x49\x16\x88\xc6\x84\x29\x3a\xa5\x44\x56\x5b\xfb\x3d\x87\xfe\x71\x82\x8e\x90\x5e\x67\x22\x14\xac\x37\x67\x8a\x7c\x56\x12\x1d\xa1\x84\x3e\x12\xf4\xd5\x05\x95\x0a\xf5\x6f\x86\x5f\xf5\xd0\x57\xe6\xbc\x20\xe0\x4d\x5f\xbd\xc2\x0a\xfb\xbf\xff\x16\x1c\x3d\x85\x67\xd5\x43\xf7\xa1\xaf\x4f\xf3\x9d\xb9\x1a\x8a\x16\xfe\xf6\x6f\x61\x3b\x76\xbf\x56\xf3\xdb\x82\xd9\x5a\x4e\xdb\x96\xbf\xc2\x32\x95\x59\xab\xd4\x3b\xb4\x2b\x67\xd5\xed\x56\x59\xab\x7c\x5f\xbc\x55\x4f\xe1\xa5\xf9\xeb\x2e\xcc\x15\x2b\xa0\x7a\x4c\x99\x39\x24\xfe\xcc\x08\xa9\xcf\x89\xa3\xde\x03\x61\x29\xbb\xf0\xda\x60\x66\x01\xbb\x75\x5c\x34\x58\x95\x03\x9c\x77\x42\x53\xba\x6e\x7f\x87\x2c\xd6\x22\x97\x65\x76\x2c\x4f\x27\x44\xe8\x65\x70\x6c\x0f\x66\x3b\xd1\x6c\x50\xe5\x82\x91\xb8\xc5\x34\xff\x91\x13\xb1\x58\x31\xcf\x29\x4e\x64\xd3\x44\x29\x53\x44\xcb\xb7\x95\xc7\x53\x2e\x52\xac\xec\x0b\x7f\xfe\x8f\x4d\x17\x42\xf1\x47\xb2\x6e\xff\x87\x66\x37\x23\x2c\x81\x0c\xd2\x3c\x51\x34\x4b\x08\xca\xf0\x8c\x48\xbb\x22\x79\xa2\x64\x0f\x5e\xd3\x32\x35\x11\x47\xfe\x06\x82\x1e\xdc\xcd\x9b\x4b\xf8\x05\x4d\xbd\x00\xc9\xc8\x67\x05
\x2d\x8d\x18\xdc\xbd\xb0\x44\xe1\x8d\xf2\x02\x4b\xb9\x1d\xcd\x48\x2e\xd4\x78\xb2\x38\x7e\x24\x4b\xfd\x36\x52\x0e\x66\x08\x2b\x25\xe8\x24\x57\x44\xcf\x5b\xb7\xe1\xee\x4e\x60\x8f\xe6\x82\x6e\xc3\x1a\xde\x6e\xc2\x31\x15\x24\x82\xb9\x6d\x72\x60\xfc\x57\x7a\xde\x5a\x7f\x59\x98\xd9\x3f\x92\x05\xc8\x23\x35\x2b\xe0\xb7\x7c\xc4\x46\x0c\x1d\xa1\xf3\xc1\xdd\xd9\xe0\xea\x7c\x78\xf5\xe9\x14\x7d\xb7\x40\x31\x99\xe2\x3c\x51\x3d\x34\xa5\x24\x89\x25\xc2\x82\x40\x93\x24\xd6\x32\x87\x1e\x0c\x61\x31\x65\x33\xc4\x45\x4c\xc4\xcb\x2d\x63\xe5\x29\x61\x79\x5a\xb9\x57\xe0\xf7\x62\xf4\x95\x2f\xb4\x88\xe1\x1f\x95\x9e\xfc\x6d\x69\x81\x61\xc6\xba\xef\xa0\xb5\x57\x13\x6a\xa2\x39\x4d\x62\x41\xd8\x89\xc2\xf2\x71\x4c\x3e\x93\x28\x37\x77\xf2\x3f\xcb\x3f\x8c\xb5\x64\xca\x63\x52\xfe\xa5\xf4\x8f\x42\x14\xda\xf8\x53\xaf\xa5\x6e\xfc\x25\xe8\xb4\xed\xbe\x83\x5f\x68\x5c\xfb\x36\xfc\xb2\x66\x0e\xee\x9d\x15\x83\x75\xaf\x34\x8e\xca\xbd\x60\x25\xbe\xda\x77\x04\x51\x62\x31\xc6\x4a\x91\x34\x53\x1b\xea\xeb\x18\x25\x5a\xae\x5c\x25\x47\x5e\xf1\x98\x0c\x5c\x7f\xbf\x20\x23\xce\x92\x18\x4d\x16\x96\x6b\x4d\x89\x20\x2c\x22\xcd\x2d\xdc\x63\xf9\x58\xb4\xb0\x4e\x18\x2d\xf5\x27\x3f\x72\xa1\x3f\x7f\x0f\x02\x69\x69\xe0\xaf\x21\x93\x6e\x7b\xe2\xbe\x38\x0b\xc1\x96\xfc\xa3\xb3\x27\xec\xbe\x92\x6d\xad\x0f\x5c\x20\xb9\x90\x8a\xa4\x6b\xed\x10\xef\x67\x21\xec\x05\x71\xa8\x03\xae\xdc\x51\xbf\x81\x53\x5f\xbe\x71\xbb\xe3\xbd\xc1\x92\xed\xcb\x8a\x78\xe8\xf3\x74\x3e\x9c\xd5\x53\xbd\x73\xdb\x17\x38\x31\xde\xc5\x34\x4b\xb2\xe0\xbe\x07\xf9\x42\xe6\x86\xc6\xbd\x72\xab\x3d\x86\x01\xac\x51\x34\xcb\x76\x68\x7f\xfe\xf4\xa7\xa1\x85\xc6\x98\xe3\xd4\x9c\xca\xc0\x58\x85\x22\x2e\x8c\x2c\x18\xdb\xf3\x6e\x74\xcd\xfe\x7d\xff\x6e\x70\x7f\x8a\xfa\x28\xc6\x0a\xeb\x03\x2e\x48\x26\x88\x24\x4c\x81\x1e\xaf\xbf\x57\x0b\x94\xf2\x98\x24\x46\xe3\xfc\xa8\x25\x5f\x74\x8e\x15\x3e\xc3\x0a\x27\x7c\x76\x8c\xfa\xf0\x4f\xfd\x31\x95\x08\x27\x92\x23\xec\xc8\x8a\xc4\xae\x09\xcc\x62\xc7\x5a\x30\x8a\x78\x9a\xd1\xc4\xdb\xe0\xbd\x71\x85\xb2\x98\x3e\xd1\x38\xc7\x09\xe2\x13\xcd\x55
\xb4\x86\x3c\x78\x22\x4c\xe5\x38\x49\x16\x08\x27\x09\xb2\xdd\xba\x17\x90\x9c\xf3\x3c\x89\x75\xbb\x6e\x94\x92\xa6\x34\xc1\x42\xab\xe0\x66\xb4\xd7\xb6\x2d\x74\x3f\x27\x7e\xac\x30\x2e\xbd\x9a\x29\x7e\x24\x12\x51\x85\x32\x2e\x25\x9d\x24\xc5\x99\x7f\x18\x22\x18\xf7\xd9\xc5\x10\xf4\xf9\x48\x21\x6e\x78\xa8\xeb\xdc\xda\x6f\x5c\x8f\x29\x66\x8c\x40\xc7\x5c\xcd\x89\xb0\xdd\xdb\x97\xdf\x5a\x35\x7f\xb8\xba\xbb\x19\x9c\x0d\x3f\x0e\x07\xe7\xcb\xba\xf9\x7d\xff\xee\x87\xe5\x5f\x7f\xba\xbe\xfd\xe1\xe3\xc5\xf5\x4f\xcb\x4f\x2e\xfa\x0f\x57\x67\xdf\x8f\x6f\x2e\xfa\x57\xcb\x0f\x2d\x59\xb5\x56\xf3\xc3\x91\x6d\x78\xb6\x3a\x9b\xe6\x4b\xd9\x34\x7b\x5f\xae\x51\x73\x4a\x13\xd0\x41\x5b\x1b\x34\xbd\x0d\xc1\x7e\x89\x32\x2c\xa5\x91\x8c\xcc\x08\x8e\x47\xec\x92\x0b\xcd\xc0\xa6\x5c\xf3\x08\x2d\x3d\x29\x91\x47\x8a\xb2\x99\xff\xe8\x14\x8d\xf2\x6f\xbf\xfd\x53\x74\x41\xd9\x23\xfc\x45\x0e\x71\x71\x3a\x8b\x6f\x67\xf1\xfd\x6d\x59\x7c\xb5\xe8\x73\x12\x1a\x7a\xf7\x1b\x32\xa4\x85\x0b\x96\xe5\x0a\x44\x09\x9e\x2b\xfd\xa7\xee\x12\xc8\x63\x45\xe0\x50\x3b\x83\xe2\x27\xa2\xfc\x8b\x5a\xb4\x79\x0f\x76\xc4\x9f\xb8\x78\x9c\x26\xfc\xd9\x0f\xfc\x13\x51\x7a\xec\xb7\xb6\x97\x2e\x94\xa8\x0b\x25\x7a\xdb\x50\xa2\x83\x32\xe6\xbd\x3c\xf3\x2b\x5b\xfe\x0c\x07\x6c\xf0\x64\x35\x3a\xaa\x1a\xfc\x50\x81\x9b\xe9\x55\xb8\x66\xd9\x99\xb3\x86\x73\x96\x5e\x7e\x2f\xdc\xb3\x34\xe8\xd7\xe7\x9c\xbf\x09\x7f\x4b\xe7\x4e\xd9\x72\xa1\xde\x25\x83\x6d\x79\x77\xbc\x9a\x33\xe4\xe5\x19\xfe\x52\x6c\xc3\x26\xc1\x0c\x1b\x44\x2f\xb4\x0e\x57\x58\x13\x9f\x50\x1b\x90\x50\x17\x81\xb0\x1c\x72\x50\x1b\x63\xb0\x5b\x50\xc1\xb6\x77\x53\xfb\x30\x81\x4f\x44\x95\x5e\x7e\x2f\x77\x53\x69\xd0\xaf\x7f\x37\xfd\x46\xa3\x03\xba\x70\x80\x17\x5c\xba\x2f\xfd\x46\x3b\x5c\x87\xff\x6f\xc0\xc3\xdf\xb9\xf4\x37\x5a\xa3\x2f\xcb\x87\xff\xa5\x3a\xed\xdf\xa7\x97\xbe\x73\xcb\x77\x6e\xf9\xb7\xf0\x9f\xbc\x3f\xb7\xfc\xcb\xaa\xa7\xc5\xf1\x1a\x3b\x5a\xb0\xfa\x5a\x70\x28\xff\xd5\xc2\x49\x03\x7f\x39\x95\x6f\xd3\xa0\xf1\x46\x1d\xee\xbc\x18\xdf\x00\x8e\xd0\x2f\x96\x90\xd6\xa8\x73\x4b\xdf\xbd\x07\x75
\x6e\x79\xd0\x2f\xaf\xc3\xbd\x19\xf3\x7d\xa1\xcb\xf3\x9d\xb0\x81\xcd\x6f\xcb\x2f\x58\x26\xef\x64\xf1\x97\xcf\xc6\x3f\x98\x09\xbd\x1f\xd9\xfb\x0d\x2e\xde\x96\xb7\xee\xde\x73\xb2\x6a\xae\xd9\xe0\x76\x5a\x97\x61\x55\xfd\x9a\x12\xf9\xc7\x77\x79\xdf\xbe\x46\x92\x55\x77\xe1\x76\x17\xae\x6d\xaa\xbb\x70\xbf\xe0\x0b\xf7\xe0\xe0\x6f\x0e\x26\x02\xb4\x0b\x22\xef\x80\x31\xba\x18\xf2\x3d\x2e\x4e\x17\x43\xde\xc5\x90\xff\xc6\x62\xc8\x77\xd1\x9e\xb6\xc5\xa2\x7c\x0b\x3d\xaa\x53\xa3\x3a\x35\x2a\xfc\xbd\x53\xa3\x3a\x35\xaa\x53\xa3\xbe\x70\x14\xd1\x4e\x87\x6a\xbf\x10\x9d\x0e\xd5\x7a\xa9\x3a\x1d\x6a\xc5\xe2\x74\x3a\x54\xa7\x43\xfd\xb6\x74\x28\xf2\x44\x98\x92\x90\x8c\x16\x6a\x14\x1f\x32\x2e\x9b\x35\xa1\x90\x3b\xd4\x68\x41\xd0\x66\x39\x29\x0c\x02\x97\x7e\x41\x73\x2c\x11\x8f\xa2\x5c\x54\xce\x40\x55\x0f\x3a\x13\x04\x2b\x02\x2d\xe8\x0f\xdf\x83\xfe\xb3\x3c\xdd\xd7\x8a\xc1\x9f\xf0\x78\x89\xda\xcd\x41\xa8\x7b\xb2\x5a\x1e\xd9\xdb\xd4\xff\x91\x93\x76\xea\xdf\x0b\x12\xb5\xc2\xf2\x71\xcf\x44\x5d\xca\xb5\xd8\x8a\xa8\xa1\x85\xf7\x42\xd4\xcb\xd3\xfd\xcd\x10\x75\xdd\xd4\x0f\x81\xa8\x9f\x6d\x1e\xff\x9e\x09\x7b\x09\x1e\x60\x2b\xe2\xf6\xad\xbc\x17\x02\xaf\x9f\xf6\x6f\x86\xc8\x9b\xa6\xff\xb6\x84\xee\x53\x24\x5b\x93\xf8\xbd\xa0\xb3\x99\x56\x33\x40\xc3\xd3\xa4\xb8\xbe\x46\x50\x91\x14\xb8\x96\xac\xfd\xab\xef\x81\xa4\xfd\x60\xcd\xd8\x7f\x33\xb4\xbc\x34\xef\x03\x21\xe2\x13\x41\x22\xfe\x04\xf5\xc2\xda\x11\xf3\x2d\x01\x0a\x06\x7e\x9d\x09\xf2\x44\x79\x2e\x93\xc5\x91\xc8\x19\x72\xcc\x1f\xf9\xe6\x8d\xb5\xfa\x99\x26\x09\xe2\x4c\xeb\x5f\x0a\x0b\xe5\x1e\x6b\xfd\x5b\xf0\x14\x4e\x45\x82\xa5\x42\x8f\x8c\x3f\x33\x34\xc5\x34\xc9\x05\x41\x19\xa7\x4c\x1d\x8f\xd8\x90\xa1\x5b\x33\x46\xc8\x1b\xe8\xa1\x5c\xea\xb3\x14\x61\xc6\xb8\x42\xd1\x1c\xb3\x19\x41\x98\x2d\x6c\x02\x6e\x41\x19\x88\x0b\x94\x67\x31\xd6\x8a\xef\x9c\x54\xa3\xf4\xfc\x18\xc1\x7c\x47\x25\xa2\x12\x91\xcf\x4a\x90\x94\x24\x0b\xdd\x87\xa6\x7d\xc5\x91\x5d\x1f\x33\x54\x9b\xce\x47\x84\xe0\x42\x42\xc6\xc1\x64\xf1\x2b\x66\x8a\x32\x82\x40\x51\x92\xc6\x34\x77\x84\x2e\xb8\x04\xb3
\xcd\x0f\x7f\x91\x28\x4a\x72\xa9\x88\xe8\xa1\x49\x3e\x93\x5a\x53\xcc\x12\xac\xa6\x5c\xa4\x7a\x84\x94\x49\x85\x27\x34\xa1\x6a\xd1\x43\x29\x8e\xe6\xa6\x2d\x58\x03\xd9\x1b\xb1\x98\x3f\x33\xa9\x04\xc1\xbe\x77\xf7\x10\x7d\x1d\x3e\x33\x04\x20\xbf\xe9\x41\xda\x21\x4d\xb5\xba\x1b\x0c\xbf\xd8\x71\xb3\x27\xba\x11\x12\xa3\x09\x89\x70\x2e\xad\x87\x41\x89\x05\x22\x9f\xe7\x38\x97\xb0\x77\x7a\x7a\x36\x67\x23\xe2\x69\x96\x10\x45\x10\x9d\x22\x25\x28\x89\x11\x9e\x61\xaa\x97\xee\x8e\xac\x00\x41\xf7\x44\x6f\x37\xd0\x52\xfd\x2f\xa0\x7e\xa7\x5c\x10\x14\x13\x85\x69\xb2\xd2\xeb\x64\xbf\xed\xb8\xdc\x7b\xe2\x72\xe5\x0d\x3f\x08\x36\x67\x40\xfc\xf7\x70\x69\x33\x6b\xba\x8f\x70\xb2\xe3\xfd\x7d\x6b\x07\xd5\xd1\xf6\xfb\xa2\x6d\xb3\x6b\x87\x43\xdc\xaf\x16\x83\xdd\xbe\xa2\x45\x51\xcd\xe2\x5d\xd1\xf4\x6b\x84\x05\x74\x0e\xe7\xce\xe1\xdc\xb8\x32\xef\xd3\xe1\x7c\x30\x1e\xa3\xce\xe7\xfc\x42\x3e\x67\x2a\x3b\xa7\x73\xe7\x74\x6e\xbb\x40\x9d\xd3\xb9\x73\x3a\xbf\x5f\xa7\xf3\x4b\xe2\x3e\xef\x15\xdd\xf9\x5d\x89\xd6\x9d\x58\xdd\x89\xd5\x1d\x84\xb3\x9f\xda\xbe\x58\x98\xfb\xfa\x43\x4c\x12\xa2\x48\xb3\x3d\x8b\x88\x54\x6b\x0b\xe6\x7a\xa6\x4c\xcb\x71\x33\x41\xa4\xdc\x95\x21\xf9\x86\xdf\x27\x5b\xf2\xc3\xef\xa0\xe6\x3b\x3e\xd5\xf1\xa9\x6d\xa6\x76\x38\xa6\xd9\xe0\x30\xbf\x96\x6d\xd6\xf3\xdf\x2c\x6f\x96\xfe\x1e\x8c\x1b\xb2\xf0\x8b\x1a\x0a\xd7\x52\xbb\xe2\xfe\x70\x5b\x3a\xdf\x91\x1f\x9b\xbe\xde\x27\x33\x36\x63\xef\x38\x71\xc7\x89\x3b\x4e\xfc\xbe\x39\xb1\x3b\xc9\x6f\xea\x22\x33\x7e\xba\x71\x96\x60\x36\xa6\xb1\x3c\xf9\x67\xa1\xcb\xbf\x94\x87\x4c\x1f\xa8\xd8\xa4\x98\xfa\x94\x4e\xf1\x8b\xfe\x24\x29\x0c\xe6\x1e\x33\x73\x8d\x13\xcd\xd8\xd8\x6f\x12\xcc\x86\xf1\xbb\xf0\xa3\xd5\xce\xfe\x35\x7c\x6a\xbb\xf0\x71\xac\xc0\xd3\x81\x29\x33\x26\xbc\x22\xaf\xb6\x64\xa0\x3c\x8c\x13\xbe\x0b\x57\x0f\x26\x16\x30\x76\xc7\xaf\x83\x45\x39\xbc\x69\x77\x7e\x9d\x2e\x97\xb0\xf3\x5c\xb4\x9c\x70\xe7\xb9\x38\x5c\xcf\xc5\x5b\xb9\x23\x5f\xf9\x78\xbe\x96\x58\xd7\x3e\x08\xdf\x44\xab\x41\x50\x6b\x9e\x25\x1c\xc7\xab\x5c\x31\x85\xe0\x15\x82\xa3\xac\x8d\xc4\x2f
\x3e\x7b\x0f\xc2\x5a\x31\xda\xdf\x58\x24\xdf\xf2\xc4\x0f\x45\x4b\x79\xc5\x50\xbe\x7a\x12\xdf\x40\x25\x79\x1f\xf8\xa9\xc5\x78\xbb\xd0\xbe\xce\xa2\xf4\xf6\x16\xa5\x2e\xb4\xaf\x53\x01\x0f\x4c\x05\xec\x42\xfb\xba\xd0\xbe\x4e\x41\x5e\x3d\xed\x4e\x41\xfe\x22\x42\xfb\x5a\x89\xda\x2f\x88\xbd\xb9\xbb\xd0\xdd\xc9\xdc\xee\xbd\x4e\xe6\xee\x64\xee\x2f\x54\xe6\x3e\x8c\x15\xee\x04\xee\x4e\xe0\xee\x04\xee\x4e\xe0\xee\x04\xee\xbd\x2f\x63\x27\x70\xbf\x66\x81\xce\x7a\xa9\x7b\x4d\x92\xcd\x7b\xf5\xe5\x74\xe2\x76\x27\x6e\x1f\xb6\xb8\x7d\x30\x13\x7a\x3f\x65\x1e\xdb\xcd\xa7\x2b\x52\xde\x15\x29\xef\x8a\x94\xbf\x78\x91\x72\xf7\x75\x8b\x8c\x0f\x7b\xb8\x14\x56\xb9\x34\x80\x8f\x82\xcc\xa8\x54\xc0\xfe\xdb\xc8\x2b\xeb\x13\x3d\xde\xab\x9c\xd2\xa5\x7a\xf8\xa7\x9d\xd4\xd2\x49\x2d\xbf\x51\xa9\xe5\x80\x62\xc1\x0e\x22\x63\x25\xc5\x2a\x9a\xe3\x49\x42\xc6\xde\xe8\x23\xdb\xea\xc1\x17\x54\x2a\x89\xa2\x5c\x2a\x9e\x36\x5f\x2e\x97\xae\x87\xbe\xef\xe0\x8c\xb3\x29\x9d\xe5\xe6\x6e\x31\xe0\x9c\xc1\x89\x2e\x24\xc1\x45\x46\xd6\x79\xaa\x6a\x5a\x7f\x17\xd7\x52\xfd\xd0\x5f\xeb\x76\xda\x44\x70\x2f\x8c\x7c\x56\xea\xd6\xb2\xd6\xf8\x76\x70\x77\xfd\x70\x7b\x36\x38\x45\xfd\x2c\x4b\xa8\xb1\xbb\x1b\x52\xa0\xbf\xea\x49\x21\x85\xe5\x63\xb1\x97\xc2\x90\xb9\xc1\xb0\x05\x43\xbf\x96\x8d\xd1\x11\x3a\xbb\x78\xb8\xbb\x1f\xdc\x36\x34\x68\x09\x05\xf2\x56\x49\x9a\x25\x58\x91\x18\x3d\xe6\x13\x22\x18\xd1\xd2\x8e\x45\xba\x2d\xcc\xff\xa6\xd1\xc1\x7f\x0f\xce\x1e\xee\x87\xd7\x57\xe3\xbf\x3e\x0c\x1e\x06\xa7\xc8\x51\x9c\x6e\x56\x8f\x4b\x8f\x22\x5e\x30\x9c\x6a\x0d\x44\xff\x50\x64\xca\xfe\x23\x27\x39\x41\x58\x4a\x3a\x63\x29\x01\x44\xe0\x52\x8b\x6e\xc0\x17\xfd\xef\x06\x17\xe5\x96\xe7\x24\x84\xdf\x45\x09\x9e\x90\xc4\xfa\x23\xc0\xc4\xae\x09\x3d\x80\x2a\x36\x8e\x8a\xdc\xac\xea\x5f\x1f\xfa\x17\xc3\xfb\x9f\xc7\xd7\x1f\xc7\x77\x83\xdb\x1f\x87\x67\x83\xb1\x95\x2a\xcf\xfa\xba\xdf\x52\x4f\x56\xf8\x44\xff\xc8\x71\xa2\xb5\x13\x3e\x75\x78\xbc\xe8\x79\x4e\x18\xca\x19\x50\x9c\x51\x79\xb4\x1e\xe4\x3b\xd5\xa7\xcc\xcc\xe8\xe6\xe2\xe1\xd3\xf0\x6a\x7c\xfd\xe3\xe0\xf6
\x76\x78\x3e\x38\x45\x77\x24\x01\xa5\xc0\x2d\x3a\xec\x62\x96\xe4\x33\xca\x10\x4d\xb3\x84\xe8\xd5\xc0\x36\x9b\x78\x8e\x9f\x28\x17\xf6\xe8\xce\xe8\x13\x61\x66\x1d\xe1\xcc\x42\xfb\x4e\xf8\x1e\x07\x4b\x77\x7d\xf5\x71\xf8\xe9\x14\xf5\xe3\xd8\xcf\x41\x42\x1b\x25\xca\x71\xb0\xce\x47\xe5\x61\x6b\xe6\x00\xdd\x1b\x22\xe2\x4f\x44\x08\x1a\x93\x0a\x1d\xf5\xef\xee\x86\x9f\xae\x2e\x07\x57\xf7\xb0\x62\x4a\xf0\x44\xa2\x39\x7f\x06\x53\x36\xcc\x10\x2c\xdc\x4f\x98\x26\xd0\x99\xdb\x2c\xce\xd0\xf3\x9c\x82\xfb\x03\x80\x99\x7d\xcf\x46\x3f\x13\x39\x7b\x73\xeb\x6c\xe9\xe0\x2d\xab\x2d\xd5\x93\xb4\xfc\x46\xe5\x58\xac\x7a\xa1\x44\xe5\xcb\x2f\xae\xa3\xd6\xe5\x2f\x2a\xe4\xd6\xac\xac\x2d\xd1\x4b\xf3\x4c\x8b\xbd\x6e\xad\xab\x95\xd7\xf0\xf5\xae\x59\xa2\x04\x8d\xe4\xcb\x42\x3d\x89\x9c\x29\x9a\x12\x64\x3b\xb3\x87\x73\x8f\xf0\x4f\x97\xa6\xe1\xf7\x70\xc1\x2e\x95\x72\xf8\x44\x94\x1d\x7e\xa7\x02\x76\x2a\xe0\x61\xa8\x80\xef\x2d\xdb\x3f\x26\xd9\x72\x87\x95\x89\xc1\x3b\xc6\xeb\xb5\x14\xa4\x61\xec\x89\xd6\xa2\x9a\x90\x27\x92\x80\x94\xa7\x04\xd6\x4a\xa3\x95\x5d\x26\x82\xe0\x47\x2d\xf0\xc5\xfc\x39\x94\x5c\x6a\x90\xfb\xd1\x7e\x6e\xe1\x36\x41\x1c\x7f\xfa\xe3\xeb\x5d\x16\x7a\xb9\xe3\xd7\x28\xe1\x7d\x0b\x41\x32\x2b\x31\x02\x83\x04\xfb\x5f\xac\x25\x78\xcd\x6d\x11\x7c\xf1\x1e\x2e\x8a\x70\xb8\x07\xa4\x75\xdd\x86\x4a\xb0\x63\xa1\x29\x51\x38\xc6\x0a\xeb\x43\x33\x23\xea\x18\x5d\x33\x78\x76\x8f\xe5\x63\x0f\xb9\x2b\x4f\xb3\x95\xc2\xca\xf0\x0a\xa9\xf5\xef\xc4\x80\xbf\x39\x1f\xef\xae\xef\xee\xfa\xae\x5f\x99\x2e\xcc\xb3\x61\x85\xf7\x75\x31\x6e\xe4\xf3\xda\xdf\xfd\x65\x5a\x7c\xbf\x57\xd8\xeb\x3a\xb9\xf6\x7a\xa1\x99\xca\x59\xdd\x6d\x65\xfe\xaf\xbb\xad\xba\xdb\xaa\xbb\xad\x0e\x60\x85\xdf\xdc\x61\x58\xc3\xdd\xdf\xd4\x63\xb8\x4e\x3b\xdd\x1a\xf2\xae\xd0\x46\x37\x01\xbd\xfb\xa5\x2d\xb6\x5d\xf1\x0d\x7d\x1f\x3e\xc2\x60\x92\xaf\x91\xd6\xb6\xd7\xcb\xdc\xe4\x8b\x74\xfa\xe9\x0b\xde\xf8\x1d\x02\xe1\x5e\x11\x08\x0f\x63\xae\x2f\x92\x02\xf7\x36\x16\xd3\xb7\x4f\x7b\xeb\xa0\x06\xbb\xc4\xae\x2e\xb1\x0b\x7e\xef\xa0\x06\xf7\x47\xad\x2f\x2b\x5d\xf3
\x98\x8c\x2b\x51\x02\xfe\x9f\xe3\xaa\xe7\xa7\xf4\x24\x74\x03\x95\x1e\x14\x99\x6e\xd0\x3a\x8d\xf7\x59\x44\xea\x8a\xc7\xa4\x75\x24\x41\xe9\xe5\x03\x97\xc1\xdd\x3c\x8d\x2c\x5e\x1a\xf8\x0b\x4b\xe2\x0d\x5b\xfe\x25\x1a\x76\x6a\x08\xb8\xb3\xf2\xac\x5d\xa8\x2f\x35\xbe\xa0\xe0\x50\xef\xc8\x53\xd1\x8e\x8d\xbb\x98\xc6\x71\x03\x33\xaf\x7f\xee\x59\x7a\xfd\xe3\x97\xc1\x0c\x6a\xcf\xd1\xc1\xac\x12\xbe\xfd\x3e\xec\x2a\xe1\x88\x5f\xc3\xb2\xb2\x72\xef\xbf\x38\xae\xbe\x8a\x92\x3b\xde\xde\x72\xb9\xbe\x54\x0e\xdf\x41\xfc\xac\xb2\x75\x74\x18\x3a\x9d\xa9\xe5\x70\x26\xdc\x99\x5a\xde\xb5\xa9\xc5\xb8\x68\xc7\x19\x16\x84\xa9\x1a\x91\xba\x7a\x9d\xc0\xeb\x21\xe6\x82\x93\x3a\xa0\x01\xa4\x25\x5a\x64\x2f\x64\x7f\x55\x7d\x59\xb6\x17\x2b\x18\x04\x99\x90\x27\xff\x2c\xfe\xf6\xc2\x7a\xa9\x02\xc4\x8a\xe8\x24\x83\xf5\x2f\xf5\x1d\x9d\xdb\x40\xa5\xdd\x73\x25\xb1\x2a\x89\x82\x10\x44\xbd\x36\x9e\xe9\xc6\xbc\xfd\xbe\x52\x24\x97\x06\xfd\xba\xb1\x4d\xcb\x1b\xdf\xee\x00\xb9\x9d\xa1\x26\xdd\x2f\xc8\x29\xd3\xd2\x28\x9f\x16\x17\x83\x44\xcf\x34\x49\x00\x51\x04\x32\x1e\x9b\x6e\x80\xdf\x5c\xc4\x43\xe3\xce\xbf\x69\xdc\x43\x1d\x77\xa8\x63\x09\x6d\xec\xa9\xfb\xca\x99\x76\xc4\x06\xe9\xac\xa0\x0d\xad\x31\xc0\x7e\x19\x9c\xe0\x13\x51\xaf\xc5\x06\xb6\x3d\xfb\x2b\xcf\xbd\x20\x53\x22\x08\x8b\xc8\x01\x7a\xdb\x37\x09\x03\xf9\xc9\x4c\xd2\xc6\x80\x78\x28\x81\x70\xaa\x8a\x5b\x3d\xad\x24\xea\x76\x99\xe4\x5d\x26\x79\x97\x49\x5e\x3d\xea\x5d\x26\x79\x97\x49\x5e\x9b\x03\x11\x93\x84\x28\xd2\x28\x55\x9c\xc3\xe3\xb7\x92\x2a\x4c\xef\x5f\x86\x60\x61\xe6\xd2\xc9\x16\xbf\x19\xcd\xc2\x6d\xf8\x41\x68\x16\xe6\xac\xad\x33\x3f\x94\x7e\xac\x09\xb1\x7e\x75\x93\xc4\x36\x4c\xa3\x64\x97\x38\x87\xd7\xdf\x25\xeb\xa8\x0e\xbd\xb3\x51\xa0\x60\xeb\x5e\x8e\x93\x2c\x1d\x81\x76\x13\xb7\x1e\xc3\xf7\x3b\xef\x43\xe1\xa0\x4d\x74\x7f\xa8\x7c\x74\xeb\xa4\x94\x43\xb1\xd8\x7c\x41\x3c\xb2\xb3\xde\xbc\x71\xae\xc4\x12\x33\x7c\xb7\xd3\xed\x8c\x55\x9d\xb1\xaa\x33\x56\x75\xc6\xaa\xce\x58\x85\x3a\x63\xd5\xc6\xc6\xaa\x2f\x48\xa6\xea\x0c\x57\x9d\x58\xb5\xbf\xe9\x1e\xaa\x96\x79\x48
\xd6\xba\xd6\x28\xe9\x45\x0e\xd5\xda\xc8\x7b\x3b\xed\x5f\xd6\x84\xdc\xdf\xb8\x11\xbc\x1f\x7e\x25\x5f\x9a\x25\xed\x12\x58\xec\x76\xf4\x8b\x8d\x2b\xee\x4a\x87\xd6\xae\x55\x17\xf6\xbc\x62\x71\xba\xb0\xe7\x2e\xec\xf9\xe0\xc2\x9e\xf7\xae\xac\x64\x5c\xae\x02\x24\x32\xa5\xb3\x56\xe6\x3f\xbb\x3b\x1b\x12\x8d\x80\x14\x0c\xca\x71\x4c\xb2\x84\x2f\xc0\x92\xb2\xe2\x3a\x77\x5d\xdc\x2c\x49\xd4\x87\x7e\xa3\xbb\x91\xbf\x96\xce\x71\x28\x32\x69\x31\xef\x83\x90\x42\x4f\xfe\x59\x49\xe7\x6f\x85\x97\xc9\x10\xf9\x4c\x25\xdc\x4a\xeb\x09\x7b\xc4\xea\x9f\x04\xa5\x0b\xed\x3d\x38\xc9\x55\x90\xbb\x27\xb5\x60\x95\x11\xa1\x16\xc1\x9b\x24\xcd\xd4\xe2\xbf\x46\x8c\x2a\xef\x61\xa3\x33\xc6\x85\xe1\x6a\xfa\xe3\x39\x66\x71\x42\x84\xbe\x54\x5d\x3b\x11\x66\x8c\x2b\x10\x37\x60\x06\x31\x7a\xa2\xd8\x08\x27\xfd\x9b\x61\x6b\x3f\xf3\x3b\x3a\x5d\xaf\x5d\xac\x6e\xcd\x5d\xf7\x29\xe1\x13\xa8\x60\x99\x97\x75\x7a\xdd\x40\xe7\x19\x2d\xed\xdc\x5b\x31\x04\x85\xe5\x63\x15\x38\xa4\x9c\x85\x3e\x5e\x09\x25\xb2\xe6\xdd\x12\xc6\xfc\xea\x57\x2b\x70\x23\xe5\x67\x16\x80\x04\x1e\xc3\x90\xab\xe3\x70\x3f\x86\x1d\xba\xdf\x8a\x96\xdd\x2f\xae\x74\x37\xfc\x28\x88\x12\x8b\x31\x56\x4a\x33\x99\x7d\x62\x9c\xdc\x63\xf9\xd8\x1a\xe3\xa4\xf4\xf2\x81\xb3\x9c\x12\xc6\x49\x79\xe0\x2f\xce\x72\x5a\x52\xe7\x1a\xce\xf4\xfe\xf2\xe3\xdb\x9e\xb5\x0d\x26\xfe\x5b\xc9\x95\x6f\xc7\x7b\xd6\x99\x69\xdf\x63\xde\xfc\x2a\x66\x7a\x30\x23\xac\xf0\xf3\x2f\xf1\xe4\x96\x6f\xa7\xee\x88\xae\x5a\xa3\x2f\xae\x10\x6e\x45\xe8\x58\x33\xb7\x77\x52\x10\xb7\x2a\x37\xed\x7b\x54\x2f\x63\xe6\x0e\x76\x63\x93\x18\xa0\x61\x19\xad\xdc\x9f\x21\x17\x15\x54\x94\x9e\x9d\x43\xa2\x35\x95\x61\x42\x7c\xc4\x85\x91\xbc\x62\x7b\x66\x8d\xd9\xce\x80\xf9\x9e\xa2\x3e\x8a\x6d\x6d\x7e\x41\x32\x41\x24\x61\xca\xa8\xda\xa6\xde\x95\x2b\xef\x4f\x99\xb5\x10\x9d\x63\x85\xcf\xb0\xc2\x09\x9f\x1d\xa3\xbe\x2f\xec\x4f\x25\xc2\x89\xe4\x08\x3b\xc2\x21\xb1\x6b\x02\xb3\xd8\xb1\x07\x8c\x22\x9e\x66\x34\xf1\x48\xed\xde\x8a\x4f\x59\x4c\x9f\x68\x9c\xe3\xc4\x23\x63\x8f\xd8\xe0\x89\x30\x95\x83\x0a\x87\x93\x04\xd9\x6e\xdd\x0b
\x81\x7e\xee\x46\x29\x69\x4a\x13\x2c\x90\xe2\x76\xb4\xd7\xb6\x2d\x74\x3f\x27\x7e\xac\x0e\x05\x1c\xa5\xf8\x91\x48\x44\x15\xca\xb8\x94\x74\x92\x14\xc7\xf8\x61\x88\x60\xdc\x67\x17\x43\x30\x8d\x46\x0a\x71\xc3\x07\x5d\xe7\xd6\x4f\xe0\x7a\x4c\x31\x63\x04\x3a\xe6\x6a\x4e\x84\xed\xde\xbe\xfc\xd6\x56\xce\xb7\xc6\x88\x6e\xb6\x98\x86\x23\x7b\x3b\xa5\xb3\xb5\xc6\xd9\x56\xdd\x6c\xa7\x6b\x36\x2b\x9a\x2f\xe0\xa5\x6d\xaf\x0d\x5e\x50\x59\x56\x07\xdf\x85\xcb\xb6\x34\xe2\xd7\xc0\x47\xfb\x8d\x2a\x82\x9d\x16\xf8\x22\xeb\xf6\xa5\xaa\x80\x07\xae\xff\x75\xc8\x6e\x1d\x8a\x7d\x17\x80\xb1\xc7\xc5\xe9\x02\x30\xba\x00\x8c\x2f\x36\x00\xa3\x59\x9b\xa0\xf1\xce\xe9\x7a\x1b\x56\x90\xf2\x46\x01\xf1\x0b\x88\x52\x58\x3e\xb6\xad\x29\xa5\x45\xe5\x61\xfc\x2e\xa4\xfa\xda\x09\xbf\x86\x74\xdf\xd5\x29\xda\x6b\x9d\xa2\x83\x9b\x76\x27\xf8\x75\x82\x5f\x27\xdb\xb4\x9c\x70\x27\xdb\x1c\xae\x6c\xf3\x56\x0a\xcb\x97\x04\xa1\xab\x85\xa7\x52\x66\xcc\xca\x00\x5b\x03\x47\x03\xee\x81\x3c\x4b\x38\x8e\xd7\x05\xe1\xfc\x82\x0a\xb9\x66\x85\x68\x66\xda\xd5\x1f\x1c\xb8\x64\xb6\x14\x7f\x63\x46\xfe\x5b\x88\xa9\x6d\x9c\xfa\x9b\x86\xd5\x02\xfd\x42\x30\x59\x29\x28\xed\xa5\xb4\x90\x2a\x4d\xb7\x52\x38\xe4\x1f\x0f\x9c\xaa\xfd\x96\xbe\x86\x7a\xf1\x05\x3b\x08\x3a\x27\xc0\x6f\xb3\xf0\xf9\xc1\x48\xad\x9d\x6a\xd7\x65\x55\x76\x46\xfd\x4e\xf1\xed\x14\xdf\xbd\x2f\xe3\x21\x29\xbe\x6f\x28\x51\x9b\x34\x91\x17\x29\x63\xb8\x9d\x6c\xdd\x89\xd6\x9d\x68\xdd\x89\xd6\x5f\xac\x68\x7d\x18\x2b\xdc\xc9\xd5\x9d\x5c\xdd\xc9\xd5\x9d\x5c\xdd\xc9\xd5\x7b\x5f\xc6\x4e\xae\xae\xc8\xd5\xf0\x97\x4b\x93\xde\x54\xc8\x6e\x2d\x5c\xb7\xc8\x89\x7e\x2f\x92\x75\x27\x55\x77\x52\xf5\x61\x4b\xd5\x07\x33\xa1\x2f\x2f\x11\xb2\x4b\x25\xec\x52\x09\xbb\x54\xc2\xb7\x48\x25\x74\xbc\x64\x95\x84\xb2\x2c\x58\xfc\xb8\xc4\x81\x0e\x56\xb6\x28\x46\xbb\x6d\x78\xc7\xbe\x96\xda\x01\xcd\x6f\x53\x69\xaa\xf4\x9b\x6b\xe8\x80\xea\x4f\xf5\x9c\xb4\xa0\x19\x85\x1b\xdf\x7a\x84\xb0\x9f\xec\x9b\xef\x0b\x0c\x7c\x79\xd4\x5d\xfd\x29\x14\xec\x5a\x57\x7f\xea\x05\xe7\xed\x0e\xd7\x9a\x99\x3b\x1a\x35\x36\xde\x77\x3a
\xed\x37\x07\x97\x6b\x3e\xe9\x6f\x1a\x2e\x57\x7b\x93\x2c\x25\xef\x9c\xfc\xb3\xf6\xa2\x78\x83\xb2\x5b\x1b\xdf\x0e\x9f\x88\xfa\x52\xae\x86\xae\xec\x56\x57\x1f\x62\x4f\xd3\xdd\x8a\xf5\xbf\xdb\xd9\x76\x45\xc6\xba\x22\x63\x5d\x91\xb1\xae\xc8\x58\x57\x64\x0c\xfd\xc6\x8b\x8c\x6d\x2c\x3e\x9a\x71\x7c\x29\x12\x64\x57\x64\xac\x13\x22\xf7\x37\xdd\xdf\x96\x10\x79\x80\x16\x84\x83\xa8\xa6\xe6\x2d\x08\x6f\x8e\xfb\xe1\x46\xd2\x16\xfb\xc3\x2d\x68\x87\xff\x61\xff\xaf\xc3\xff\xe8\xf0\x3f\x1a\x66\xdd\x05\xb3\x76\xf8\x1f\xa8\x0b\xd7\xec\xc2\x35\x0f\x39\x5c\xb3\xc5\x36\x76\xf8\x1f\x2d\xc5\xb9\x17\xc2\x00\x71\x32\xd7\x46\x38\x20\x3f\x2d\x2b\x1a\x07\x2b\xa5\xb9\xb1\xfe\x76\x70\x40\x6a\xa7\x7d\x10\x2a\xc9\x2b\xe2\x80\xd4\xd1\x75\x6b\x05\xe4\x7d\xe0\x81\xb8\xd1\x76\x89\x8b\x5d\x88\xf5\xe1\x87\x58\x1f\x5c\xe2\xe2\xc1\x48\xb2\x9d\xba\xd7\xe5\x2e\x76\xb9\x8b\x9d\x32\xdc\x29\xc3\x7b\x5f\xc6\x43\x52\x86\xdf\x58\xc2\x7e\x41\x5c\x90\xdd\x64\xed\x4e\xd4\x36\xef\x75\xa2\x76\x27\x6a\x7f\xa1\xa2\xf6\x61\xac\x70\x27\x67\x77\x72\x76\x27\x67\x77\x72\x76\x27\x67\xef\x7d\x19\x3b\x39\xfb\xd5\x70\x42\xea\x84\xed\x96\xf9\x36\xef\x49\xd2\xee\xa4\xec\x4e\xca\x3e\x6c\x29\xfb\x60\x26\xd4\x61\x86\x74\x98\x21\x1d\x66\x48\x87\x19\xb2\x95\x7c\xf3\x6f\xf6\x58\x7e\x08\x6e\x62\x7f\x65\x7f\xf8\x2e\xe1\x93\xfb\x45\x46\xf4\x7f\xcf\x69\x4a\x98\x04\x69\x94\xaa\x45\x28\xcf\x34\xac\xfc\xf2\x9a\x7f\xb8\x1b\x5e\x7d\xba\x08\xb3\x69\x3e\x5c\x3e\x5c\xdc\x0f\x6f\xfa\xb7\x7e\x5d\xfc\xac\xc2\xb5\xb0\xdf\x95\x44\x32\x4b\xf2\xb7\x44\xeb\x9e\x70\x6a\xee\x14\x56\xb9\xdc\x6e\x64\xb7\x83\xbb\xc1\xed\x8f\x90\x0d\x34\x3e\x1f\xde\xf5\xbf\xbb\x28\x11\x44\xe9\x79\xff\xec\xaf\x0f\xc3\xdb\xe6\xe7\x83\xff\x1e\xde\xdd\xdf\x35\x3d\xbd\x1d\x5c\x0c\xfa\x77\xcd\x5f\x7f\xec\x0f\x2f\x1e\x6e\x07\x2b\xd7\x63\xe5\x68\x57\x2b\x21\x12\x16\x09\xe2\xfc\x51\x64\xb9\x86\x28\xd6\x10\x79\xf1\xd1\xb1\xc3\xba\xbe\x4e\xd1\x83\xd5\xe9\xa9\x6d\xdc\x30\xd8\xa0\x21\xa3\x8c\xc4\x54\xe2\x49\x42\xe2\xa5\x96\xdc\x1a\x36\xb5\x84\x4b\x83\x7a\xd6\xda\xb3\x17\x39\x35\xcf\x8b\x0c\x2f
\x40\x90\xa3\xa8\x08\x8b\x6b\xfa\x30\xfb\xd0\xd8\x03\xd3\xbc\x8b\x3e\x91\x52\x4f\x51\x2e\x04\x61\x2a\x59\x20\xf2\x99\x4a\x25\x97\x1a\x75\xdb\xd7\xd4\xac\xbd\x53\x7d\x83\x73\x2c\xd1\x84\x10\x56\x1e\xbf\x20\x09\xc1\xb2\x66\xcc\x76\xf7\xdb\x2d\x8b\xdf\x2b\x6b\x8d\x31\x97\xd1\x14\xd3\x24\x17\xa4\x72\x5a\x78\x9a\x61\x41\x25\x67\x83\xcf\xfa\x2e\xd3\x07\xf9\x1a\x3e\xe7\x62\xbb\x13\x33\xf8\x6b\x48\xc1\x57\xe5\x7f\x7e\xba\x2f\xff\xab\x74\xe6\x2f\xee\xcb\xff\x5a\x4d\xeb\x41\xc3\x55\xca\x3e\x42\x9f\xee\x4f\xd1\x27\x08\x71\x12\xe8\x7e\x8e\x0d\xc5\x5e\xdc\x9f\xa2\x0b\x22\x25\xfc\x52\x7c\xac\xa8\x4a\x60\x6e\xdf\x51\x86\xc5\x02\xb9\xe9\x9b\x44\x57\x1c\xcd\x11\xf1\x4b\x53\x5d\x3c\xf6\xf7\x9c\x81\xea\x5e\xac\xde\x05\x9f\xd1\x08\x27\xbb\x2d\x62\xff\xaa\xc4\x07\xae\x6f\x57\x2e\x45\xf8\xf6\xf2\x5a\xf4\xaf\xce\x21\x89\xd4\x0d\xb5\x66\xe6\x57\x44\x6a\x22\x89\x38\x8b\xad\x97\x46\xdf\xfe\x8b\x40\xa8\xff\x3b\x87\x44\xdc\x5c\x52\x36\xd3\x2d\xa2\x13\x74\x7d\x3b\x62\xd7\x22\x36\x86\x50\xa2\xa5\x61\x43\x73\x54\x22\xc6\x15\xa2\x69\xc6\x85\xc2\x4c\x69\x45\x00\xc4\x00\xbb\x22\x86\x03\x9c\xf1\x34\xcd\x15\xd6\x07\x6d\x69\x51\x99\x31\x87\xdc\x11\x35\x8c\xc1\xb5\x52\xb3\x86\x46\x4e\x28\xe6\x92\x09\xdd\xbe\x96\x51\xca\x3a\x34\x8d\x97\x54\x59\xd7\x04\x16\x02\x97\xa5\x89\x0f\x54\x91\xb4\xfa\x7e\xcb\x20\xcf\x7f\xd5\x1a\x08\xce\x4c\x52\x05\x11\x7d\x11\xcd\xa9\x22\x91\xd2\x47\x70\x2b\x9a\x78\xb8\xfa\xe1\xea\xfa\xa7\x50\x82\xf8\xd0\xbf\x3c\xff\xf3\x7f\x94\x7e\xb8\xbd\x5c\xfa\x61\xfc\xe3\x9f\x97\x7e\xf9\xff\xad\xa4\xa7\x6a\x4f\x4b\x7a\x7e\x30\x97\x23\x10\xa9\xc1\x26\xec\xa6\x8a\x68\x8a\x67\x04\xc9\x3c\xd3\x14\x20\x8f\xcb\xfb\xab\x45\xca\x0b\x8e\x63\xca\x66\x26\x03\xf4\x82\x2a\x22\x70\x72\x89\xb3\x8f\xce\x7e\xbd\xc5\xea\xfc\xdf\xbb\x52\xbe\xee\x87\x9f\xfb\x97\x61\xc6\xef\x87\x9b\xdb\xeb\xfb\xeb\x95\xb3\x2e\xb5\xb0\x7c\x8c\xf4\xe3\x53\xf8\x5f\x74\x82\x74\xeb\x5e\xf2\x4d\x89\xc2\x5a\x23\x40\x5f\x9b\xa4\x39\x9f\x48\x43\x59\x02\xa7\x26\x13\x34\xa5\x70\xa5\x18\x0b\xde\x37\x46\xb8\xf6\xda\x83\x3f\x37\xe6\x03\xd0\x96
\xdd\xa5\xcc\x62\x2c\x62\xf4\x77\x59\x4d\x1f\x07\xc3\xb1\xf9\x81\xc4\xe8\x08\xcd\x95\xca\xe4\xe9\xc9\xc9\xf3\xf3\xf3\xb1\x7e\xfb\x98\x8b\xd9\x89\xfe\xe3\x88\xb0\xe3\xb9\x4a\x13\x93\x2e\xaf\x57\xe1\x14\xdd\x08\xae\xaf\x10\x50\xd0\x89\xa0\x38\xa1\xbf\x92\x18\x4d\x0c\xff\xe3\x53\xf4\x4b\xc4\x05\x39\x2e\x36\xc6\x1a\x95\xec\x3d\x62\x0d\x4f\x27\xfa\xa5\x1a\x66\x52\xdd\x4f\x14\x93\x88\xc6\x56\xcc\x20\x2c\xe2\x60\x79\x34\xbe\x0a\xdd\x9e\xcb\x34\xd4\x1a\x4d\x96\xab\x62\x39\x03\x65\x05\xc7\x24\xc8\x76\x57\xbc\x4c\x70\x5a\xf1\x19\x1a\xb5\x35\xd7\x2a\xba\xbe\x5b\x31\xdc\xaa\xee\xd5\x4c\x4f\x38\xe2\x09\x9a\xe4\xd3\x29\x11\xa1\x43\xba\xa7\xb5\x19\x2a\x91\x20\x11\x4f\x53\x90\x18\xf4\x57\xb9\x34\x54\x0d\x2b\x66\x47\x7b\x3c\x62\xb0\xff\x5a\xcd\x01\x0a\x88\x39\xb0\x3a\x46\x48\x8c\x30\x5b\x98\x6e\x26\xf9\x34\x6c\xdf\xc0\x50\xe0\x18\x51\x35\x62\xfd\x24\x41\x82\xa4\x5c\x91\x20\x87\x12\x9c\x67\xe5\x05\x07\x16\x29\x48\x96\xe0\x88\xc4\x86\x1e\x12\x1e\xe1\x04\x4d\x69\x42\xe4\x42\x2a\x92\x86\x0d\x7c\x0d\xb6\x1a\xbd\x66\x54\xa2\x98\x3f\xb3\x84\x63\x3b\x8f\xea\x67\xdf\x94\x4f\xe3\xc0\x41\x04\x0c\x84\xe0\x02\xfe\xe7\x07\xca\xe2\xbd\x71\xa8\x87\xbb\xc1\x6d\xf8\xef\xbb\x9f\xef\xee\x07\x97\x9b\x71\x1f\x4f\x59\x30\x3c\xd0\xe1\x4f\xd1\x9d\x59\x04\x2e\xb4\x44\x24\x1a\x26\x75\x69\x49\xa9\xf8\x81\xc7\x5b\x72\xdf\xcb\xfe\xd5\x43\xbf\xc4\x51\xee\xce\xbe\x1f\x9c\x3f\x54\xf4\x01\x3b\xbf\x92\x0c\x6f\xd4\xbf\xf0\xb7\xb3\xef\x87\x17\xe7\xe3\x1a\x85\xf1\xc3\xed\xe0\xec\xfa\xc7\xc1\x6d\xa1\xdb\xd5\x2e\x51\x65\x30\x55\x66\x75\x6f\x98\xd2\x9c\xc7\x68\xb2\xa8\x07\x84\xd0\x92\x73\x02\xbe\xd8\x02\x12\xc5\xb4\x7a\x0a\xbc\xc9\x61\x73\x14\x5f\xa4\x3c\x26\x3d\xfb\x0e\x20\x69\x18\xe3\x8a\x91\x98\xeb\x1b\xd6\xbd\x63\x16\x18\x2a\x0c\xc8\x85\x5f\xb8\x53\xd4\x47\x52\xbf\x98\xeb\x43\x2d\xe8\x6c\x06\x86\xc3\xca\x50\x4d\x6b\xf6\x53\x58\x5e\xf8\xce\xec\x7f\x26\x38\x9c\x73\xdd\xad\xb5\x38\x7b\xab\x84\xf9\x10\x50\x57\xca\x2d\x0a\x0c\x06\x87\x9a\xa1\xb9\xcd\xd2\x8b\xd0\xb8\x5e\xe6\x3c\x1a\x7b\x91\x3e\x5c\xc0\xb6\xa4\xb1\x77\x66\x82
\x3c\x51\x9e\x07\x9f\x5a\x60\x8f\xd2\x8e\xd7\x36\x5f\x2c\x00\x2c\x9b\x31\x8a\x54\x9a\xf1\xe4\x51\xdb\x82\x66\x61\x4f\xd0\xc2\x54\xf0\xb4\xa6\x8d\xf2\x31\x19\x5e\xdf\x29\x81\x15\x99\x2d\xce\x2d\xcb\xd8\xfe\x78\x9c\x5f\xff\x74\x75\x71\xdd\x3f\x1f\x0f\xfa\x9f\xca\x27\xde\x3f\xb9\xbb\xbf\x1d\xf4\x2f\xcb\x8f\xc6\x57\xd7\xf7\x63\xf7\xc6\x4a\x92\x6f\xe8\x60\xf9\x9e\x2e\xbf\x78\x8a\x34\xcb\x05\xd6\xe8\x00\xef\x02\xfe\x38\x21\x53\x2e\x0c\x9f\x4f\x5d\xe8\x82\x15\x61\xdc\xda\x5a\x5d\xac\x32\x8b\x53\xb0\x8c\xd5\x35\x69\xac\xde\x4a\x10\x9c\xc2\x3d\x81\x19\x1a\xb0\xf8\xe8\x7a\x7a\x74\x67\x7e\x4c\xb1\x78\x24\xc2\x7f\xfa\x2c\xa8\x52\x84\x95\x54\x3a\xec\x86\xec\x95\xc4\xa2\x83\x63\x74\xab\xf9\xbe\x7e\xdf\x5f\x6a\x9a\xd8\x63\xa2\x30\x4d\xa4\x1d\x6c\x69\x5d\x4f\xd1\x05\x16\xb3\xc2\x0e\xf7\x35\x9f\x4e\x4d\x63\xdf\x98\x61\xe8\x3b\xac\x34\x8b\x1a\xde\xab\x49\xc3\xdd\x8b\xd0\x9f\x7d\xd9\xcb\xc3\xcb\x54\xf5\x90\xed\x46\x53\x0f\x37\xb0\xe2\x46\x63\x2f\xe9\x86\xf6\x49\x0d\xad\xc1\xc4\xcd\xe3\xd5\x97\x4c\x7d\xdb\xcb\xe4\x54\x7e\xb1\x86\x9c\x4c\x2e\x95\xde\xf9\xa9\xd6\x36\x6b\x68\x89\x7c\xa6\xd6\x60\x10\x8e\xbb\x42\x42\x45\x33\x60\x5e\xc5\x59\x46\xb0\x90\x75\xbb\x5d\x16\x03\x1b\xf6\xde\xf4\x14\xf6\x61\x37\xd9\xf5\xd3\x43\x9c\x81\xc1\xc1\x0b\x11\x15\x8a\x6c\x41\x03\xa6\xad\x25\x0a\xb8\x01\xb4\xa5\x6b\x8b\x6c\x74\x49\xa5\x56\x1a\xcd\x8f\xdf\x59\xc8\xa5\xed\x08\xe2\x63\x7f\x78\x51\x11\x2e\xc6\xe7\x83\x8f\xfd\x87\x8b\xd5\x66\xc2\xd2\x77\xd5\x2d\x46\x47\x48\x3f\x2f\xfb\xcd\xe9\xd4\xdc\x19\x0e\x38\xca\xa8\xb4\x84\x81\xd1\xca\x42\xd5\x18\x7b\x75\x4c\xb2\x84\x2f\x52\xc2\xc0\xc4\x53\xba\x09\xf5\x7a\x4e\x31\xb5\x57\x4b\x30\x58\xb0\xe2\x58\xb3\x1b\x5c\x63\x47\x0e\xad\x8a\xc4\xfe\xe6\x2d\x83\x55\x55\x58\xf7\x8d\xf1\x9e\xd9\xff\xdc\x29\xac\xb6\x3c\x63\xfd\xb3\xfb\xe1\x8f\x83\xb2\x7e\x78\xf6\xfd\xf0\xc7\x3a\xa9\x66\xfc\x69\x70\x35\xb8\xed\xdf\xaf\x11\x4e\x2a\x4d\xd6\x09\x27\x52\x0f\xb8\xea\x3d\xa5\xd2\x47\x04\x45\x06\xf2\x0a\x51\x25\xd1\x13\x95\x74\x42\x01\x20\xcc\x7a\x22\x1f\x86\xc0\x59\x9f\x70\x42\x63\xaa
\x16\x4e\x7c\x31\xfd\x96\xf7\x51\x73\x52\xdb\xbe\x31\x3b\x84\xfe\x49\xb0\xf2\x99\xcd\x71\x93\x3e\x45\xa0\xdb\x3e\x81\xd2\x16\x7c\xc6\xb4\x20\xcd\x66\x44\x98\xe1\x80\xf7\x25\x1c\x4b\xf0\x5c\x8f\x2a\x14\x56\x8a\x55\xf3\x42\xeb\x8c\x30\x22\x00\x04\xce\x77\x62\x04\x29\x41\xd8\x57\x5a\xe6\xca\x12\x1a\x51\x95\x2c\x50\x04\x36\x2c\x30\x67\xa6\x98\xe1\x99\x15\x0e\x40\xcd\xa9\x90\xc4\x5f\x0d\x8a\xda\xf5\xd4\x9a\xf6\xef\x29\xd9\xf2\x98\x3d\x5c\x9d\x0f\x3e\x0e\xaf\xca\x24\xf0\xfd\xf0\x53\x49\x84\xbd\x1c\x9c\x0f\x1f\x4a\xb7\xb9\x96\x64\x57\xcb\xf5\xd5\x66\x6b\x8e\xa2\x7f\xe9\x14\x9d\x9b\x4f\x4f\xf5\xe2\xd6\x40\xc4\x79\xe5\xb7\xb2\x0e\xb7\x2e\x24\xcf\xfd\x31\x60\x4a\xd4\xfa\x25\xda\x9a\x90\xac\x0f\xb2\x64\x43\xaa\x0f\x55\x58\xea\xfb\xaa\xea\x54\xae\x4e\xd9\xbd\x08\x41\x97\xc7\x85\x65\x29\x8c\x61\x00\xa3\x41\x93\x11\xab\xc6\xad\x55\x30\xec\x1f\xc1\x45\x9d\xe6\x52\x19\x57\x22\x10\x27\x7a\xfc\x8b\xd4\x0b\x0a\xae\xc6\x63\x74\x47\xc8\x88\x39\xeb\xc1\x8c\xaa\x79\x3e\x39\x8e\x78\x7a\x52\xe0\x13\x9e\xe0\x8c\xa6\x58\x4b\xd2\x44\x2c\x4e\x26\x09\x9f\x9c\xa4\x58\x2a\x22\x4e\xb2\xc7\x19\x44\xc0\x38\x77\xea\x89\x6f\x76\xc6\xff\xfd\xe2\x4f\xdf\x1e\x5d\xfc\xe5\xdb\x0f\xcb\x16\xb2\xa6\xfd\x1f\xb0\x08\x67\x32\x4f\x6c\xc4\x9c\x08\xd7\xc6\x1d\xf9\x9c\xac\xdb\xef\xab\xf2\x76\xed\xa6\xbf\x9e\xdd\x3c\x94\x2c\xd6\xe5\x7f\x5e\x0e\x2e\xaf\x6f\x7f\x2e\x71\xca\xfb\xeb\xdb\xfe\xa7\x12\x43\x1d\xdc\x7c\x3f\xb8\x1c\xdc\xf6\x2f\xc6\xee\xe1\x2e\xb6\xb7\x1f\x18\x7f\x66\xe5\xa5\x91\x8e\x03\x2e\xf5\x74\x8a\x3e\x72\x81\x7e\xf0\x3b\x79\x34\xc1\x12\xae\x18\x77\x67\xc9\x1e\xca\x78\x0c\x8c\x17\x91\x6c\x4e\x52\x22\x70\x62\x6d\x06\x52\x71\x81\x67\xe6\xa6\x97\x91\xc0\x2a\x9a\x23\x99\xe1\x88\xf4\x50\x04\xd4\x30\xeb\xc1\xa6\x80\xaa\xc5\x67\x55\x3b\xdf\x6d\xce\x14\x4d\x89\x53\xc1\xed\x3f\xef\xcd\x66\x6c\xb1\x39\xd7\xf7\xdf\x97\x85\xbd\x8f\x17\x3f\xdf\x0f\xc6\x77\xe7\x3f\xac\x5c\x4f\xf3\x59\x69\x64\x77\x10\x80\x74\xc6\x93\x3c\x65\xe1\xdf\xdb\x8f\x6d\x78\x75\x3f\xf8\x54\x1d\xdd\x75\xff\xbe\x4c\x19\xb7\xe5\x00\xb7\x0f\xdf\x5d\x5f
\x5f\x0c\x4a\x2e\xe1\x0f\xe7\xfd\xfb\xc1\xfd\xf0\xb2\x44\x3f\xe7\x0f\xb7\x06\x8d\x70\xd5\x34\xdd\x08\x6a\x26\xaa\xa7\x15\x4e\x73\xdf\xac\xb0\x15\x27\xea\xdb\x80\x72\x73\x96\x8f\x02\xb8\x1d\x13\x0e\x06\x56\x9d\x23\x6f\x52\x8d\xcc\x48\x6b\xd9\xa1\x2a\x6f\x13\x6a\x66\xc7\x2b\x37\x7a\x15\x57\xbe\xf7\x43\x30\x50\xa0\x46\xd9\xc6\x49\xc2\x9f\x4d\x28\x6f\x4a\xf5\xad\x6c\x81\xd1\xf4\x2b\xb2\xf0\x10\x1e\xd7\x70\xbc\xf2\xb6\x90\x48\x10\x75\xc9\x73\xa6\xb6\x27\xb9\xfe\x55\x89\xef\x0c\xae\x7e\x1c\xff\xd8\x2f\x53\xe0\xf0\x62\x35\xab\x09\x9b\xa8\xb9\x8a\xfb\x57\x3f\xfb\x4b\x18\x02\xbe\x7b\x5e\x43\x35\xb2\x6b\x94\x50\x2d\xf6\x46\x58\x6b\xaf\x09\x48\x34\x88\x50\x30\x39\xa4\x7a\x72\x10\x60\x9a\x19\x7f\x92\xe1\x4f\x66\x90\xa7\xee\x8f\x4a\x7b\x12\xd6\x05\xac\xa9\x2e\x9e\x1e\xda\xb1\x5a\x35\x43\x84\x3d\x51\xc1\x01\xcf\x16\x3d\x61\x41\xb5\x34\x6e\x5a\xd6\x73\x3d\x85\xff\xdd\xac\x4d\x30\x8c\x56\x18\xd7\x1d\x17\xea\xdc\x07\xf2\x6e\x67\x0d\xa9\x0b\x68\x5d\x0e\x65\xad\x37\x74\x2c\x7f\x5b\xb3\x39\x3b\x06\xfc\x96\x27\xfc\x8f\xe4\x9c\xe2\x44\x33\x80\xfd\xc9\x8b\xfd\xab\xbb\x61\x59\x7e\x2c\xab\x19\x01\x5f\xde\x5a\x5e\x04\x43\xa5\x19\xb9\x53\x26\xee\xfe\x7a\x61\xb4\x0b\x00\x3d\x36\xe7\x36\x50\x2c\x40\x00\x72\x28\x28\x19\x16\xb2\xf2\x85\x44\x00\x84\x56\x04\x5c\xe9\x3b\x0b\xc2\x99\x9e\x38\x8d\x47\x8c\x7c\xce\x08\x93\x10\x1c\x60\xee\xb3\xc2\xd7\x2e\x8f\xd1\x70\x0a\x2c\x41\xbf\xce\x50\xce\xac\x03\x4c\x5f\xb8\x66\x90\x3d\x2d\xca\xda\x21\x78\x0d\x11\x0c\x2f\x8c\xb8\x60\xa9\x62\xf0\x23\xf6\x93\x77\xa2\xc1\xa3\x29\xd7\x0c\x48\xef\xa2\x6d\xef\x14\x61\x26\x69\x0f\x69\x85\xa5\xba\xa7\x90\x3a\xa0\x15\x4a\x1b\xc2\xa5\x39\x8d\xfd\xf3\xf5\xaf\x81\xa5\x38\xe1\xf0\x32\xa8\xbf\x0b\x2a\x57\x41\x83\x68\x9c\x18\x8f\xc9\xb8\xfd\x9d\x10\x71\x41\xac\x9f\x65\xe3\x6b\x60\x1d\x63\xbf\xc7\xf2\x71\xc9\xf7\x30\x64\x52\x61\x16\x91\xb3\x04\xcb\x2d\x83\x90\x9c\x8d\xa3\x57\x96\x38\x6e\x6f\x1f\x6e\xee\x87\xdf\xad\xe1\xf2\xd5\x8f\x97\xc3\x80\xa2\x24\x77\xee\xb9\x89\xe0\x38\x46\x9a\x7d\xce\xb8\x71\x05\x5a\xc1\xbf\x80\xfe\x36\x79\x3d
\x3e\xa0\xb2\x04\x3b\x5e\xa4\x23\x58\x3b\x47\xe8\x4a\xa0\x76\x21\x50\xa4\x57\x02\x05\x26\x0f\xb7\xd5\xe0\x59\x34\x05\x49\xac\x75\x2b\x4b\xb0\x9a\x72\x91\x1a\x2e\x5f\x9a\xb4\x69\x7c\x75\xa3\x94\x29\x22\x44\x9e\x29\xea\xb0\xdc\xab\x52\x2a\x54\x78\xe7\xb3\x4b\x22\x25\x9e\x91\x5d\x1c\xd0\x75\xca\xc3\xdd\x8f\xe1\x3f\xc1\xc1\xdc\x46\xf6\x2f\x8d\xd0\x45\xbe\x3b\x7a\xba\x66\x1f\x4d\x20\xcf\x0d\x4f\x68\xb4\x65\xc0\xdd\xc7\xfe\xf0\x62\x3c\xbc\xd4\x4a\x7c\xff\x7e\x70\x51\x12\x25\xe0\x59\xff\xe3\xfd\xe0\xd6\x82\x58\xf7\xbf\xbb\x18\x8c\xaf\xae\xcf\x07\x77\xe3\xb3\xeb\xcb\x9b\x8b\xc1\x9a\xc8\x9c\xc6\xc6\x97\xad\xab\xd5\x57\x4f\x97\x7e\x81\x1d\xd6\xbc\x2c\xb4\x97\x41\xd6\x18\xa6\x09\x38\xc1\xb9\x71\x86\x63\xc4\x78\x4c\xe0\x67\xe9\xac\x33\x1e\x39\x1a\x0d\xd5\x57\x49\x82\x70\xae\x78\x8a\xc1\x6b\x93\x2c\x46\x0c\x4f\x34\x6b\xc5\x49\x12\x84\x77\x89\x9c\x31\xcd\x62\x75\x63\x06\xa2\x3d\x4a\x88\x66\xe7\x59\x90\xec\x67\xfd\x06\x53\xca\x20\xd2\x36\xc5\xe2\xd1\xb8\x99\x8a\x2e\x8b\x43\x21\x11\x96\x23\xa6\xc7\x45\xac\x61\xa8\xcd\x0a\x9f\xb6\x7a\xab\x71\x75\x52\xfc\x48\xf4\xaa\xa4\x79\x34\x47\x99\xe0\x33\x41\xa4\xb4\xb6\xe5\x08\x33\x13\x80\x60\x5f\xd7\xd7\xd0\x88\x31\xae\x97\xc2\x99\xb0\x63\x92\x11\x16\x13\x16\x51\x93\xd6\x07\xbe\x7b\x6f\xda\x9c\x09\x9c\xcd\x91\xe4\xe0\xf4\x86\x65\x07\xfb\x95\xf9\xc8\xdd\x64\x66\xc6\xe6\x71\x68\x81\x16\xb9\xe6\x13\xd7\x20\x27\x9a\x55\x86\x8f\xdd\x65\xe8\xdc\x2e\xc6\x0e\x98\x66\x09\x51\x06\xac\x1f\x96\x1c\x36\x43\xaf\x75\x69\x3f\xf4\x36\xd5\x6d\x82\xbe\xb0\xdd\x98\xb1\xb4\x23\x3a\xae\xb1\x6c\xdb\x23\x85\xbe\xc7\x2c\x4e\x74\x2b\xce\x87\x51\x3e\x8b\x90\x8a\xd2\xd7\x54\xe3\x4e\xe3\x2e\xb7\x68\x84\x73\xb9\xcb\x35\x5a\xc9\xc5\x34\x56\xc1\xa3\x22\x28\x04\xc8\xdb\x26\x62\xc2\xea\x66\x9a\x45\xe2\x84\xdb\x55\x32\xaf\xe7\xa6\xfe\x13\x82\xd1\x34\x5c\xb3\x99\xa0\x2c\xa2\x19\x4e\xb6\xd2\xfd\x2a\xc1\xf8\x36\xc6\xfd\x6b\x3a\xd5\xe4\xf3\xcd\x92\xdb\x56\x11\x91\x42\x82\xb2\x1d\xa6\xdf\xc2\x0d\x2c\x49\x36\xab\x81\xc8\x22\x9a\x04\x0b\x9e\x1b\x7f\x1c\xac\x0b\x89\x6b\x8e\xea\x71\xdd
\x76\xeb\x93\x81\xcb\x01\xd0\x5b\x6c\xb6\x89\xfc\x69\x5a\xbf\x4a\x2b\xb6\x77\x13\x8c\x87\x93\x9b\xfa\x36\xeb\x76\x20\x78\xf8\xaf\x55\xb4\x73\x89\x33\x4d\x33\x16\xb6\x1f\x17\x73\xb4\x4a\x92\xad\x0a\xe6\xe2\x67\x02\xdf\xb9\xcf\x0b\x69\xbf\x1b\xc5\x12\xda\x00\xa8\xe5\x4e\x4a\x31\x04\x41\x8e\xb9\xa5\xf1\x69\xae\x65\x59\x84\x21\x0a\x01\x7d\x4d\x8e\x67\xc7\xc8\x15\x61\xe8\xa1\xfe\xcd\xcd\xe0\xea\xbc\x87\x88\x8a\xbe\x71\x31\x8b\x36\x60\x69\xc4\x14\xb7\xd2\xca\xc2\x15\xd0\x48\x89\x98\x91\xd2\x9c\x5d\x74\x13\x84\x2a\xcf\xa8\x54\x36\x7c\x56\xf3\x95\xa0\xd4\x09\x4d\xab\x62\xb6\xa1\x90\x5c\xcd\x77\x21\x0d\x2c\x65\x9e\x6a\x5d\x76\x4c\x71\x3a\x16\x3c\xd9\x85\x29\x9c\xc3\x54\x40\x5d\xf6\xe9\xf9\x14\xa7\x48\x37\x6b\x43\x41\xbc\xcb\xd1\x8b\x74\x5a\x30\xd2\x7c\x59\xdf\x9b\xc1\xbd\xe5\xbc\x0f\x36\x1e\x8d\xba\x10\x08\x48\xdf\x6f\x60\x15\x85\xd9\x78\x6c\x2d\xf5\x63\x1c\x45\x5a\xe5\xde\xf3\xa4\x82\xfa\x39\xce\x25\x60\x3b\x7a\xb1\x69\xae\xa3\x73\x37\xcc\x4c\x73\x30\x08\x06\xd6\x57\xae\xe4\x11\x2d\xda\xaf\xe9\x77\xb2\x58\xea\xd5\x55\xb8\x79\x90\xde\xa4\x62\x2e\x61\x49\x60\x27\xa5\xa9\x90\xa3\xe6\x64\x81\xe6\xf8\x89\x94\xba\x74\x09\x31\xba\xe1\x05\xcf\x45\x1d\xa3\x1b\xb1\x73\x92\x09\xa2\x25\xfd\xaa\x03\xc5\xd3\xf4\x6d\x99\x12\x3b\xba\xee\xe8\xfa\xdd\xd3\xf5\x99\x29\x94\xd4\xf7\x85\xb1\x76\x12\xe0\x4c\x63\xe3\x8c\xf3\x64\xdc\xc2\x26\xd2\x7e\xc5\x4b\x9e\xb0\x4a\xd9\x28\x80\x04\xe0\x39\xc8\x47\xa5\x6b\x93\xeb\xbb\x2e\x48\xb1\xb5\xc3\x5b\xb1\x0c\xce\x65\x16\xd4\xcb\xd9\xe5\xbc\xd7\xb5\xb2\xaa\x25\xf4\xe2\x62\xce\x99\x91\x6f\xbc\xbb\x2c\xac\x7f\x5a\x3a\x4c\x4e\x14\xa1\x6c\xa9\x1a\x9b\xa1\x67\xbd\xc0\x46\xee\xf8\x47\xce\x15\x96\xdf\x1c\x8f\x98\x16\xa2\x1e\xc9\xc2\x98\x5b\xb5\x98\xf2\x3b\x2d\x8b\x1f\x49\xc2\x24\x84\x7b\xff\xce\xb8\xe7\x34\x89\x3b\x73\xb5\x51\x4d\x4d\x11\x38\x08\xba\xf6\xbd\x40\x88\xae\x6d\xd4\x4a\x49\x45\x00\x34\xc8\xf9\x66\x2e\xf6\x99\x19\xfe\x8c\x28\x48\xb1\x56\x54\x81\xce\x14\x9b\x2a\x73\x4b\x43\x5f\x6b\xba\x32\x54\x21\x38\xf8\x49\xe2\x7c\x37\xc6\x2f\x97\xdb\x58\xcb\x19\xbd\xb6
\x70\x67\x63\xde\x4f\x9c\xdd\x28\x12\x7c\xa9\x74\x1b\x96\xc8\xec\xf4\xc4\xb0\x03\xe7\xbf\x26\xec\xf8\x99\x3e\xd2\x8c\xc4\x14\x43\x04\xbc\xfe\xd7\x89\x9e\xd7\xbf\x9f\xdd\x5e\x5f\x8d\x8b\x4c\x9e\xff\x1a\xb1\x7e\x22\xb9\xcf\x52\x40\x8c\x33\x1f\x6e\x9f\x09\xe2\x44\x42\x3b\x17\xb0\xba\x16\x66\xc4\x11\x6b\x1a\x41\xcc\x23\x79\x8c\x9f\xe5\x31\x4e\xf1\xaf\x9c\x81\x2b\xbd\x0f\x7f\x9e\x25\x3c\x8f\x7f\xc2\x2a\x9a\x9f\xc0\xb9\x56\x27\xe4\x89\x30\x65\xdc\x54\x7a\xb9\x62\x48\xde\x95\x10\xad\xff\xef\x7a\xcc\x45\x52\x91\xd4\x9a\x6c\x44\x32\x85\xfe\x1f\x41\x26\x9c\xab\xfa\x4b\x8a\x4f\xa7\x92\x6c\x74\x21\x15\x4a\xda\xdd\x35\xfa\xcb\x9f\xbf\xfd\x83\x26\xa1\x6d\xd6\x78\x78\x77\x3d\xd6\xdf\xff\xfb\xb9\xfd\x5e\x6e\xc0\xee\xae\xb3\x82\xb5\x39\xe2\x31\x81\xf3\x39\x83\xdb\x4f\x80\xf3\x02\xd8\x1b\x90\x43\xb1\x8f\x75\xdc\xed\xbc\xd4\xfa\x6e\x2a\xdb\x56\x8b\x09\x2a\x76\x30\x47\x74\x84\x18\x47\xa9\x89\x35\xc5\x0c\xfd\xc7\x0f\xdf\xd5\x6f\x60\x2e\xe8\x56\x1d\x52\x0b\xd7\x10\x74\x29\xe9\xaf\x44\x22\x4d\x35\x9a\x8a\x79\xaa\xbb\x16\x44\xce\x79\x12\xa3\x67\x02\x6a\x92\x8d\x03\xf5\x5a\xb9\x20\x23\x16\x36\x01\x21\x87\x08\x27\x8a\xcf\x08\xdc\xd5\x4e\x51\x53\x44\x68\x51\xc5\x64\x69\x28\x2e\x48\xcf\x40\x7d\xdd\xfd\xc9\xc5\x56\xc3\x34\xe1\x91\x4b\x6a\xb1\x26\xb9\x78\x52\x3f\xf3\x69\xd5\xf4\x8a\x9a\x6d\xf8\xd5\x4d\xb6\x66\xdb\xfa\xa5\xb1\x49\x28\xd6\x86\x55\xdd\x99\xfa\xc1\xd0\x88\xb3\x71\x42\xd9\xe3\x56\x9b\x71\xed\x44\x39\xdd\x82\x5d\x33\xdd\xa2\xb7\x73\x1b\x0b\xc8\x06\xe7\xe3\x63\x9e\x24\x26\xb5\x25\xdc\x1e\x90\xbb\xcc\xba\x81\x30\x90\x99\x1c\x50\x12\x5b\xbf\x97\xd5\x84\x05\x61\x10\xf0\x36\x62\x93\x85\xf5\xd9\xca\x1e\x92\x79\x34\x77\x99\x79\x11\x67\x52\x8b\xd1\x5c\xa0\x88\xa7\xa9\x29\x6e\xca\x08\x52\x9c\x27\xd2\x46\xbb\xb3\x23\x85\x23\x35\x62\x45\x7f\x6b\x4e\x9e\xa9\x80\xb4\x5b\xea\x5e\x7b\x97\x4e\x51\x69\x69\xa5\xc0\x4d\xe3\x10\xb3\x01\x8c\x60\xc6\x13\x15\xa0\x3f\xf0\xe5\xb3\x64\x36\xac\x41\x33\x90\x73\x2e\xd4\x38\xae\xe5\x39\x6b\x89\xa6\xca\x08\x19\x39\x4a\x20\x68\x98\x3f\x69\xe1\x9f\x3c\x7b\xe3\xeb\xaa
\x21\x68\xaa\x5e\x35\x82\x76\xc7\x68\xe5\xc8\x36\x25\xc1\x86\xb5\x32\x08\x1e\x51\x39\x26\x7c\xdd\x18\xef\xe0\xab\x33\xfd\xd1\xca\xc5\xab\x9e\x3b\x27\x04\xf1\xb8\x00\x9b\x33\xf7\xba\xcd\x08\x59\xb5\xa6\x16\x3a\xe1\xe5\x32\x47\x57\x4d\xe5\xa1\x6c\xc9\xd5\x63\x01\x93\xbd\x24\x20\x6b\x62\x31\xa1\x4a\x60\x51\x42\x0a\xf1\xfa\xa0\x24\x58\x40\x7c\xd6\x88\x19\xdc\x38\xa3\x29\xc4\x28\xa6\x12\x12\x44\xe0\x2e\x0d\x9c\x61\xa8\x9d\x12\x58\x39\xda\x45\x9e\xa3\x89\x3f\x87\xc0\xb2\x82\x34\x1c\xb3\xd3\x1d\x79\x7c\x2c\xad\x9f\xf1\x28\x2f\x04\xb9\x08\x24\x5c\x8b\xa9\x83\x28\x93\x74\x36\x57\x88\x32\x6b\x77\xc4\xc9\x8c\x0b\xaa\xe6\xa9\xec\xa1\x49\x2e\xb5\x16\x6a\x82\xd5\x4c\x3c\x0a\x51\x51\x2b\x2e\xb4\x6b\x12\x71\x5c\x69\x70\x59\x45\xd9\x82\x34\xda\x1d\xca\x41\xe5\xae\x58\x43\x38\x7d\x8f\x33\x58\x6d\x83\x42\xdd\x46\x03\x4f\x89\x4c\x1c\x20\x77\xc8\x4e\x50\x05\xa4\xe9\x1c\x00\x2a\xe4\xde\xbc\x14\xaf\x51\x88\x0b\x99\x64\x50\x41\x5c\xec\x36\x48\x5e\x65\x64\x4a\x83\xde\xe4\x9d\x4e\x69\xa6\x6a\x03\xb7\x96\x5d\x45\xb7\x01\xe6\x4f\xbb\xc5\x86\x64\x2c\xa0\x66\x40\x6a\x1b\xb1\x3b\x42\x9a\x81\xdc\x96\xf6\xde\x94\xc6\x85\x29\xd8\x44\x8f\xd5\x24\xbf\x8b\x13\xfb\x7c\x70\x77\x76\x3b\xbc\x31\x90\x13\xd7\xb7\x97\xfd\xfb\x71\x8d\x5f\xbb\xe6\xad\xcb\xfe\xed\x0f\xe7\xeb\x5f\xfb\xfe\xbe\x9c\x95\x5d\xf3\xca\xed\xdd\xea\x64\x8e\x16\x43\xac\x49\x0a\xab\xed\xe7\x14\x65\x0b\x35\xe7\xcc\x87\x28\xc4\x25\xde\x74\x84\x4c\x46\xb0\x82\x10\x22\x21\x55\x8d\xe3\xf0\x1e\xe2\x72\xd6\x4b\x98\xe5\xcd\x32\x30\x6c\x7b\x15\x8d\x36\x38\x91\x9f\x12\x3e\x01\xbf\x75\x5e\x2a\x71\xbb\x22\x02\x7d\xc7\x78\x9f\x73\x2a\xb3\x04\x2f\x96\x7a\x58\x77\xe5\x5c\xe1\x94\x40\xc4\x71\x81\x1f\xe7\x92\x45\xf4\xce\x40\x02\x93\xbf\xd7\xe9\x14\x32\x99\x14\xc5\x8a\xa0\x09\x51\xcf\x90\x37\xe7\x7e\xf5\xb6\x54\x17\x30\x22\x8f\x47\x0c\xcc\x39\x23\xbd\xc8\x71\x0e\xd1\x7e\xa3\x0f\x3d\x34\xfa\x10\x93\x27\x92\xf0\x4c\xef\xbc\xfe\xa1\xe1\x92\x19\xa4\x98\x26\x57\x5c\x79\xcb\xdc\x2e\xfb\x29\x48\x44\x33\x90\xcc\xc7\x44\xb7\xfb\x7a\x82\x47\x89\x92\x1d\x3b\x83\x31\x20
\x1c\xc7\x5a\xc9\x06\x56\xe6\x86\x57\x84\x00\xb1\x60\xea\xa5\x5a\x99\x9b\x88\x14\xde\xfc\x6d\x7a\x0c\xdb\x2c\x9b\x3d\x6b\x77\x80\x3d\xbd\xa0\x4b\x76\xd7\x8b\x5c\x6b\x25\x3f\x90\x05\xa4\x60\xdc\x60\x2a\xb6\x74\xcd\xd6\xc5\xbc\xbe\x88\x93\x76\x50\xd3\xd1\x01\xb9\x6b\xeb\xd7\x61\x37\xc7\xad\x8f\xd5\x7b\x2d\x2d\xd5\xc5\x72\xf9\x8e\x5b\xaa\xad\x0f\x4d\x4a\x6a\x63\x08\x03\xaa\x2a\x5e\x19\x89\x36\xd0\xb8\xfc\x00\xef\xf4\x77\x6b\x35\x15\x2f\xae\x45\x61\x4d\x7f\xd8\x05\x9b\x1c\x5f\xcd\xc7\x27\x6b\x47\x1c\x25\x5c\x96\xb1\x72\x5a\x0f\xfa\xcc\x7e\xba\x6a\xdc\x83\x90\x7c\xb5\x5c\xb8\x51\x40\x43\xcd\xc2\x57\xc0\x20\xcd\x3d\xa3\xac\x87\xcc\xbe\xdd\x43\x14\xa2\x2d\x41\x21\x4b\x0a\xe4\x00\x16\xa3\xc2\x0d\x32\x62\x45\xcc\x8a\x44\xcf\x24\x81\x30\xb7\x88\xa7\x19\x98\xf8\xed\x70\x6d\x4b\x24\x36\x11\xc3\x3d\xc4\x73\xa5\x1b\x33\x39\x39\xce\x88\x6b\x13\x7e\x0a\xb7\x87\xf1\xbd\xd9\xe0\x77\x0f\x2c\x6d\x68\xdd\xdc\xa5\x94\xa1\x4f\x44\x41\x2b\x00\xdc\x1f\x4e\x10\xf4\x84\x6a\x08\x65\xfd\xda\xef\x70\xa2\xec\x4c\x36\xd8\xf9\x02\x38\xe5\xbb\x84\x4f\x56\x1b\x09\xa0\x71\xf4\x70\x3b\x74\x16\xc9\x22\x7e\x2a\x40\x2f\x2e\x79\x14\x07\x37\xb7\x83\xb3\xfe\xfd\xe0\xfc\x18\x3d\x48\xa2\x97\xc7\x4f\x17\xf2\xab\xbd\x4a\x62\x46\x6e\x91\x58\x98\x54\x04\x37\x19\x42\x88\x10\xa5\x2c\xe8\x35\x8c\xa3\x0c\xd3\xb2\x9a\xb0\x01\x24\x85\x5a\x43\x1d\x00\x0b\x55\xe7\x69\x23\xf3\xd6\x9d\x40\x88\x93\x1a\xbf\x9f\x28\x35\x33\xde\x74\x39\x32\x6f\x1d\xf9\x94\x23\xfa\x5e\x7a\x32\x70\xb4\xd4\x9c\x50\x81\x5a\x4d\xcb\x10\xd5\xb8\xfd\x9c\x82\x10\xf7\x4b\x9c\xad\x4e\x3f\xc5\xcf\x25\xa2\x35\xa2\x70\xe0\xbb\x7f\xe9\x73\xe0\xd8\xda\xd8\xb0\xc2\xdd\x27\x58\x38\xb4\x0c\x6f\xf5\x7c\xd3\x64\x7c\x48\x67\x24\x0b\x27\x56\x19\x84\x8d\x63\x95\x08\xce\x0e\xfc\x42\x19\x2a\x5d\x89\x3d\x34\xa5\x9f\x6d\xa3\x45\x7c\xbb\x7b\x35\x08\x78\x68\x88\xa7\x9c\xe3\xe5\x33\xb5\x81\xd8\x70\x03\xdf\xaf\x14\x22\xb9\xd4\x22\x51\xa4\xc5\x25\x41\x22\x2e\xf4\x4d\x01\xdd\x16\x5e\x88\x75\x22\x83\xc2\x42\x2f\xca\xb2\x57\x66\xd5\xe9\x2f\x6a\x90\xc4\x58\x91\x23\x2d\x7a\xad\x49
\x80\xb6\x39\x32\x90\x4d\x83\x55\x00\x07\x56\xdc\x3c\x13\x32\xc3\xcc\x85\x66\x37\x0c\xd7\x5d\x79\x3b\xb0\x2a\xad\x02\x61\x48\x0f\x03\xf9\x0a\x52\x7f\x4a\xe3\x90\x19\xac\xe7\xca\x71\xd8\xe8\x97\x43\x58\xb6\x67\xec\x83\x71\x1a\x06\x9b\x67\xf1\x21\x0d\x36\xc1\x52\x21\x3b\xa6\x26\x53\x44\xa0\x22\xbe\xac\x11\xb6\xa4\xdb\xb7\x55\xde\x34\x09\x95\xb5\x58\x02\x9e\x11\xe9\x70\x53\x0c\x4a\x8c\xd6\x69\x9c\x20\x6c\x4a\x31\xfb\xb3\x6d\x6b\x32\xbb\x5b\x22\x64\x26\x10\xa4\xbf\xdc\xf4\x31\xea\xb3\x25\xbc\x2c\x17\x97\x55\x5a\x2f\x73\x27\xe1\xe4\x19\x2f\x24\xca\x84\x81\x96\x31\x91\xfb\x6e\xf2\xa0\x81\x95\x3f\xf2\xa1\x10\xca\xa5\x4e\x20\xb0\xc5\xac\x0f\x9a\x73\x72\xef\xf8\x05\x5c\x79\x95\xa8\x72\x2f\x90\x17\xcd\x15\xb6\x8a\x16\xac\x4e\x91\x71\x34\xc7\x6c\x46\xc6\xce\xc8\xba\x8d\xb6\xa4\xdb\x39\x83\x66\xce\x6d\x2b\xf5\x97\xd3\x8d\x51\x98\x6c\xfd\x17\xf3\xaa\x37\x20\xea\x43\x20\x15\x9e\x11\x64\x46\xd4\xca\x2c\x5d\x8a\x18\xb3\x60\xc3\xa0\x27\xd8\x56\x07\xe5\x28\xfa\x26\xe1\x1d\x42\x9f\x2e\xf0\x84\x24\x6f\x13\x39\x01\x5d\x5b\xe3\x3c\x78\xeb\x4c\x36\x00\x41\xcf\x60\xcf\xaf\xb0\x0c\x6b\xbd\x17\x79\x5d\x6e\xc0\xaa\x79\x96\xaa\x9f\xef\x30\x51\x57\x2b\x64\x9b\xa9\x36\x55\x10\x09\xaf\xbd\xa0\xd2\x46\x9d\x81\x2d\xbc\xfe\xaa\x36\xe5\xed\x06\x12\x14\xfc\x68\x18\xc7\xce\x15\x3f\xd6\x4e\x65\x6b\x90\x81\x96\x55\xf0\x86\x53\xc4\x38\x23\x88\xca\xe2\x65\x55\x4e\x87\xf2\x10\x3d\x5a\xc4\x37\xc6\x17\x5f\xa5\xcb\x17\x5f\x7a\x69\x4b\x4b\x01\x9e\xe0\x6d\x03\x2e\xbf\x9b\x11\xad\xa8\x62\xb1\x00\x88\x4f\xc3\x87\xcb\x32\xdd\xda\x71\xee\x5d\xe0\xbe\x77\x08\xae\x41\xa4\xae\xe2\x08\xc4\xc8\xca\xe0\x90\xc1\x41\xb5\x2f\xd9\x8f\x2c\x4c\xcd\x88\x79\xcb\x06\x10\x22\x95\x28\xc5\x19\xf8\xf4\x18\x57\xc5\x57\x06\x76\x49\xf9\x2d\xec\x39\x41\x5c\x9a\x1a\x5a\x0d\x2b\xb0\xce\xb4\xe3\xae\xdf\x62\x5d\xcb\xf0\x96\x0e\x9a\x77\x46\x9f\x08\x73\x34\xdd\x73\x67\x42\x0f\xca\x75\x9a\x2c\x8e\x30\x84\x19\x93\x38\xf4\x7c\xac\xe6\x48\xc6\x20\x73\x08\xf6\xc8\xf6\x4b\x76\x5f\x1b\x46\x63\x40\xd2\x4a\xe8\xf6\x2e\x30\x3c\xa4\x52\x8b\xdb\x6b\x32\xc1
\xb1\x44\xbf\x63\x5c\xfd\x2e\x40\x36\x76\xc6\x0b\xf8\xd4\x99\xa0\x7a\x4b\x25\x5b\xe0\xd0\x5a\xc2\x41\x38\x40\xd8\x5a\xbb\xf2\xbb\xc6\x06\x14\x81\xef\x2f\x2a\x8d\x0e\x96\xb3\xe0\x9a\x6a\x5e\x75\x1e\x7b\x54\xbd\x16\xaa\x06\x4f\x53\x56\xaf\x38\xe9\x25\x43\xa7\x5c\xe7\xa2\xf7\x7b\xd1\xca\x35\xbf\x84\x08\xb0\x0b\xb5\xa5\xad\x23\xa7\xd6\x80\x20\xd7\xdb\x25\xb6\xc9\xf3\x6c\x92\xcb\x45\x39\x74\xcd\x96\xc1\x68\x40\xf9\x3d\x1e\xb1\x8f\x5c\xd8\x2b\x58\xda\x3a\x03\x13\x1c\x3d\x1e\x11\x16\x23\x9c\xab\xb9\x41\xdb\xb5\x7e\x85\x85\xa5\x06\x2d\x69\x00\xd9\x78\x28\x0d\x2a\x23\x2c\x62\x57\xf1\xe2\x89\xbb\x51\x8c\x58\xd0\x08\x54\x32\x80\x42\x4f\x50\xaa\xb6\x49\xd5\x24\x52\xeb\x57\x4d\x6b\x51\x57\x84\x75\xa9\x04\xeb\xea\x73\x56\x2a\x2a\x0b\x35\x18\x20\xc0\x89\x4f\x97\x57\x67\xe8\xac\x8d\x4e\xbf\xd3\xf4\xbc\xec\x85\xe8\x59\x8d\xc2\x98\xa4\xec\x0c\xb4\xa4\xf3\xad\xe3\xb5\x25\xd4\xe0\x69\x2e\x20\x5c\xb7\xae\xcd\xaf\xa3\x39\x4d\x0a\xdf\xc5\x37\x3d\x3f\x4c\xdd\x64\x42\x9e\x48\x62\x30\xeb\x23\x01\x91\xf9\xc6\x6a\xf8\x2d\xfa\x3f\xa6\x30\x29\xfa\xc3\x88\x7d\x02\x36\x9c\x24\x0b\x40\xd4\xf4\x2d\x63\x55\x69\xe6\xb1\x76\x00\xca\xa6\x02\xa1\xf2\x40\xcc\x5e\xcf\xf1\x13\x19\x31\xd7\xcc\xff\x41\x8f\xe8\xf7\xe8\x0f\x4d\xea\x9d\x0b\xb0\x7f\x61\x3b\xc7\xc7\x20\x7c\x3d\xb8\xe5\x2c\xa3\xb4\xfc\xc6\x99\x41\x4a\x46\xc8\x1a\x64\x0d\x0f\x8c\x4d\xd9\x13\x8f\x96\xb2\x38\xc2\x53\x8b\x05\x61\x6a\xcc\x78\x4c\xc6\xa4\xc6\xa5\xb9\x82\x49\x68\x21\xe0\x8a\xc7\x64\xad\x43\xd2\x33\xd3\x9f\xc0\x74\x23\xf3\x89\xdf\x0e\x48\xf0\xf7\xd9\xdc\xde\xfa\x50\xa6\xb4\xfa\x91\x7b\xf4\xd9\x6d\xc6\xbd\xad\x33\xd5\x85\x89\xf6\xe0\x42\xb0\x03\xa8\x77\xe8\x25\x58\x39\xf7\x7a\xf5\x38\x56\x1d\x01\xfa\x65\x3d\x73\x7b\x59\x05\xb8\xba\x50\xfb\x44\xd0\x19\xd5\xf2\x7b\x7b\x87\x2d\x70\xc2\x6d\xbc\x19\x06\x64\xb4\x95\x3b\xa3\x58\x0a\x07\xb4\x72\xe4\xe9\xaf\x70\x42\x4e\x78\x5e\x15\xe0\xed\x02\x50\x19\xba\xfb\xad\xac\xbe\xd0\x7c\x78\x66\x32\x00\xc9\x9c\x9a\x9c\xfb\xfe\xd9\x05\xd2\xa7\x83\xa7\x06\x98\x0a\x16\x2d\x57\x73\x2e\xe8\xaf\xab\x68\x1b\x0b\x45
\xa7\x38\x52\xe3\xbd\xd4\x71\x69\x26\xa6\xbe\xed\x67\x78\xde\xda\xd4\x77\x87\x9f\x48\x10\x02\x08\x01\x7e\xb6\x15\xe9\x5d\xa9\x55\x7e\xcb\x05\x62\xfc\xb9\x00\xa6\x72\xdf\x03\x16\x73\x90\x3a\x81\xb5\xd2\x93\x41\x0c\xaf\xa4\x40\x9f\x00\x13\xf5\x95\x32\x69\x91\x00\x31\x6e\x00\x9e\x34\x79\xce\x31\x8b\x13\x77\x85\x20\x6e\x62\x6a\x16\xcf\x78\xb1\x91\x57\x3b\x8c\x6c\x2c\xf2\xe4\xcc\xf6\x97\x95\x20\xe0\x01\x46\x52\x53\x25\x65\xaf\x4e\x15\x45\x93\x1c\xa0\x6d\xf5\x9a\x4c\xf3\xc4\xd4\xc3\x88\xb8\x88\x4d\x41\x7a\x59\xca\xca\x83\xe8\x66\xa7\x35\x61\xe5\x1b\xa4\x16\x01\xd4\x56\xdc\x30\x86\xb1\x95\x72\xfd\x5f\x73\x92\xef\x29\xb1\xf1\x4d\x43\xc1\xef\xf1\x4c\x16\xb1\xdd\x66\x6d\xf4\x95\x57\xac\xef\x3f\xf4\x4c\x65\x90\x0a\xec\x0c\xb6\x1e\x59\xcb\x58\x3a\x4c\xbd\xd5\x8d\x0c\x65\xb7\xa6\xa2\xc0\x1e\x2c\x65\xaf\x11\x26\xb3\x2c\x7a\xd6\x70\x75\x4b\x7e\x4f\x3e\x31\x16\xbd\x8e\xf9\xc9\x95\x66\xa8\x08\x75\x2f\x68\x89\xda\xe2\xee\x58\xd6\x55\x56\x06\x9b\x17\x76\x29\x7f\x5b\xd4\xe4\xa8\x2b\x0e\xd9\x2c\xcf\x82\x02\xf0\xde\xa2\x78\xd9\x97\x16\x76\xb7\x70\xc8\x63\xb4\xf0\x67\xb4\x05\x08\x97\x71\x4b\xb8\xa8\xbf\x3a\x37\xb0\xeb\xd8\x86\xca\x5d\x2f\x87\x43\x34\x9d\x08\xc3\x92\x0e\xf5\x48\x2c\xa3\xee\xac\x3d\x0c\xbe\xc0\xca\xdb\xd8\x65\xbd\xc4\xf8\x7a\x27\xc3\x93\xe3\x38\xc2\xd1\xbc\x71\x52\x13\xce\x13\x82\x59\x93\x52\x50\xfb\xb8\x7a\x44\x0c\x66\x2c\xb0\xee\x24\x01\xe0\x64\xb7\x04\xb6\xd8\x66\xa1\x15\xb1\x18\x00\xef\x0d\x0f\x37\x21\x97\x6e\xa0\x8a\x30\x67\x50\xa3\x6c\x96\x90\xea\x5a\xd9\xca\x04\x3d\xdb\x49\x12\xe5\x49\x50\x6d\x33\x23\x42\x8f\x5a\x2f\xf1\x13\x61\x5a\x15\xb3\xe3\x70\x3e\xa2\x67\x97\x67\xee\x6b\x6c\xf5\x7c\xd7\xce\x4d\x09\xc9\x9c\xf1\x88\xc1\xc1\xe5\xe5\xc3\xaa\x69\x55\x6a\xed\x2d\x34\xf7\x6d\x7d\x3a\x03\x21\x62\xe3\xe3\x79\x57\xb6\xbe\x6f\x7c\x26\x4d\xdf\x63\x08\xdd\xd8\xd9\x63\x19\x78\xb5\x0a\x04\x0c\xb3\xb1\x0e\xe5\xec\x95\x6c\xf3\x10\x0c\x53\x8e\xe6\x0d\x62\x61\x9a\x50\xb6\x5e\xf4\x2e\x29\xaa\x8a\xb8\xdb\xa0\xe5\x50\x56\x46\x00\xb4\xf4\xe7\x83\xd1\x77\xd5\xb9\xbd\xb0\x52\x7d\xd9\x13
\xee\xd3\xa6\x8a\xe8\x51\x5b\x37\x57\x09\x0c\xa0\x0f\x90\xaa\xff\x93\x31\x5c\x50\x69\x84\x7b\x57\x3d\x24\xcd\xd4\xc2\x16\x9b\x83\x7b\xb1\x24\xef\x03\x90\x5e\x9d\xd7\xbd\x7a\x47\xc6\x25\xbf\x7b\x5d\x67\xd0\x91\xb5\xd6\xd4\x36\xe9\x16\x3a\x04\x66\xa9\x00\x61\x34\x05\xd9\x98\xba\xbd\x63\x9c\x34\x9a\x08\xf7\xc0\x34\x41\x39\x2a\xc0\x2f\x2c\xa6\xae\x12\x39\xd1\xbc\x0b\x27\x49\x65\x5e\x18\xb2\xcc\x95\xaf\xdd\x37\x29\x0a\x0c\xb7\x8f\x01\x48\xf0\x84\x6c\xe4\xf5\xbf\x30\x1f\xac\xa4\x22\x78\x05\x02\xe6\xb3\x2c\x59\xb4\x0b\xd4\x0f\xb5\xdf\x5a\xec\xb9\x75\x03\x0b\x11\xeb\x56\xde\x4d\x65\xd4\xb7\xed\x86\x28\x49\x94\x0b\xaa\x16\x63\x6b\x4b\x6d\xcf\xb4\xee\xec\x97\x67\xf6\xc3\x36\x86\x8a\x53\xe4\xfa\x73\xb6\x5b\xb8\xa7\x04\x35\x85\x89\xec\x14\xda\x6c\x37\xce\xd5\xbc\x16\x93\x6a\xd5\xc2\x3a\x50\xac\x76\x43\xd5\x5d\x6c\x3b\x3c\x5b\xf0\x64\xcc\xa7\x0e\x6e\xaa\xfd\xc2\x56\x2b\xc1\x6c\x60\x84\x76\xa8\xd6\x99\xa0\x5c\xd8\x82\x2b\x6d\x62\x05\x53\xfc\x79\x9c\x61\x81\x93\x84\x24\x54\xa6\xdb\x9b\xcc\xff\xf4\xc7\x95\xa3\x3d\x33\x85\x81\xa4\x2d\xb3\xf5\x99\xa6\x79\x8a\x58\x9e\x4e\xac\x94\x8b\xe5\x63\x88\x29\xea\x10\x10\x0c\x34\x96\x1b\x60\x09\x87\x41\x04\x28\xb1\x23\x16\xe0\x85\x5b\x53\x05\x8e\xe6\x94\x3c\x01\x9a\xa9\x60\x44\xca\x63\x74\xc5\x15\x39\x45\x97\x38\xbb\x07\x41\xcd\x54\xea\x9c\x19\xa7\x03\x96\x48\x4b\xad\x39\xa3\xaa\x37\x62\x16\x64\xdc\xad\xca\x49\xc4\x99\x01\x9a\x8d\x60\x61\x7d\x13\x60\x45\x77\x88\xab\xca\xe5\x8b\x52\xd9\xb0\xd8\x02\x3f\x8f\x83\xa0\xe0\xb1\x49\xba\xd8\x80\x8e\x6f\xf1\xb3\x09\x83\x3f\xc7\x0a\x9b\x22\xbc\xab\x24\x77\x1b\x67\x66\x0b\x33\x19\x7c\x65\x17\x8f\xc3\x2d\xc8\x87\x2f\x29\x67\x82\x7e\xbf\xa6\xc7\xe4\x18\x7d\x97\xf0\x89\xec\x15\xa6\x2a\xf3\x50\x12\x25\x7b\xc6\xef\x07\xff\x36\x19\x76\xdf\xb8\xd5\x2f\xf8\x3e\x54\x53\x9c\xd2\xcf\x06\x5b\x44\xfe\xe9\xf4\xe4\x24\x5d\x1c\x4d\xf2\xe8\x91\x28\xfd\x17\xc8\x14\xb5\x2b\xe4\x80\xb9\x70\x1d\xcc\xd7\xba\xd5\x59\x86\x08\x6b\x45\x91\x36\x5b\x49\x12\x80\xa3\xd7\x57\xba\xaf\x57\xeb\x10\xa5\x38\xab\x2f\xc6\x69\xa7\x2c\xf2\xa6\xe3
\x55\xc2\xb1\x7e\x1d\x6d\xc5\xd4\xe3\x0d\xe1\xb3\xa7\x09\x9e\x55\x54\x96\x0d\x94\x94\xeb\x94\x5a\x2a\xd2\x73\x87\x30\x16\x7d\xca\xca\xc1\x7b\x5f\x39\x2f\x2f\x78\x6b\xad\x17\xeb\x78\xc4\xfa\x12\x3d\x13\x53\x66\x17\x52\x3d\xc1\xe9\x93\x53\x39\xf7\x89\x9e\x60\x86\x86\x46\x0d\xca\xb0\x01\xa3\xb0\x8a\xa3\xd3\xac\x9c\x5b\xcc\x6a\xa0\x38\x91\xa4\xa7\x1b\x06\x93\xaa\x8b\xcf\x44\xcf\x02\x67\x19\x11\x23\x66\x11\x63\x01\x17\x9d\x73\x1b\x7b\xd3\x14\xa4\xdf\x69\x94\xaf\xab\x51\x06\x6b\x4f\xca\x79\xa0\xeb\xce\x37\xa4\x8d\xae\x5a\xe1\xba\x4c\x48\xb7\x7c\x5a\x16\x6d\x1b\x40\xff\xf6\x66\xe3\x96\x63\x5e\xa7\x9d\xf7\x2b\xd9\x0f\x50\xc5\x3b\x05\x05\x52\x16\xc5\x4a\x9d\xad\xcf\xab\xef\x25\x31\x07\x00\xc7\xe1\xe3\x98\x13\x19\x18\xf1\x91\xb7\xc5\x25\x74\x4a\xb4\xf4\x31\x62\x9a\x8c\x43\x87\x83\xc1\x2d\x77\x30\xe6\xba\xd3\x48\x70\x29\x6d\x42\x83\x69\x67\x75\x5a\xda\x0e\x25\x12\x0d\xf8\xfa\xf0\xfa\x6a\xbc\x5c\x2c\x31\x78\xe6\xca\x26\xda\x87\xb5\xd8\x05\x8d\x4d\xad\x2d\x92\x58\xac\xc5\x06\x65\x12\x4f\xce\x2e\x86\xbe\x36\x58\xa5\xeb\xe5\x3a\x89\x21\x60\x7d\x73\xa5\xc4\xe5\x19\x07\x35\x13\x2b\x4d\xac\xa8\x9a\xb8\x7e\xb3\xca\x61\xd4\xbb\xa0\x11\x56\xb6\x7e\x2d\x7f\x28\xd3\xcc\xba\x68\xff\x3d\x6d\x53\xc3\xb5\x12\x81\xc0\xf8\xd2\x81\x0b\x20\x78\xe9\xb7\xa4\xc2\x69\x16\x66\xb2\x3a\x38\x56\x3b\x4d\x73\xd4\x9a\x2e\xc1\x57\x85\x89\x8f\xb0\x09\x12\xaa\x0e\x6e\x69\x2b\x36\xf3\x78\xdd\x5b\xf4\xf9\x7d\x44\x87\xbf\x5e\x6a\x78\xb2\x28\x82\x21\xa5\x95\xdd\x5c\x65\xf3\x06\xbb\xff\x84\x78\xa4\xfd\xc6\x0d\xdd\x35\xf7\xd3\x23\x72\x09\x82\xa5\x75\x7f\x43\x8a\x64\x25\x7d\x6a\x03\xf3\xb0\x1f\xb3\x49\xb2\x3e\xf2\xb5\x2d\x82\xab\xc6\x96\x6b\x8b\xdc\x41\xa4\x42\x90\x27\x22\x80\x76\x6c\x28\x15\x2b\x1f\x55\x9c\x08\x82\xe3\x45\xb0\x22\x3e\x8e\xc3\xf4\x0c\xe6\x31\x49\x53\xad\xc0\x83\x6a\xc2\xf8\x11\xcf\x9c\xce\x52\x7a\x0b\x0a\x93\xd0\xa9\xbe\xb1\x82\x28\x10\xfd\x05\x3b\x22\x9f\xa9\x54\x5a\xae\xa8\x09\x81\x75\x8d\x80\xc4\x03\xe5\xca\xe6\xc4\xde\x70\xa3\x0f\xfd\xef\xae\x6f\xef\x07\xe7\xa3\x0f\x45\xd2\x83\xcb\xea\xf3\x40\x5b
\xae\x6e\x02\x67\x23\xe6\xe3\x94\x3d\xae\x34\xec\x25\xc2\x71\x5c\x00\x46\x58\x25\xd2\xc8\x6c\x2b\x39\x72\x70\x2a\xd6\x46\x28\xaf\x68\xe6\x01\x52\xbb\x0e\xf5\x64\xad\x70\x9d\x95\x4e\x8e\x49\x50\x5b\x91\x49\xb4\xa7\xcb\x26\x84\xc4\x55\x46\xd7\x26\xca\x61\x36\x32\xf2\xec\x74\x25\xb8\x9d\x4f\xb0\xb9\x84\x37\xe3\x76\x6e\x43\xb6\xd8\xd4\x8f\xf4\x33\x89\x6f\x1b\xa4\xaa\xbd\x24\x0a\xb5\x0a\xb0\xac\xdd\x85\x9c\xd1\x4d\x34\x7e\x3f\x95\x07\xfd\x5d\x7b\xb6\x74\x5d\x20\xdd\x15\xa8\xb5\x00\x59\xab\x10\x46\x11\x11\x0a\x53\x86\xa6\x70\xb0\x59\xb4\x40\x80\x83\x42\xc0\x87\xfd\x47\x94\x52\x06\x80\x0c\xab\x96\xf6\xa1\x3c\x8f\x0d\x84\xd6\xcb\xe1\xd5\xc3\x7d\x49\x54\xfd\xfe\xfa\xa1\x5c\x2b\xbf\xff\xf3\x4a\x59\xb5\xd2\xc2\xaa\x60\xa1\x60\x8a\x45\x72\xa7\x05\xef\xf5\x2b\x53\x3b\xd1\x64\xa1\xc8\xc3\xed\xc5\x4e\xf2\x5d\xbd\xb3\xac\x11\x7a\x3d\x94\xae\xea\x81\x26\xda\x7c\x1a\x93\x68\x1d\x38\x6c\x7b\x3a\x32\x51\x50\x7a\x1d\xac\x35\xd1\x02\xc7\x61\x89\x32\x2c\xac\x1f\x2a\x36\x01\x50\xe5\x82\x6b\x46\xf3\x5a\x05\xcc\xf1\x89\xa8\x1f\xf5\xd5\xc7\xd9\x3e\x92\x4b\xac\x28\x0b\xfe\x51\x32\x7e\x32\x0d\x6f\x70\xd2\xec\x50\x56\x64\x10\x39\x61\x19\x7a\x40\xb6\x87\x10\xce\xe2\xd8\x14\xde\xef\xeb\xe6\x60\x45\x5c\x98\xa6\x56\x49\x39\xd3\x14\x69\x50\x6a\x1d\xb4\x6d\xd0\x1c\x9f\x9a\x8f\x5b\x02\xfd\x05\xc9\x02\xba\xad\x62\x29\x51\xff\x66\x58\xb3\xd6\x17\x55\x17\xd2\x97\x55\x25\x28\xf1\xde\xac\x7d\x63\x4f\x05\x59\x9f\x07\x01\x36\x65\x67\xba\x1b\xba\x94\x71\xfa\xdf\x94\x23\x09\x0e\x01\x04\xb9\x4e\x65\x28\x65\x73\xaf\xc1\x3b\xde\x2c\xc1\xb1\x58\x86\x0d\xb1\xa4\xc2\x01\xd9\xec\x9a\x10\x3f\x69\x39\x74\xbb\x17\xe2\x29\x71\x53\x87\xd8\xc6\x16\xec\x0d\x63\xaa\x98\x4d\x1b\x90\xa9\x1f\x0d\x45\x7b\x0c\x12\x40\x55\x71\x75\x2e\x5d\xc8\xb5\x85\x04\x08\xa7\x1b\x52\xdb\x66\xb8\x54\xc5\xf8\x9c\xf9\xdb\x42\x7c\xe3\x0c\x5b\xbb\x03\x28\x51\xae\x00\x45\x5d\xbd\xc2\xe3\x11\x0b\x02\x56\xa4\x51\x7b\xf4\x19\x71\x35\x5f\xa0\x90\x30\x03\xbc\x70\xc8\x7d\xf2\xc2\x4f\x69\x07\xaa\xc8\x03\x6a\x5e\xae\xda\xb2\xd4\x8f\x3d\x9d\x72\x8e\x5d\x7e\xa7
\xb3\xa0\xd8\x38\xc0\xd0\xbe\x04\xed\x05\x75\x1a\x6c\xc7\x60\x8e\x06\xa3\x05\x0e\xaa\x00\x06\x98\x00\x31\x27\x92\x7d\xa5\x7c\x06\x2d\x4d\x16\x2e\xa4\xba\xe2\x1e\xd0\x52\x1d\xa6\xb6\xe5\xd5\x07\x7c\x0f\xa0\x57\x9b\x2a\x0e\xc1\xb1\x5a\x6b\xa6\x72\x3e\x5e\xa0\x84\x30\x16\x09\x3a\x6d\xb2\xaa\x7f\xce\x48\xb4\x0d\x32\xcf\x0d\x16\x38\x25\x8a\x88\x55\xe1\x48\xe5\x1a\xdd\x20\xe2\xb8\x1d\xb4\xfd\x9a\x5d\x34\x05\x4c\xaa\x95\x6e\xbc\x76\x7b\xb1\x0e\x69\xc7\xcf\x62\x23\x50\x31\x3d\x8d\x1f\xad\xe5\x7f\xc3\x59\xd8\x7e\x8a\x69\xd8\x68\xab\x00\x58\x69\xd7\x39\xbd\x0e\xc2\xcc\xfd\x12\x56\x4b\x29\x5c\xe8\x40\xa0\x65\xd6\x8f\xb2\x09\x53\x66\x1d\x2f\xdd\x0b\xef\x76\x19\x0e\x2e\x33\xb9\x72\xa8\x4a\xb9\x13\x40\x25\xa0\x52\x19\x78\x95\x7a\x5c\x18\x10\x5a\xea\x22\x24\x03\xb7\x9f\x45\x0d\x2c\x0c\xba\x56\xb2\xaa\xd6\xec\xaa\x2c\xd7\x1a\x1e\xb7\x2f\xcc\x8c\x4e\xa2\xd9\xb7\x44\xb3\x8e\x94\x4b\xd1\xb5\x9a\x3a\x89\xa8\xc0\xf7\xd8\x5a\xda\x16\x77\xa1\x3c\x41\x48\xe9\xb2\x57\xa4\x2d\xc8\x0b\x57\x3f\x65\xfe\x5f\x65\x0e\xee\x88\x3a\x24\xd5\xba\x5c\xd5\xe3\xc0\x05\x05\x1e\xa8\x24\x94\x06\x6c\x5c\x0d\x8c\xd6\x84\x41\x1a\x2b\xff\xf0\xca\x38\xb0\x20\x67\x7c\xc1\x73\xf4\x4c\xe5\x1c\x29\x3e\x62\x10\x27\xe8\xbd\x01\x8a\x23\xf3\x62\x0f\xde\x02\x74\x09\x99\x4f\x52\xaa\x10\x0e\x66\x58\x32\x49\xf6\xec\x79\xd6\x1f\xc0\x8c\x6b\xe1\x0b\xea\x90\x8f\xd6\x1c\x9a\x2d\xec\x6b\x45\x23\xbb\x22\x14\x04\x31\xcd\x2f\x8b\x51\x10\x68\x3c\xa1\x86\x59\x7b\xe6\x3a\x90\x02\x54\x6f\x6d\xb0\x58\xac\x00\x98\x4b\xa5\xaa\xdc\x2d\xd6\xd0\xb3\x06\xa0\xa0\xd8\x88\x56\x08\x05\xc5\xeb\xfb\x80\x28\x68\xaa\xfe\xb6\x2a\x65\xd5\x7d\xd2\x60\xff\x76\xa9\xd0\x8a\xbb\xc0\xf9\x50\x52\xba\x69\x94\x94\x0e\x0d\x2c\xae\x48\x08\xd8\x3e\xbc\xbc\x29\x7a\x19\xce\x78\xc4\x59\x4c\x37\x88\x17\x86\x0a\x5f\x93\x7c\xda\x67\x8b\xf5\xd8\x43\x69\x18\xa8\x6f\xed\x25\x81\x24\x52\x8f\x7a\xb9\x56\x65\x2d\xda\x0f\x29\x3d\x48\x09\x2d\xc3\x01\x91\xea\xed\xc4\xb8\x82\xbc\x9f\x08\xaa\xf7\x2f\xe5\xa2\x8e\x58\xbd\x94\xb4\x9a\x6f\xef\x9a\x46\xb2\x57\xe0\xbb\x80\x47\xb8\x59\x58
\xab\xdb\x4f\x3e\x10\xcf\x28\xf4\xb6\x0e\x7f\x55\x0c\x2e\xdc\x90\x4d\x01\x54\x5a\x38\xda\x26\xd7\xbc\x86\x73\xd4\x0f\x7d\x29\xc9\x63\xed\xd9\xb5\x82\xc1\x1e\xd5\xcf\xa5\x1b\xa4\x75\x4e\x8c\x97\xe3\xed\x8d\x61\x83\xba\x63\x6f\x6b\xa8\xb8\x93\xb7\x29\x30\x0c\x80\xb2\x7b\x83\xc1\xad\x22\x53\xe8\xc6\x7b\xe0\x82\xb6\x63\xc7\x26\x1c\xc7\x83\xb3\x57\xf6\xa4\x34\x63\x13\x52\xf9\x22\xb3\xde\xb4\x2a\x74\xe0\x13\x15\x36\x26\x99\x86\xd6\x0d\x28\x07\x6d\x43\x39\x2b\xb7\x85\x17\x40\x73\x16\x13\xc1\x08\x56\xf3\xd7\xcb\x04\x39\xdb\xd5\x84\x1e\x8c\xef\x65\xb3\x42\xec\x48\x71\x39\x39\x64\x97\xe1\x96\xcb\xe3\xaf\x1d\xa7\x7e\xbd\x8d\x35\xcb\x06\x48\xf8\x02\xd1\x4b\xea\x6d\x8d\x69\x33\xc0\x1f\xda\x84\x4a\x77\x4a\x16\xa9\x57\x39\x5f\x26\x6d\xa6\xc6\x36\xb5\x94\x30\xa3\x4f\x7b\x58\x56\x7b\xcd\x92\x7c\x11\xf9\x29\x2f\x9f\x32\xb1\xaa\x80\x77\x1e\x64\x51\x40\x15\x75\x85\x29\xb3\xdc\x6b\x55\xe2\x84\x96\x7b\x53\x5c\x97\x2b\x71\xf0\x59\x38\x5f\x7c\x12\x4e\x97\x92\xd1\xa5\x64\xd4\xec\x51\x97\x92\x81\xd0\xa1\xa5\x64\xac\x53\x41\x57\x19\x69\xbd\xdf\x10\x0a\xad\x96\xaa\x1b\x99\xfd\x5d\xa3\x47\x6e\x9f\x76\xe0\xec\x9c\x61\xcc\x96\xfd\xc5\xfe\x50\x1b\xb6\xb5\xf4\x59\x75\xb6\xa1\xcd\x95\x2d\xaa\xae\x0b\x2c\xe2\xc4\x42\x10\xda\xa0\xea\xb2\x8d\x6c\x95\x39\x77\xc4\xbe\xe7\xcf\xe4\x89\x88\x1e\xc2\x0a\xa5\x1c\x70\xad\x8a\x18\x1e\x38\x08\x25\x34\x7b\x13\xab\x81\xd1\x15\x4e\x49\x6c\x8a\x5d\x06\xa1\x97\xd6\xa8\x6c\xdd\xc1\x75\x48\xbb\x00\x1a\x6b\xb6\xc1\xc5\x76\x8c\x98\x09\x87\x34\x21\x78\x20\x2b\x50\x37\x31\x20\x98\xdf\x79\x67\xf5\xef\x8e\xd1\xbd\xbe\x9f\xa8\x2c\x8f\x37\x00\xde\x6b\x1a\xdb\x88\xcd\x04\xcf\x33\x6f\xe7\xe3\x13\x53\xf5\xd8\x44\x68\x2d\x3b\xab\x61\x30\xce\x53\x1d\xe1\x58\xeb\xe2\xab\x09\xe7\x4d\x22\x65\xb7\x82\x59\x0a\x09\x48\x1f\x43\x1f\xfe\x67\xc3\xf1\x8d\x8f\x39\x00\x97\x59\x85\xc1\xff\x42\x0e\xf0\x73\x22\xc1\x2a\xe4\x3d\x03\xa5\x5c\xf7\x32\x9e\x42\xed\x38\x57\xd9\x6d\xbd\x6f\xc5\xf9\x1f\xea\xa1\x1a\x8a\xce\x6d\x5c\x9a\x49\xa4\xb5\xf7\xc4\x8b\x59\x74\x5b\x47\xf8\x36\xf1\x8b\x9b\x5c\x64\x1c\x24
\xb1\x64\xe1\xa0\x25\x2c\xc8\x5f\xc6\xb3\xdc\xc4\xde\xd1\x30\x14\xab\x96\xb2\xa9\x54\x97\x58\x45\x73\xcd\xb9\x0b\x54\xb6\x3d\xc5\x24\x16\x5c\xf9\x65\xad\xbc\x35\x33\x38\x0b\x7b\x6f\x70\x7b\xac\xa2\x9e\x20\xc6\xd0\x07\x72\x7a\x49\x22\xd5\xfd\x19\xd7\xa0\xad\x65\x1e\xd8\x45\xdd\x27\xf6\x89\x9e\xe8\x3a\x2a\x5a\x37\xfe\x76\xb4\x55\x2e\xb6\xb6\xf7\x68\xc7\x1d\x60\x6e\xce\x2d\xa8\x58\xf1\xa2\x2d\xce\xdb\x10\xa2\x20\xe8\x76\x99\x4a\xb6\x40\xc2\x93\x16\x47\xbc\xc5\x35\xc5\x99\x56\x22\x14\xd7\xb7\xa4\x98\x19\x39\xd6\xc4\xf2\x22\x8c\x72\x41\xdd\xd9\xaf\xe4\xad\x37\x53\x07\x58\x28\x4f\xc2\x62\x5a\x11\x0e\xea\x0c\x9a\xa0\x04\x1c\xa9\x1c\xfb\xe0\x49\xa0\x09\x57\xff\xde\xe4\xe8\x3b\xe7\xbf\x70\xe2\x5d\xcd\x9e\xae\x25\xec\x1d\x76\x19\xd7\x61\x30\xb6\x3a\x69\x94\xcd\x02\x00\xc7\x7a\x2b\x71\x9b\xb2\x17\xb5\x5f\xb6\x2b\xdd\x51\xfb\xa9\x93\x7d\xb6\xf9\x76\x05\xc0\xd4\xd6\xf1\xe3\xa5\x58\x7c\x1b\xac\x6b\xa5\xa7\x10\x5a\xd3\xda\xef\x00\x78\x98\x42\x30\x01\xb6\xd2\xd4\x7f\xf9\xbf\x4c\x99\x34\xb3\x34\xff\x85\xb8\x18\x31\xf3\x7b\xcf\x97\x28\xd1\x2f\x14\xd8\xbf\x38\x25\x05\x8c\xa7\x28\x03\xfe\x01\xec\x89\x05\x6c\x33\x38\xcf\xbe\x42\x83\x1e\xc3\x63\x3e\x21\x82\x11\x3d\x34\x07\x90\xe0\x99\x59\x8a\x19\x9e\x01\xaa\x74\x0f\xa2\xf7\x40\x5c\x2d\x54\x11\x43\xd2\xa6\xd4\x25\x70\x2b\xcd\x2c\x6d\x4e\x70\x51\xf2\x19\xfa\x34\xa2\xac\x05\xb5\x2d\x42\x40\xea\xa9\xff\xd6\xf6\xbf\x9d\xc4\x7e\xdf\xbf\xfb\x61\x7c\x3b\xb8\xbb\x7e\xb8\x3d\x2b\x89\xed\x67\x17\x0f\x77\xf7\x83\xdb\xda\x67\x45\x3e\xed\x5f\x1f\x06\x0f\x0d\x8f\x5c\x03\x17\xfd\xef\x06\xa5\xfa\xe9\x7f\x7d\xe8\x5f\x0c\xef\x7f\x1e\x5f\x7f\x1c\xdf\x0d\x6e\x7f\x1c\x9e\x0d\xc6\x77\x37\x83\xb3\xe1\xc7\xe1\x59\x5f\x7f\x19\xbe\x7b\x73\xf1\xf0\x69\x78\x35\x76\xa1\xd1\xe1\xa3\x9f\xae\x6f\x7f\xf8\x78\x71\xfd\xd3\x38\xe8\xf2\xfa\xea\xe3\xf0\x53\xdd\x2c\xfa\x77\x77\xc3\x4f\x57\x97\x83\xab\xd5\x75\xda\xeb\x57\xa3\xb1\x04\x74\x70\x91\x05\x46\xa3\x40\x4c\x9a\x2c\x2c\x69\xd3\x5f\xc1\x77\x71\x63\xe8\xf1\xa8\xe7\xfe\x32\x55\xd5\x8f\x34\x0b\x74\x7e\xb1\x82\x7b\x8c\x98\x77
\xae\xfa\x4b\x55\xe1\x99\x74\xe9\xd1\xa5\xd1\x9e\xa2\x3e\x9c\x15\x50\x18\x4a\x9d\x42\xf6\x85\x1f\xa9\x73\xc7\x03\x1d\x26\x34\xa5\xe0\x99\x47\x47\xa8\xba\xe1\xe5\x06\xed\x9c\x60\x08\xd6\x6f\x17\xaf\x3a\x0d\xb2\x9a\x79\x0d\x94\x72\x8a\x1c\x87\x26\xc6\x9c\x60\xf0\x71\x17\x0c\xa7\x34\xaa\xa6\x89\x00\x44\x2c\x2a\xe0\x50\xaa\x2d\x96\x08\xac\xdc\xf2\x9c\xa0\x1f\xfe\x52\x0c\x0a\x3c\x18\x56\xf3\xce\x97\x8a\x29\xda\x07\x22\x37\xab\xba\x8e\x3c\x4b\x3d\xb9\x63\x6e\x4d\xcb\x70\x6e\x6d\xd1\x76\x70\x37\xe5\x2c\x80\x44\x2b\xf9\x9e\xf4\xf1\x36\x33\xaa\xd0\xf8\x29\xba\x03\x38\x16\x59\xa8\xee\x7a\x17\xb3\x24\x9f\x51\x86\x68\x9a\x25\xc0\x63\x8c\x3e\x3f\x21\x73\xfc\x44\xb9\xab\x5c\x62\x0a\xbc\xc0\x3a\x5a\xd1\x0a\x1d\xa1\xc6\x83\x72\x8a\xfa\x71\x2c\xcb\x0c\xae\x44\x39\x8e\x65\x1e\x95\x87\x1d\xa2\x98\xb1\xd8\xb3\xcd\x0a\x1d\x15\x47\x0e\x56\x6c\xff\x80\x33\xcb\xec\xb0\x7c\xf7\xee\x70\xfd\xeb\x15\x1c\x3b\x52\x1e\x6f\x25\x0c\xdc\x63\xf9\xe8\x58\xf3\x3a\x81\xc0\x41\xff\xec\xd6\xa3\xc5\x00\x6a\xdb\xa9\x5f\xd9\x31\x1c\xb4\xed\xfa\x6c\x44\xae\x5e\xd3\xa5\x9b\x71\x52\xa9\xda\xd6\xba\xbf\x52\xd5\xb7\xda\xce\xf6\xea\xed\xa9\x97\xc6\xe0\x48\x8e\x3d\xfd\x6f\x30\x8f\x1b\xf8\xf4\xda\x7f\xb9\x52\x64\x1b\x07\xeb\xb6\xa9\x0f\x68\x29\x91\xd8\xfa\x81\x56\xd2\xe1\x9e\x20\xa8\xda\x0b\x83\x50\x73\x83\x46\xe0\xee\xc3\x94\xd9\x4a\x4c\xc4\xfb\xa3\x5c\xed\x71\x7d\x8e\x7d\x75\x40\x3c\xe1\x4f\x25\xe5\x32\x25\x52\xe2\x06\x50\x95\xc0\x24\xb6\x0b\x63\xf0\x27\xd4\x7e\xd8\x92\x9e\xdc\x99\xbc\xd7\x5f\xad\x32\xfa\xdc\x86\x9a\xb1\x9b\xa8\x16\x58\x63\x17\x0d\x8c\xae\x4d\x4e\xa0\xe6\x2f\xbd\x22\x98\x86\x8b\x20\xc6\xa8\xc9\xfd\xd3\xd2\xac\x56\x5d\xb0\xda\x02\x5b\xa1\x0b\x6f\xf3\x18\x9c\xa0\xf5\xad\x51\xbb\xad\x5f\x05\x97\xd7\x67\x03\xaa\x2b\xf9\x3b\xc3\xe2\xe3\x11\x4f\x53\x23\x17\x94\x6c\xa9\x3d\x84\x4d\x2a\x66\x21\x4d\xc9\x3c\x9a\x1b\x2f\x93\xbe\x32\x7a\x23\xf6\x1c\x6c\x48\x29\x58\xb9\x1f\xb6\x04\x88\xa7\x9f\xf5\x71\xa3\x4f\xa5\x10\x70\x10\x19\x29\xc4\x23\x07\x84\x60\x1c\x82\x45\xe5\xb0\x35\x04\x1e\xec\xd7\x0e\xa4\xbe\x45\x99
\xc8\xca\xfa\x36\x15\x8b\xf4\x73\x0b\x6a\x34\xee\xa0\x29\xb7\x1d\x42\x50\x26\xb2\x6e\x04\x7b\xa8\x12\xf9\xaa\x10\xe4\x3e\xa5\xd4\x64\x20\xa7\x13\x8b\xa3\xa1\xa7\xeb\x56\xfb\xf7\x6e\x46\xbf\x37\x7e\x87\xbc\x01\x78\x25\x68\xcd\xa3\x90\xa3\x23\x2d\xb3\x3a\x40\x00\x1b\x88\x21\xd1\x91\x41\x36\xfc\x0a\xa2\x41\xfb\x37\xc3\xaf\x7a\xe8\xab\x30\x23\xee\xab\xad\x0e\xa0\x1d\xb7\xad\x14\x09\xda\x54\x29\x2d\xa2\x7c\xec\x60\xaf\x2a\x27\xd1\xee\x99\x3d\x88\xa8\xe9\x1c\xea\x2f\x4b\xdf\x80\x73\x1a\x2a\x1f\x1a\xff\xad\x0f\xca\xb6\x2e\x20\x23\xe3\x52\x59\xb3\x76\xf1\x88\x4d\x16\x55\x27\x4f\xcf\x7b\x79\x5a\x9f\xd2\x9d\xab\xf9\xe9\xf6\x96\x53\xa8\xf7\x1c\x2c\xbc\xfa\x3e\x58\x93\x94\xdd\xf7\x25\x67\x0a\x2e\xd6\x14\xa5\xd0\x45\xd9\xd7\xcd\xaa\x64\x2f\x73\x8b\x59\xbb\x29\xeb\xe4\x9f\xf7\x46\x6e\x2d\x42\xd3\xfb\x75\x2b\x62\xb3\x12\x1a\x84\xeb\x8e\xca\x5e\x96\xca\xf6\x91\x95\x51\x1e\xdc\xe6\x17\xe8\x99\x91\xe3\x82\x66\x9c\xc1\x55\x2b\x13\x9e\xc1\x97\x4a\x3e\xae\xaf\x95\xbc\xa1\xcf\x37\x58\x93\xf5\x4e\xdf\x3b\x13\x38\x60\xdc\xae\xcb\x63\xad\x0e\xb5\xaf\x6c\xa1\x24\x4e\x4d\x06\xa6\xa2\x29\xe9\x99\xca\x5c\x45\xb0\x83\x3d\xaf\x40\x6e\x26\x46\x69\x4e\xa8\x70\x9d\x58\x1c\xc4\x8d\x52\xf6\x37\x94\xc6\x9b\x68\x64\x87\x48\x93\xab\xfe\xe5\xe0\x7c\x3c\xb8\xba\x1f\xde\xff\x5c\x83\x71\x59\x7e\xec\x60\x2e\x83\x17\xee\x7e\xbe\xbb\x1f\x5c\x8e\x3f\x0d\xae\x06\xb7\xfd\xfb\x35\x10\x98\xab\x3a\x6b\x82\x57\xcc\x65\x9d\xfa\xb6\x09\xc4\xa2\x33\xf3\xd6\xf4\xbe\x0c\x84\x19\x74\x42\x49\x03\x18\xa6\x81\x27\x60\x31\x11\x28\x26\x4f\x24\xe1\x59\x61\x56\xad\x5d\xb0\x00\x25\xb3\xa6\xfd\x55\x48\x99\xd0\x66\x75\x8d\x4f\x91\x29\xf3\x17\x54\x3a\xf6\x0d\x82\xc8\x87\x05\x61\x5f\x29\x44\x3e\x67\x09\x8d\xa8\x0a\xd2\x17\xb9\xb0\xee\x15\xe3\x3e\x84\xe8\xd4\x35\xc4\xb5\xb7\x68\x94\xbd\xeb\xfc\xa1\x27\x7d\x59\xdb\xf7\x27\xca\xa3\xb6\xad\x2d\x72\xb4\x07\xc5\xbe\xc1\x69\xbc\x04\x2a\xb7\xc5\xe8\x5e\xc2\x3c\xb0\x9c\xa3\x63\x53\x10\x1b\x00\xe7\xea\x07\xb9\xfe\x36\x5c\x15\x27\x53\x3a\xd7\xab\x03\x65\xda\x51\xea\x1b\x87\xbb\x94\x6a\xaa\xee\x01
\x1d\xc4\xc6\xae\x6f\x18\xb0\xb0\x54\xd3\x86\x99\x98\x53\x8c\x04\x49\xb9\xd2\x0a\x98\x89\x08\xe8\x69\xa1\x8a\xe2\x84\xfe\x0a\x38\x5a\x82\x1c\x07\x11\x14\x0e\x7d\xac\x70\x1f\x58\x8c\x8b\xe3\x11\x3b\x1f\xdc\xdc\x0e\xce\x34\x43\x3a\x46\x0f\x12\x20\xb2\x4a\x53\x3f\xb7\xe4\x6d\xc4\xb1\x30\x92\x81\x32\xa9\x08\x6e\x0a\x06\x23\x42\x70\xd1\x9e\x3f\xf8\xfe\x06\xf0\x5d\x3d\x79\xc3\xb3\x92\x6d\xca\x19\x00\xae\x1a\x0b\x62\x07\x39\x03\x7b\xcf\xc9\xba\xc5\xcf\xa5\x15\x09\x21\x42\x40\x12\x29\xaf\xfa\x0b\xae\x36\x80\x8c\xb6\x9f\x5f\xa9\xcf\x1b\xf8\x76\xd5\x3c\xef\x21\xc4\x4e\xaa\x02\xb1\xd4\x80\x9a\xfa\xca\x3c\x95\x79\x36\x8a\x8a\xe2\x2d\xe0\x44\x2a\xa4\x3f\x21\x33\xcc\x90\xc8\x19\xab\x40\xd8\x86\x96\xb6\xe5\xa0\x99\x4d\x8f\xaa\x5e\x33\x9c\xf2\x9c\x99\xd2\xb2\x7a\x54\x35\x83\x91\x19\x61\x6a\xcd\x60\xde\x0a\x2c\xa6\x32\xd4\xc3\xc5\x8b\xa9\x19\x68\x13\x64\x4c\x9d\x3f\x09\xaa\x6e\x6f\x76\x2d\xbb\xa0\xbc\x92\x53\x49\x1f\x2a\x7f\x3f\xd7\x6b\xd9\x58\x3e\xee\xdc\xdd\x3d\x96\x8f\xeb\xbb\x8a\x49\xf4\xb8\xe9\x65\x53\xcd\xcc\x4c\x6c\xd1\xf2\x25\x63\xdf\x42\x3f\xb5\xe5\x63\xa0\x56\x7d\xf4\x88\xbe\xbf\xbf\xbc\x40\x53\xaa\xe5\x5e\x7d\xad\x5c\x61\x2d\x63\x3f\x88\xc4\xd9\x85\xad\x6d\x35\x17\x89\xbf\x7b\x61\xe3\x9d\x28\x15\x48\x09\xfa\x46\xc3\x33\xe2\x8c\xbd\xc2\x22\x02\x56\xca\xc7\x08\xcc\x62\x9e\x9a\x79\x9c\xc8\x7c\x3a\xa5\x9f\x8f\x15\x16\xdf\x34\xac\x87\x89\xaa\x18\xff\x9d\x4f\xc6\x7a\x44\x3b\x5e\xc4\x75\xcd\x21\x5b\x4b\xdb\x2f\x9b\x9d\xd9\xb9\x79\xf7\xff\xf2\x09\x64\xbb\x43\xc2\xbe\xf3\xce\xd9\x48\x05\xfb\x8a\xa3\xa4\xa2\xb8\x74\x09\x88\x25\xe2\x42\x10\x9b\x24\x6f\xea\x9f\x66\x58\x28\x0a\xd6\x5a\x07\xe4\x52\x42\xf0\x2f\xb6\x28\xac\xf6\x3e\xc7\x05\x5a\xf6\x84\x10\x70\xf0\x64\x34\xd9\x4c\xe9\x3d\x2b\xf9\x26\x2b\x27\xd0\x46\x9e\x5a\x6c\x4f\x30\xc8\xac\x15\xb1\x06\x4f\x84\xa9\xbd\xe8\x27\xd0\x44\x4d\xda\x7e\x3b\x2f\x83\x29\x43\x3a\x3c\x2f\x2e\x37\x17\xd2\x1b\x46\x35\x29\x81\xe1\x9e\xb7\x89\x52\xd6\xa5\xde\xe4\xe8\x7f\x6a\xed\x3b\x86\x57\x97\xd7\x65\x4d\x68\xbc\x5d\xed\xa2\xca\x7b\x11\xd6\xea\xca\x0f
\x6c\x09\x36\x24\x89\xb1\x62\x04\x20\x17\x56\x39\xad\xee\xb9\xe9\x53\xd3\x56\xa5\xcb\xb5\x5b\xbe\x05\xb2\x4e\xa9\x99\x4f\x04\x52\x3a\xf7\x11\x88\xbe\x49\xee\x3e\x0c\xe4\x41\x24\x10\x42\xbd\xd2\x8a\x65\x4a\xa1\x6b\xce\xe7\x25\x3b\xdc\x42\x46\x37\x83\xd1\x42\x23\xc9\x04\x89\xf4\x55\x76\x8a\x6e\x12\xa2\x25\xaf\x5c\x4b\x5f\x79\x92\x38\x14\xb2\xd5\xd2\xe1\x46\xc8\x79\x2f\x3e\xaf\x40\xf7\x58\x31\x31\x87\xc2\xb7\x7a\x66\xc1\x1a\xec\x1f\x71\x21\x58\x5f\x30\x21\x83\x21\xb1\xac\x45\x02\x87\x5f\x98\xb8\x59\x30\x25\xe1\xd2\x45\x46\x7f\xd5\xec\x57\x10\x39\xe7\x8d\x49\x8e\xe1\x6c\x5f\x66\x0e\x6e\x29\x5f\x70\x12\xee\x3e\x6c\x8a\xab\x6e\x21\xd7\x54\xee\xc0\x92\x88\xb3\x6a\x8a\xbe\x42\x85\x8f\xfe\xb0\x98\xb0\xf6\x6e\xb5\x43\x83\x5b\xb2\x30\xb5\x85\xf8\x6c\x85\xeb\xa2\x50\x66\x16\xc6\xf7\xea\x3f\x2f\x0c\xc8\x45\x4a\x00\x55\xb2\xa8\x8c\x87\xf4\x5d\xdb\xb4\xc5\x7a\x9e\xe3\x5c\x6c\x04\x49\x51\x20\xab\x6f\xc2\xb9\x6d\x32\x4a\x31\x2c\xbd\x08\xf5\xec\xd2\x16\xbc\x00\x31\xda\x86\x1a\xc9\x12\x5a\x9d\x25\x1b\xb3\x8c\xb5\x2a\x5e\x33\x53\xde\xd5\xad\x06\x52\x72\x21\xca\xbc\x94\x77\xad\x44\x81\xa5\x09\x74\xf8\x67\x9b\xe3\x9f\xd9\xea\x27\x9e\xf6\x00\xad\x50\x09\x48\xfc\x2f\x1c\x68\x55\xc1\xc1\x1a\xbd\xd7\x65\x3e\x95\x76\xa7\x55\x9a\x53\xe9\x0b\xcd\x4b\xce\x77\xf4\xc0\xe9\xc9\x2c\xc6\x90\x38\xba\x4b\x14\x4e\x69\xfe\xc6\x7b\x00\x6d\x92\x18\x19\xf4\x02\x83\xce\x6c\xd7\xce\x7b\x4e\x32\x2c\x08\x53\x23\x76\xab\x47\x61\xbe\x28\x22\x31\x5c\x1c\x8e\x43\xcc\x87\xba\xba\x53\x84\xed\x57\xb0\xe8\x4d\x81\x70\x72\x6c\x5e\x02\xd5\xf4\x05\x93\xec\xbf\x33\xef\x18\xcc\x03\x8b\xf9\xa3\xa7\x4a\xa7\x85\x1a\xaf\x05\xc8\x68\x4e\x01\x72\x20\x26\xd2\x5e\x48\x54\x59\x4c\x09\x2f\x7e\xe7\xc4\x61\x44\xc3\x67\x9e\x7f\xd5\x31\x6c\x67\x28\x60\xce\x40\x27\x47\x2c\xe8\x63\x05\xa4\xa8\x51\xd6\xb7\x54\x25\x60\x9f\x69\xec\x1d\x5f\xf0\x4f\xb3\x43\x5c\xd0\x19\x65\x41\x61\x27\x3b\xbd\x14\x67\x60\xde\x35\x67\x90\x4f\xfd\x9d\x76\x6f\xb3\x0c\x8e\x61\xc4\xff\xf3\xdf\x7f\x3b\xa6\x4d\xde\x0f\x39\xb6\x2b\x70\x08\x3b\xb9\xd9\xb6\x84\x3b\x1f\xa0
\x88\x34\xa0\x53\x04\x3a\xad\x2c\x65\x4e\x14\xbf\xda\xcb\x4d\x13\x0d\x57\x73\xe3\xee\x2d\x93\x3b\xf8\x46\x44\xbe\xe2\x6c\x98\x2b\xe6\x6d\xd7\x92\x4a\xc8\x0e\xd0\x23\x31\x27\xd9\x1b\x08\xc2\xa2\xe9\x4b\x66\x9a\x11\x2b\x3e\x91\x06\x0f\xc5\x40\xd0\x9a\x1f\x8a\xd5\x69\xb9\x30\xab\x78\x7f\x11\x29\x51\xb8\xc3\x83\x68\x64\x57\xe2\xc3\x44\x91\xea\xf6\x2b\x37\x6d\x85\x73\x07\x58\x8c\xbb\x44\x6d\xce\xb1\x7c\xb9\xd0\x9c\xda\xd2\x54\xc6\x9a\x1e\x0a\x0f\xeb\x82\x74\xcc\x20\x4d\xb6\xa7\xde\x90\x5c\x12\x61\x38\x9d\x87\xc3\xb2\x94\x10\x22\x4d\x42\x8c\xe6\x1a\x5f\x23\x49\x31\xdd\x28\x9f\x40\xbf\x5f\x8f\x83\x59\x72\x36\xe0\x19\x11\xe3\x38\x57\x4b\xc7\x62\x55\x8c\xbf\xfe\xe8\x3c\x57\x8b\xf5\xed\xcb\x04\x2f\x97\xe6\x59\x85\x3d\xaa\xdf\x6f\x68\x76\xbd\xc4\x1c\x84\xf8\x94\xa5\xe6\x06\x64\x4f\x52\x41\xf6\xb4\x31\xa7\x25\x13\x09\xdc\xc0\x4c\x01\xa4\x5e\xa1\x49\xd9\x2b\xda\xe0\x8f\xc3\xc8\xd1\x24\x2f\x4c\x4a\xbe\xa2\x43\x7c\x3c\x62\x1f\x4d\x49\x14\xd0\xf2\xcc\x00\x22\x48\xf8\x21\x9f\x33\x2e\x49\x29\x03\xad\xa6\x4a\x83\xcd\x20\xb5\xc3\xa8\x17\xd6\x8b\x8f\x76\x97\xd5\xdf\x1c\xa3\x75\x79\xc3\x97\xa7\x5c\x4f\x81\x3b\x89\x83\x11\xcd\xa8\xa6\x9d\x71\xed\x49\x7b\xb9\x4a\xc1\x45\x4c\x17\xe0\x60\xa9\x64\xd1\x43\x7e\x7a\x15\x82\x48\xc8\x13\x01\x73\x3a\x8c\x31\xac\xc5\x51\xb6\xeb\x35\xb0\x93\x75\x07\xa8\x48\xff\x04\xb6\x80\xe2\xea\x08\xca\x49\x72\x75\xb4\x58\x4e\xff\xd9\x39\x53\xad\x2e\x30\x65\x03\xf1\xbc\x1f\xd6\x24\x59\x10\x85\xc8\x67\x45\x6c\xd5\xd2\x7b\x97\x4b\xb8\x9c\x7e\x80\xea\xd3\xa1\x9a\x65\xc7\x17\xaf\x1f\xdd\x77\x19\xe4\x2e\x59\x32\x76\x57\xbe\x4d\x1e\x9c\x63\x16\xdb\x8c\x58\xab\x64\x68\x61\x0b\x66\x67\x8c\x6e\x3e\x57\xc0\xe6\x75\x06\x60\xee\xa6\x4d\x83\x3a\x0f\x17\x99\x53\x18\xb5\xca\x02\xe1\x15\x5c\x68\xc9\x3d\x67\x8a\x26\x9a\x38\xec\x18\x24\x9a\x42\x64\x9c\x05\x2a\x84\xc8\xf6\x26\x2c\x3c\x2a\x25\x65\xb3\xb1\x5d\x49\x97\xdc\xd9\xee\x62\x28\xd3\xd4\xa5\x69\xca\xfc\xf8\x9d\x6b\x68\xb5\x51\xdd\x90\x35\xe0\x94\xb9\xb4\x52\xd0\x38\x18\x77\x93\xb1\x00\x73\x2e\x1b\x75\x4c\x63\xb3\x14\xd4\x14\xc7
\x86\x89\x6e\x62\x77\x07\x99\x6e\x19\xc7\xa1\xb8\x42\xa4\x4d\x15\x35\x09\x60\x10\xa9\xaf\x1a\x72\x61\x65\x63\x0e\xec\x90\x79\x11\xcd\x96\xe6\xf2\x99\xfe\x95\x74\x5a\xec\xba\xb3\xe9\x08\x38\x49\x26\x38\x7a\xf4\x5a\x98\xb7\x45\x70\xe1\x4a\x1b\x68\xb9\x12\x6a\xb7\x19\xe2\xd2\x03\x8d\x40\xba\x09\xbd\x85\x06\xc9\xc7\x0e\xbb\xe8\xdc\xac\x9a\x85\x48\x33\xd0\x4d\x66\xf4\x26\xb7\x21\x26\x59\xc2\x17\x69\xc3\x7d\x56\x4d\x21\xdc\x25\x52\xa7\x29\x83\x71\xaf\x57\x59\x85\xe9\x6d\x7c\x99\x2d\xe5\x23\xed\x01\x57\x6a\x03\x2e\xf9\x29\xe1\x13\x30\xa9\x5a\xf3\x83\xcb\xb1\x09\x52\x3d\xaa\xe7\x79\xd3\xcc\x9f\xea\x89\xa4\x32\x4b\xb4\x32\xd3\xdc\x83\xc9\x39\x79\xd9\x7d\x33\x18\x05\xeb\xad\x83\xed\xa3\xb5\x6b\x3f\x7f\x09\x04\xe3\x0b\x27\x09\x98\x77\x0d\xff\xaa\x58\xd9\x4c\xb2\xdf\xb1\x71\x52\x2b\x3e\x62\x0a\xcf\xdc\xe6\x5a\xe1\x92\x3f\x33\x22\xe4\x9c\x66\xa5\x9a\x8e\x3b\x87\x87\x5b\x8a\xb6\xff\x31\xc1\xd0\x1b\xf0\x4e\x9e\x1d\x19\x84\x12\x4d\x1f\x32\xc3\x51\x61\x15\x8d\x12\x2c\x25\x9d\x2e\x02\x60\x11\x1f\x69\x0b\xe9\x5b\x65\x33\x42\x50\x46\xad\x8e\xd1\x98\xf1\xed\x27\xb3\x7e\xf7\xac\xc2\x87\xf2\xf1\xa3\x71\x88\xe0\xa6\xef\x93\x65\x14\x19\x77\x53\x5b\x34\x99\x46\x24\x5a\x03\x21\xb0\x5d\x26\xfc\x4a\xf0\x9f\x66\x33\x42\x21\x4c\xda\x61\x5b\x45\xc6\x03\x7e\x84\x60\x38\xaa\x94\x4a\x09\x9b\xaf\x15\x27\x67\x17\xd6\xc4\xe9\xc1\x42\x00\x53\xa1\xf8\xb8\x87\xe4\x4e\x20\x5b\x6d\xe8\xe2\x9c\x24\x64\x2f\x11\xd7\x5b\x10\x49\x35\x9c\x21\x20\x8f\x95\xa4\x51\x94\x19\x58\x6f\x5c\xd8\x22\x10\xbc\x01\xaa\xa7\x7e\xe8\x3f\x99\x81\xda\x58\xf0\xba\x5d\x04\xc3\x20\xac\x72\x5b\xdd\xa5\x0e\xf3\xcf\xb4\x60\x49\xae\xe8\xa6\x44\x57\x45\xa7\x5e\x5e\x39\x44\x52\x7b\xe3\x90\xe9\xa5\x71\x7d\x22\x6d\xc2\x3b\xd6\x1e\x80\xad\x38\xd0\x32\x9f\x6e\x47\x17\xd6\x81\xaa\x38\x9a\x11\xc0\x64\xa1\x2c\xa6\x4f\x34\xce\x71\xf2\xae\x68\x62\x6f\x09\x1f\x7b\x5a\xfd\xfa\x43\xbe\xd9\xa9\xbd\x23\x4a\xba\x2b\x61\x09\x46\xd1\x6e\xce\x01\x6e\xc1\x61\x1c\x4b\x23\xb8\x7e\xf1\x72\xcb\xce\x20\x09\x76\x64\x16\x2a\xe0\x37\x24\x50\x99\x39\xc6\xf6\x0b\x0f\x0b
\x50\x02\xc4\xc2\x25\x10\x41\xb3\x46\x6f\xcf\xf5\xaa\xa4\xfd\xa5\x8b\x5e\x9b\xd3\x78\x75\x54\x05\x75\x77\xf2\xe0\x66\xf2\xa0\x03\xd9\x3c\xc0\xcb\xbf\xe9\x18\x1c\xe6\xfd\x73\x00\xc2\xe1\xd2\x95\xb8\x3f\x11\xf1\x1d\x91\xc9\x41\x48\x8a\x4b\x5b\xf1\x5a\xf2\xe2\x91\x43\x39\x2a\x30\x83\x0e\x77\x8b\x0e\xe3\x24\xdf\x5a\x37\xd0\xcb\x5d\xb0\xeb\xe9\x65\x2f\xf4\x01\x80\x9f\x18\xf2\xa2\x73\x5b\x41\x04\x0e\x6f\x10\x4b\xb7\xe4\x7b\x58\x13\xa5\x68\x87\xd7\x2a\x3e\x71\x69\x39\x5f\x62\x7b\x6d\x12\x5c\xeb\xcd\x7d\x49\x52\xdb\x74\x2c\xfb\xd0\x51\x5e\xd8\x8b\x63\xa9\x31\xf8\xa0\x0b\x16\x6e\x77\x8b\xd6\x40\xeb\xb8\x2d\xdb\xe7\x21\xab\x2b\xfb\xb6\x7b\x1a\xbf\xcb\xf1\x1b\x67\x82\x4c\xe9\xe7\xad\x44\xf1\x1b\xf8\xd4\xaa\x97\x7a\x99\x2b\x85\xe4\xc0\x3d\x03\x85\xe7\x82\x80\x46\xbb\xd2\xb6\xd8\xd4\x88\x15\x99\x91\x36\x2d\xf2\x91\x2c\x10\x17\xa5\x9f\xb6\x05\x81\xdc\x7f\xd1\x3b\xb3\xaf\x73\xa5\x32\x79\x7a\x72\x32\xa3\x6a\x9e\x4f\x8e\x23\x9e\x9a\x38\x7c\x2e\x66\xe6\x8f\x13\x2a\x65\x4e\xe4\xc9\x1f\xff\xf0\x87\x62\x8b\x27\x38\x7a\x9c\x19\x58\x9d\x65\xbf\x53\x79\xcb\x09\x96\xbb\x45\xf6\xb8\x14\xb6\x17\x4e\x65\x0e\xba\x71\xc9\xa3\xfa\x1b\xa9\x70\x9a\x85\xa1\xa0\xa6\x6c\x9c\x54\xb8\x28\x56\x01\x79\x89\x7a\x9a\x68\x8e\xb3\x8c\xb0\x66\xb3\x83\x49\x34\xdd\x81\xf5\xb8\x54\x55\x3b\x42\xf2\x39\x4b\x30\x2b\xc3\x2f\x40\xe5\x25\x41\x22\xc2\x94\x85\x06\x28\xca\x5d\x03\x35\x1a\x08\x20\xc3\xff\x37\x4b\x45\x84\x39\x52\x59\x94\x54\x73\xc3\xb1\xe5\x4d\x5d\xd1\x4b\x1c\x2c\x5d\xb5\xa4\x6c\xb1\x76\xc4\xad\xda\xaa\x24\xc5\xbb\xe5\xf2\xe7\x9b\x97\xb4\x11\x9c\x8d\xc9\x67\xcd\xe4\xe4\xb6\x80\x5d\x0f\x92\x48\xd4\xff\xe9\x0e\xc9\x05\x53\xf8\xf3\x29\xba\xa4\x0c\x04\xd8\xef\x79\x2e\x24\x3a\xc7\x8b\x23\x3e\x3d\x4a\x39\x53\x73\x74\x09\xff\x6b\x7f\x7a\x26\xe4\x11\xfd\x4c\xb0\xb0\xfc\xc1\x96\xa4\xf3\x35\xd8\x35\x09\x89\x9c\x49\x44\x9e\xf4\x09\xfd\xc3\x7f\xa2\xd4\xb4\x7c\x8a\xbe\x3d\xf9\xc3\x7f\xa2\xdf\xc1\xff\xff\xff\xd1\xef\x1a\x34\xfd\xcd\x20\xbf\xa0\x72\xf1\x6d\xd9\x9d\xdb\xab\xac\xd4\x16\x15\xe7\xcf\x04\x2f\x76\xaa\xb6\xe5\x47
\x1a\x3d\xf2\xe9\x74\xac\x09\xc3\x24\xf2\x8d\xb1\x58\x82\x8b\xde\x12\x3f\x95\xda\xda\xd3\xa6\x92\x5d\x51\x43\xc6\x76\x6a\x10\x1f\x1c\xbb\x96\x79\x51\x79\x17\x82\x88\x4a\xd5\x8c\xa9\x84\xaf\x48\xac\xb9\xea\x26\xa7\xc3\x59\xf7\x5c\xf2\xb7\xb3\xe0\x84\x08\x29\x61\x3d\x75\x1f\xf8\x17\x46\xb1\x9a\x40\x1f\xbb\x90\xb5\xc7\x61\x29\xbc\xf6\x8b\x89\x99\x84\xa9\xbd\x55\xbc\xa4\x5c\xea\x7c\x7d\xa8\xe4\x1d\x17\x3b\xe9\x5b\x8f\xa4\x31\x95\x61\x4d\xbd\x24\x57\xc3\x37\xac\xec\x0f\x19\xe2\x5c\x78\x1c\x63\x63\x17\xb1\x55\x15\xd7\x5b\x31\xa9\x30\xc1\x65\xed\x0e\xbd\x9e\xfa\xb9\xff\x64\xdd\x30\x21\xd2\xcc\xbd\x5d\xd4\x8b\x83\xd1\x6a\x11\x49\xb3\xc4\x9a\x11\xd7\x80\x1d\xae\xdb\xd0\x3b\x8f\x6f\x01\x8d\x43\xd8\x23\xe4\x6f\x30\x27\xd9\x5a\x00\x81\xfa\xfd\xcc\x45\x44\xce\xf8\x6e\x61\xaf\x09\x65\x4b\xf1\xf2\xed\x4b\x11\xf9\xd5\xbb\xb0\x45\xa7\x1c\x1e\x30\x8f\x0b\x65\xc1\xb8\x05\x6c\x15\x8a\x00\x88\xb4\x3c\x1b\x00\xb4\xdb\x07\xd6\xe5\x52\x6d\x84\x1d\xb8\xb6\x31\x1c\x17\x0c\xcf\x95\xd6\xa8\x54\xd4\x10\x58\xf3\xc2\x15\xb1\x6b\x10\x54\xb4\xf3\x38\x82\x2a\x31\x45\xa4\x52\xa5\x1a\x3b\x36\xa5\x52\xc4\x96\x58\xa5\xa6\x60\x53\x0f\x09\x0c\x41\x99\x6a\xae\xdb\x93\x44\x1c\x4d\x71\x44\xd9\xac\x17\xc0\x54\x02\x64\x44\x78\x1d\xd4\x11\xe9\x3d\x96\x8f\xfb\x0d\x34\xdc\xb9\x80\x25\x8d\x8b\x22\x6a\x16\x58\xc6\x38\x36\xe8\x12\x46\x9f\xc2\xf2\xb1\x09\x59\x69\x09\xd6\x6d\xc5\xe8\xfc\x52\x38\x30\xb8\x55\xe3\x73\x29\xe8\x24\xd4\xa7\xa0\x66\x83\x2b\xa9\x6c\x41\x1e\x5d\xc6\x1f\xf6\x28\x2c\x55\x74\xd3\x15\xe3\x97\x73\x2e\xd4\x78\x4b\x5c\xd8\x6a\x1a\x3d\x23\x47\x09\x00\xba\xf0\x27\x22\x9e\x28\x79\x2e\xc3\xab\x6e\x42\x8b\xc6\x68\x16\x44\xd5\x01\xfe\x66\x9a\x71\x48\xa1\x99\xa2\x14\xb3\x85\x61\x94\x9a\xb9\x60\xf9\x28\x7d\x21\x57\x24\x53\x9c\x24\x3d\x24\x48\x2e\x4d\x81\x63\x49\x92\xe9\x91\x2b\x85\x11\xa3\x84\xcf\x68\x84\x13\x34\x49\x78\xf4\x28\x4d\x86\x1b\x9b\x19\x26\x95\x09\x1e\x11\x29\x03\xc9\xaa\xc8\x66\xb7\x39\x86\x50\xc5\x55\x11\x91\x52\x46\xa5\xa2\x91\x13\x99\x0a\x50\x0a\x53\x4b\x3c\xc2\x60\x12\x86\x8c\x4d\x18\xae\x96
\xf4\x88\x01\xe7\xcc\x99\x2d\x9a\x04\xd7\xb5\xc5\xdc\x73\x41\xe2\x4d\x07\x68\x0f\x10\x82\x8e\x42\xc6\xaa\x7c\x20\xd7\x1c\xa9\x33\xfb\x19\x1c\xe3\x55\x24\x70\x5b\x3e\x51\x9e\x20\xfd\x49\x2b\xc1\x1a\x41\x4c\xb9\x0f\x81\x2f\x49\x2e\x3e\x32\xfc\xc0\x10\xcd\x60\xc8\x0d\x38\x66\xeb\x68\x5a\xaf\x22\x88\x3c\x50\xa7\xab\xea\x35\xa7\x2c\x4a\xf2\xd8\x57\x6a\xd4\x22\xc0\x93\x26\x12\xb7\x3c\x7a\xed\xb5\xa0\xd0\x43\x58\xa2\x67\x92\x24\xfa\xbf\x26\x02\xfe\xc8\x17\x4e\xd0\x2c\xd9\x14\xb7\x80\x4e\x1c\x97\x6e\xa2\xa8\x83\x43\xa7\xbc\xc1\x6a\x6e\x72\xfe\x53\xae\x4c\x91\x4c\x83\x4e\xe9\xec\x5b\x06\xce\x70\x92\xf0\x09\x9c\x74\x00\xae\x74\x79\xae\x41\x5a\x5d\x1e\x45\x84\xc4\x24\x46\x5f\x07\x07\xd7\xe3\x51\x7c\x53\x0f\xa3\x58\x5a\x91\x03\x00\xad\xac\x1a\xd6\x1a\xa1\x2b\xcb\x75\xde\x8e\xd1\x4d\x05\x98\x25\xac\xdf\x8e\xab\x30\x5d\xbd\xa5\x2d\x7c\x1b\xa0\xcb\xca\x24\x5e\x6e\x87\x36\x04\xba\x2c\xf5\xb9\x07\xa0\xcb\xca\x3c\x1b\x62\xf7\xf9\xec\x45\x73\x8e\xf5\xa4\x2e\x78\xfb\x44\x30\x03\x10\x66\xee\xce\x12\x09\xba\x03\xb9\xa8\x23\xc4\xc3\x02\xf1\xac\x54\x43\x7c\x5b\x10\xcf\xca\x60\x0e\x19\xc4\xb3\x32\xd4\xc3\x05\xf1\xac\x19\x68\x0b\x10\x4f\xe3\xdc\x1f\x6b\xa2\x6e\xc7\x14\x20\xb1\x65\x92\x4f\xef\x20\xd5\x7b\xe5\x18\xcf\x4c\xe0\x80\xb9\xc6\xdc\x1d\x6d\x31\xad\x61\xb4\x36\x07\xb2\x29\x1c\xaa\xe2\x84\xd8\x94\xf6\xbc\xf7\xcd\x80\x3f\x6c\x6a\x76\xef\x85\xd6\x6e\xb0\x43\x46\x38\xb3\x39\xe5\x4d\xa5\x66\x0e\x27\x7b\x76\x3b\x7c\x54\xc0\x20\x2c\xb1\xfc\x56\x08\x62\x97\x95\xaa\x0d\x73\xfe\x6c\x2b\x27\x01\x19\x1a\xa2\x6c\x24\x41\xe8\x74\x6c\x95\xb6\xa6\x95\xa3\x4c\x91\x59\x55\xa7\x2d\x0e\x0d\x65\xea\x4f\x7f\x5c\xcb\x89\x0c\xc4\xa2\x53\x0f\x83\xda\x09\xde\xd9\x61\x9f\x91\x18\x45\x73\xad\x15\x49\xad\xbe\xe8\xe9\x98\x9b\x55\xa2\x14\x53\xa7\x48\xe5\xd2\xb8\x96\xa8\x1c\xb1\x12\x26\xe9\x31\xfa\x08\x05\x61\x71\x9a\x69\xfd\xcb\xcf\x8f\x6a\x4a\x1a\xe5\xdf\x7e\xfb\x27\x82\xbe\x45\x29\xc1\xac\xa4\xc3\x82\xda\xa4\xaf\x3e\xc0\xf0\x53\x73\x32\x62\xb5\x5b\x81\x06\x9f\x4d\x8d\x29\x17\xef\x37\x64\x53\xee\x74\x62\x28\x74\x88
\xa3\x39\x92\xf9\xc4\x54\xea\x0d\x6c\x18\x4e\x90\xbe\xe0\x33\x70\x54\xc3\x8d\xec\x06\xbd\xea\x14\xbe\x6c\x0c\x80\x75\x37\xb6\xbd\x8d\xfb\x70\x8f\x1c\x49\x52\xc2\x76\xaa\x71\x9a\x19\xce\x17\x1e\x7c\x69\x70\x5f\x7a\xc6\x87\xa0\xf5\x33\x6c\x2d\xfb\x5a\x96\x86\x70\x5e\xf0\x92\xe5\x09\x16\xf6\xe8\x8f\x98\x56\x34\x04\x79\xa2\x3c\x97\xc9\x02\xc5\x9c\x91\x1e\x50\x42\x1e\xcd\x8d\x63\x55\xeb\x2c\xd8\x16\xac\x78\xa2\x32\xd7\x0a\x2d\xb4\xe5\xea\x63\x48\x85\x0d\x26\xd5\x9c\x42\x3f\x5a\xfd\x26\xf0\x55\x98\x25\x87\xda\x69\x51\x21\x6c\x6c\x85\xe7\xb7\x84\x8d\x2d\x51\x55\x07\x1b\xeb\x61\x63\x97\xd7\xe5\x10\x61\x63\x2b\x7b\xde\x0e\x36\xb6\x6e\xcb\xb7\x80\x8d\x2d\x35\xf3\xc5\xc0\xc6\x56\x56\xf4\x8b\x81\x8d\xad\xcc\xab\x83\x8d\xfd\xf2\x60\x63\x77\x04\x46\xad\xe7\xc5\x06\x5f\x49\x51\xb6\xd8\x98\xc8\xbe\x92\x68\x78\xad\x09\x2c\x7a\x2c\x07\xb5\xf9\xeb\x6a\x77\x30\xd6\x7a\x26\xb4\x19\x18\x6b\xad\xaa\xde\xcc\xea\x76\x05\x78\x02\xc5\xe0\x95\xc1\x58\x4b\x13\xe8\xe2\x2b\x37\x8f\xaf\xac\x25\x3e\xdb\xb7\x1e\x9e\x0b\xba\xac\x5e\xc8\x2d\xe1\x58\x4b\xfb\xd3\x2a\x12\x13\x44\xf7\x3d\x50\xe2\xcb\x4a\xf3\xf7\xa5\x43\xbe\x56\x96\x0f\x57\x51\x5a\x60\x68\x2d\xe1\x39\xb4\x38\xa3\x84\x87\xfe\xff\x8e\x72\xb7\x88\x0c\xae\x2c\xaf\xf7\xab\x18\x5a\x6c\x41\xaa\xad\x29\xd4\x69\xa5\xfb\x49\x94\x75\xc9\x93\x1b\xba\x98\xdd\x20\xee\x32\x12\x35\xd8\x98\x69\x4a\xf7\xd5\xec\xba\x8b\xcc\x63\x61\x81\x42\xbe\x94\x17\xaa\xaf\x27\x33\x1c\x23\xe3\x57\xd2\x61\x01\xa8\xc3\x7c\x39\xa3\x52\x89\xc6\xd8\xa6\xa5\x11\xee\xe2\x2a\xcd\xf2\xd6\x01\x31\xc1\xaa\xce\xb6\xfb\x2c\x25\x29\x17\xeb\x02\xab\x6a\xbf\xb4\xa5\x6e\xb6\xf9\x94\x64\x73\x92\x6a\x49\x66\xbc\x69\x23\x6d\xf7\xdb\x27\x0d\xdb\xdc\x35\x13\xe8\x58\x22\x82\xc0\x11\xaa\xdf\x8d\x0d\x22\x65\xeb\xed\xde\x75\x9b\x2d\x66\xe6\x86\x0e\x21\x07\xa6\xbc\xda\xe0\x66\x5f\x2a\xb9\xbb\x81\xbe\x6b\x63\x3a\x7c\x48\xcd\xfa\xa8\x8d\x15\xf1\x1a\xab\x70\xa7\x8a\xaf\x6c\x21\xe8\x0d\x5c\xf9\x65\xef\xbc\xe6\x84\x61\x15\xe0\xcd\x03\x3c\x1a\x50\x53\x97\x97\x07\x22\x73\x24\x11\x47\xa1\x66\x50\x1a\xcc\xf2
\x7a\x95\xa8\xc4\x69\x94\x3b\x10\x49\x2e\x1a\xa3\x4c\xdb\x18\xb4\x23\x95\xe3\x04\x34\x89\xb0\x7a\x65\x75\x53\x27\x8b\x9a\xb4\xc7\x76\x1e\x13\xca\xd4\x9f\xff\x63\xa3\xdd\xd4\xaa\x95\x5d\x37\xa8\xb8\x85\xa3\x88\x48\x63\x63\xb7\x51\xc8\x78\xc2\x9f\xa0\xd8\xd6\x2e\xbb\xaa\x8f\xb2\x9e\xb7\x66\xf0\x1e\x8a\x38\x2e\x48\xdd\x88\x0b\x73\xc1\xf3\xd9\xdc\xd9\x90\xf4\x99\xd1\x53\xab\xdb\xcb\x1f\x97\x6c\xe4\x1b\xef\xe5\x77\x39\x4d\xb6\xb3\xd0\xdd\x95\xca\x90\x7d\x1a\xde\x23\x39\xf7\xa7\x75\x02\xcd\xd6\x6e\xec\xf2\xa0\xdb\xf7\x69\xbf\xf5\xfe\x1a\xe8\xa6\xe7\xe0\x37\xa7\x3c\x49\xc0\xd3\x20\x49\xfa\x44\x44\x7d\xf7\x30\xe1\x7b\xba\x19\x72\x9e\x1f\x00\x7c\x5d\x24\x46\xb4\x92\xbf\x6e\x8c\x68\x28\x91\x1b\x7d\x35\x68\xc1\x84\xaa\x71\x46\x58\x9d\x8d\xed\xa7\xe5\x0a\x30\xef\x2c\x60\xd0\x45\x8f\xed\x2d\x68\xd0\x2d\xc9\x2b\x07\x0e\xae\x99\xc7\xa1\x06\x0f\x56\x98\x9d\x8f\xe5\x2b\xae\x19\x17\x38\x64\x14\x9f\xbe\x5e\xe2\x11\xeb\x97\xf2\x29\x5c\xa5\xec\xc9\xa2\x08\xc8\x36\x3a\x44\xc8\xcc\xa0\xce\x86\x35\xac\x80\x1b\x4d\xff\x05\x9a\x8e\x01\xaf\x35\x21\x85\x2e\x6c\x10\xa2\xc9\x49\x7c\x84\xa3\x45\x94\xd0\x28\xd0\x99\x67\x02\x67\xf3\x3a\x8e\xe7\x76\xbe\x43\xdd\x79\x2b\xd4\x9d\xa6\x82\x54\x9b\xc4\x6d\x3b\xba\x62\x38\x25\x1d\x1a\x50\x1d\x1a\x50\xcf\xe3\x5d\xb0\xa2\xb4\xd6\x1b\xc2\x28\x2c\x9f\xbb\x0e\x12\xe8\x0d\x20\x81\xb6\x39\x7c\x05\xde\x4f\xe9\xd8\x75\x30\x45\x1f\x5a\xc1\x14\xf9\x4b\xf0\xa0\x90\x67\x9a\xcf\xe3\x1b\x23\x9a\x2c\x0f\xec\x2d\x61\x89\x6a\xc4\x85\x4d\xe4\xa6\x55\xb8\x44\xab\xe8\xa2\xd5\xba\xbc\x2d\x4a\xd0\x66\x2b\xb3\x11\x00\x50\xed\xdd\x75\x20\x70\x40\xcd\xdb\x70\x20\xe7\x66\x9f\x59\x2d\x9b\xd5\x0e\x0d\x33\x5b\x36\x51\xb0\x36\x4b\x72\xf1\xf4\xf0\xbe\x12\x5d\x8a\x22\x6b\xdb\x25\xbb\xf4\x9d\x0f\x9a\x08\x34\xe7\x49\xec\x40\x28\xfc\x6a\xf9\x0e\x7c\x26\x80\x5f\x20\xb7\x19\x50\xec\x1c\xb4\xad\xa2\x20\xd8\xaa\x94\x16\xbf\x89\x30\xdc\x3d\x30\x9a\x7d\x58\x11\x3c\x27\xd9\xc6\x7e\xb0\x56\x16\x91\x65\xf3\xf7\x8a\x31\x96\x56\x08\xac\xe6\xf5\xc3\x5c\x6b\xf7\x5d\x33\xb8\x55\xa2\x47\x60\x1c\x14\x75\xa5
\x3e\x0d\x9d\xc1\xd3\x27\xea\x0c\x11\x38\xec\x71\xa5\x97\xce\xcd\xae\x95\xa7\xae\x4a\x2c\x5b\x04\x83\x2d\x55\x6e\xdb\x1d\x1c\x28\xc5\x9f\xc7\x19\x16\x38\x49\x48\x42\x65\xfa\x62\xc1\xc0\x67\x65\x77\xad\x3e\xab\x82\x1b\x13\x11\xcb\xd3\x89\x21\x45\x37\x10\x5b\xec\x4f\x71\x24\x72\x16\x42\x9b\xf9\x8d\xf1\xc5\x04\x73\xb8\x17\xc0\xaa\x14\xcd\xa1\x6a\xeb\x14\x53\xc1\x88\x6c\xac\x91\x49\xa2\x5c\x50\xb5\x18\xdb\x92\xa3\xed\x0f\xdc\x9d\xfd\xf2\xcc\x7e\xb8\xda\xc3\xed\xb2\xfa\x5d\x7f\xbe\xc4\x69\x46\x04\x94\x09\x72\x05\x6f\x82\xb2\xaa\x16\xb5\x81\xf8\x5a\x43\x10\xfe\xbc\x74\x6d\x37\x05\x0e\xe3\xe7\x71\x90\x51\x35\x8e\xaa\xc4\xb1\xee\xb0\xd6\xe1\x4e\xad\x9a\xe4\x0b\x23\x2f\x35\x78\x91\x5f\xa0\xca\x88\x4d\x9b\x30\x4d\xeb\x01\x07\xae\x60\xb0\x57\x16\x1b\x13\xa4\xbc\x5b\xa5\xaa\x61\x9c\x16\xeb\xa7\x2e\xf8\x68\xc5\x60\xfb\xc1\x57\x2d\x46\x1c\x74\xb2\xa7\x61\xeb\x83\x2e\x44\x9e\x29\x3a\x59\x86\xb6\x51\xfb\x2b\x21\xda\x4f\x20\xcd\xda\xb9\x19\x4a\xdd\x9a\xba\xa2\x25\x4e\x6c\x67\xa7\xe5\x7f\x8b\x23\xe6\x10\x82\x0c\xc2\x52\x98\xc7\x77\x9d\x52\xa5\x5c\xa2\x80\x31\x40\x6b\xea\x2c\xdb\x66\xbf\x72\xe1\x1e\x18\x2a\xbd\x1a\x13\xd1\xf1\x88\xf5\x25\x7a\x26\x88\x11\x0b\x21\x51\x53\xc3\xd5\x5b\xb5\xa1\xf6\xd3\x84\xe8\x9e\x7c\x6c\x8a\x16\x1e\xa8\x92\xbe\xfc\x98\xe9\x63\x8a\x13\x49\x7a\xba\x61\xa8\x5a\xaa\x38\x04\x7f\x62\xf4\x2c\x70\x96\x11\x31\x62\x36\x8b\x03\x1c\x2e\x9c\x27\xa6\xfd\xa6\x10\x57\xbb\x06\x64\x1c\xe1\x68\xfe\x4a\x7b\x84\x21\x19\x27\x9a\x93\xd8\xe5\x0b\x97\xb7\xc7\xcd\xdb\x18\xac\x37\xd8\xac\xe1\xd4\x95\xcf\xea\xd9\x4e\x92\x48\x73\x14\x5f\x66\x3a\x23\x42\x8f\x5a\xd3\xf0\x13\x61\x88\x4e\xdd\x38\x6c\xec\x0e\x7a\x06\xcf\x94\x26\xfd\x27\x4c\x13\x93\x80\xef\xba\x76\x42\xa0\x31\xbf\x8f\x98\x71\x77\xb3\xa8\x94\xa1\x4a\x19\x95\x73\xcd\xa9\x73\xf0\x49\x82\x9a\xd1\x94\x38\xc3\x9e\x36\x39\xcd\x03\xfd\xfa\x6a\x0e\xfa\x44\x05\x67\x29\x24\xc9\x58\x5c\x26\xb7\x7c\x92\x28\x7f\x3c\x6a\x53\x1c\xd7\x4a\xc4\x71\x2c\xcb\xc6\x4f\xa3\x56\xd2\x5f\x4b\x66\x97\xa3\x52\x56\x60\x14\xc0\x0a\x41\x10\xa7\xab\x2c\xb6
\x4a\xfe\xed\x52\x1b\x96\x53\x1b\xea\xd7\xe6\x10\xd3\x1b\xfc\x21\xde\x34\xc5\xa1\x69\xfb\xf7\x21\xd9\xee\x31\xd5\xe1\x8d\x73\x02\x5e\x26\x1d\xe0\x6d\xf3\x37\x5e\x22\x75\xa3\x4b\x70\x78\xc3\x04\x87\xd6\x96\xda\x72\x6c\x76\xf3\xb1\xdd\x28\x39\x60\x0d\x98\x53\x5d\x2f\x97\x44\x09\x1a\xc9\x7d\xf0\x07\x99\xe1\x96\x51\x6d\xa0\x05\x66\x6b\xa4\x26\xfd\x82\x77\x42\x42\x9c\x98\xaf\xf3\x37\x11\x04\x3f\xc6\xfc\x79\xc9\x56\x27\x43\x34\x8d\x4b\xae\xc5\x1e\x41\x22\x2a\x49\x29\x92\x85\x4a\xc4\x88\xb4\xc6\x4e\x3c\x62\x73\x4a\x04\x16\xd1\x1c\xb2\x1b\x8b\x8d\x31\x59\xb2\x06\xd0\xc8\xc4\x32\x84\xde\xa6\x0d\x36\xbd\xc5\xba\x57\x2d\x4c\x1e\x9f\xce\xee\xb9\x1e\x49\x6a\x3e\xf1\xc2\x8c\x95\x32\x42\x93\x5c\xab\xed\xdf\x35\x10\xdf\x2f\xf6\x8b\x06\xe3\xfb\x60\xa2\xe0\x8b\x96\x01\xf9\x05\x35\x74\x41\xf9\x2f\x14\x94\x5f\xb3\xc4\x9b\x05\xe6\x6f\x65\xf2\x7b\xfd\x98\x61\xd7\xf3\x6b\xc4\x0d\xaf\x0b\xda\xca\x27\xe3\x17\x3f\x7a\xb5\x73\x6e\x7b\x02\x7f\xf2\x44\x61\x24\x62\xa1\xe9\x6c\x42\xe2\x18\x38\xad\xe2\xb6\x52\x74\x41\x3b\xce\x3c\xa0\xef\x5e\x2c\x35\xb1\xe3\x84\xb3\x99\xa4\xb1\x01\x5b\xc9\x30\x54\x6c\x0d\x8d\x17\x00\x2e\x00\xfb\x9b\x24\x44\x38\xaf\x84\x40\x5f\x4b\xca\x2c\x9a\xa2\xff\x2d\xe6\x44\xb2\xaf\x94\x31\x16\x60\xb6\x40\x8f\x8c\x3f\x27\x24\x9e\xc1\x0e\x55\x07\x73\x84\x28\xe9\x21\xaa\xfc\x67\x02\xd0\x08\x78\xae\x46\x7a\xec\x10\x6b\x66\x34\x00\x62\xbf\x0d\x6a\xa2\xfb\x66\xbe\x39\x46\x68\xc8\xd0\x14\x47\xaa\x87\x64\x3e\x29\xda\x8f\xb9\x29\x72\xad\xb5\xef\x60\xe2\x45\x23\x5d\xcc\x78\x4d\xe7\xf5\x67\xc3\x71\x07\x4d\xae\xfd\x84\xe2\x9d\x62\xeb\x9e\xf0\x2e\x10\xa3\x97\xb9\xb4\x41\x18\x88\x33\x7f\xf4\x2d\xbc\x92\xc7\x88\x06\xbc\x4f\x83\xb7\xcc\x78\xdc\x68\xeb\xac\x4c\x65\xd3\xb1\x14\x81\x90\x56\x50\xb2\x8e\x2a\x68\xd7\x2c\xb7\x96\x9a\xa4\x12\x04\xa7\xd6\x39\xa0\xaf\x1a\x10\x6b\x4c\x18\xa4\x1e\x3d\x15\x46\xc2\xdc\x64\x8b\x2f\x28\x7b\xd4\xbb\x5b\xa0\x62\x43\x7d\x79\xe8\xb9\x6e\xd3\x32\x7d\xe3\x91\x33\xce\x8c\x83\x70\x27\xb9\x93\xce\x18\x4e\x36\xb4\x71\x2c\xad\xdc\xb2\x4f\xcf\xc9\x59\x56\x5c\xd0\x52\x84
\x31\xf6\x21\xd3\xe3\x46\x36\xa4\xca\x7c\x43\x79\x0f\xa3\x98\x64\x84\xc5\x84\x45\x0b\x20\x11\x06\xc8\x39\x82\xe1\x04\x61\xf8\x0e\x27\xc7\xe8\xdc\xe4\xd7\x78\x09\xcf\x5e\xeb\x70\xa1\xa7\x98\xd1\xa9\xd6\x13\xc0\x08\x6b\x47\x39\x62\x66\x98\xce\x07\x12\x14\xed\xf7\x2b\x56\xb7\x33\xfa\x06\xb9\xda\x11\x95\x98\x95\xbf\x47\xab\x2f\x1c\xe8\x6d\xd5\xee\xe8\xe6\x5c\x0d\x02\x99\x4f\x8e\xe0\xdf\xa5\x84\x33\x07\xd4\x53\xa0\xc8\x90\x84\x80\x39\xd0\x7a\xbc\xe0\x62\x6c\x02\x96\xdb\x87\xdf\x6e\x4d\x1e\x47\xd0\x47\x49\xa9\x49\x29\xa3\x69\x9e\x06\xce\x3b\x53\xb1\x20\xb2\xf6\x4b\x93\x89\x91\x69\x3d\x20\x72\xe0\xe5\x48\x5f\xae\x6c\x81\x66\xf4\x89\xb0\x11\xcb\x38\x65\xea\x18\x5d\x71\x45\x82\x12\x11\x06\x3a\x8a\x67\x8a\xa6\x06\xed\x54\x10\x7d\x0e\x0c\x28\x36\x00\x4d\xce\xb1\xea\xa1\x38\x87\xa3\xca\x88\xd2\xac\x43\xdf\xb8\x0a\x76\x06\xe2\xa3\xc5\x88\x99\x9b\x6e\x8a\x69\x92\x0b\x62\x65\x56\x6c\xf2\x62\x8a\x21\x17\x23\xb3\x48\x68\xc1\x24\x52\x3a\x9b\x2b\xbd\x45\x5a\xc6\xb3\xfe\xc6\xb9\xe6\x46\x7c\xc4\x26\x04\x61\x94\x71\x49\x15\x7d\xf2\xfe\x4b\x3a\x45\x58\x4a\xb0\xa0\x1c\xa3\xf3\x92\xfd\x9f\x4a\x50\xbd\x9b\xe2\x6a\x29\x1b\x5b\xdb\x73\x73\x3e\xce\xce\x1b\x59\xea\xc5\xae\x32\x9e\x48\x9e\xe4\x2a\x74\xc1\xd6\xef\x6d\x61\x1a\x77\xc0\xfd\x60\x20\xe6\xd3\x11\x73\x74\x2d\x8f\x51\x5f\x22\xc9\xf5\x2e\x49\xb3\x95\x91\xa0\x8a\x08\x6a\x50\x9c\x88\x32\x9b\xe0\xcf\xa9\x3f\x03\x29\x16\x8f\x5a\x84\x0a\x2d\xf0\x06\x53\xb4\x64\xed\x98\x18\x09\x09\x60\xad\xc2\xed\x00\xd3\x3f\x62\x9c\x1d\x31\x32\xc3\xeb\x76\x64\xc4\x4a\x5b\x82\xbe\xa6\xd3\x42\x21\x6d\xf2\x39\x06\x6b\x37\x86\xc8\xa7\xa6\x5d\x32\x1d\x37\x6d\xd2\x34\xe1\x78\x8d\xdb\x78\x5a\x1c\x7a\xf4\x77\x3e\x31\x63\xd4\x7a\x3f\x57\x20\x05\x6a\xf5\x6a\xca\x05\x99\x63\x16\xf7\xdc\x66\x95\xc7\x06\x37\xa3\x35\xb5\x39\x65\x0c\x24\x41\x07\x22\x4c\x0c\x16\x13\x66\xc1\x5e\x58\xc5\xcd\x6e\x45\xb1\x0f\x1b\xdd\x15\xbe\x35\xa8\x7d\x62\x0c\x10\x86\xe5\x2d\x32\x7b\xc4\x25\x4d\xb3\xa4\xc8\x69\x0a\x6c\xa3\x53\x2d\x62\x39\x1e\xc9\x9f\xc0\x74\xe5\xb4\x36\xb8\xd5\xed\xce\x69\x3a\xab
\x19\xb9\x67\xa4\x70\x6b\x38\x9b\x97\x29\x83\x19\xb0\xb0\xaf\x25\xd1\xff\x54\xa4\x50\xfb\x8c\xb0\x3e\x62\x4e\x04\xf9\x06\xb8\x8c\x6d\x36\x30\x9e\x69\x11\xda\xc0\xbc\xda\xf5\x43\x91\x71\x72\x97\xce\x89\x3d\x0c\xee\xd5\xda\x8b\x4a\x51\x2d\x66\x7f\x47\x01\xa1\xea\x7c\x47\xd8\x79\xca\x62\xd2\x58\xcc\xa9\x15\xd7\x68\xba\x5b\x0c\x43\x1d\x6f\x5b\x7f\xe1\x7e\x4e\x24\x41\xea\xd9\x03\xa5\x69\xbd\x0a\x4c\x96\x82\x24\xe4\x09\x17\x77\x9c\xef\xcb\xb2\xcb\x08\xcb\x86\xf2\x28\x80\x36\xa6\xc7\xbf\x7d\xe2\xb0\x1f\xdf\xb5\x1e\xca\x13\x4e\x6c\xe2\x86\xf5\x95\xcb\xe6\x0d\x1b\x9e\xef\x14\x43\x6a\x5b\xa9\x5b\xcf\x66\x11\xc3\xf5\xfd\x03\x59\xd4\xaf\xc8\x1a\x10\xbf\x55\xd9\xd8\x7e\xd9\x37\xb0\x55\xdf\x14\xdf\x2c\xaf\x71\xe3\xca\xfd\x50\x9a\xf2\x1b\x24\x11\xdd\x2c\x55\x80\x86\x3f\x65\x3e\x9d\xd2\xcf\xa0\xd5\xba\x9b\xc4\x69\x1e\x91\xe0\x52\x73\x31\x90\x55\x90\xdb\x3c\xe3\x48\xde\x25\xa1\xa8\xf6\x4b\xad\x65\x6d\x4c\xd1\x8d\xab\xfd\xd7\x9c\x88\x9d\xd6\xdb\x93\xea\x26\xe1\x88\xc1\x29\xa9\xd7\x11\x5d\xa3\x0a\xb7\x8c\x49\x0a\x5b\xbd\xc7\x0d\x4b\xb7\x1e\xfe\xbb\xf6\xb3\x89\x61\xbe\x9b\x0f\x24\xe4\xda\x2b\x6d\x6a\x45\x7c\x9a\x8f\x4d\x76\x45\x79\x34\x7f\xeb\x59\x80\x71\x6c\x03\xa7\x7c\xaa\x2f\x76\x31\x23\xc6\x31\x62\x6a\x36\x29\x5b\x69\x20\xd0\xd4\x6d\x63\x94\xcd\x46\xcc\xad\xad\xec\x21\x13\x26\x5e\x61\xa8\x25\x6c\x77\x1c\x7c\xea\x09\xbb\x9d\x49\xd5\xf8\xd5\x19\x91\x52\x5f\x8c\x52\x09\x4c\x99\xf5\xe1\xb8\xf5\x91\x23\x86\x8e\xaa\x71\xea\x3d\xb0\x23\xf4\x5c\xb6\x67\xaf\x18\xa0\x1c\xb1\x6b\x63\x9d\xf9\x23\xfa\x5a\xe1\x99\xb9\x25\x00\xbd\x11\x27\x80\xfb\x08\x5a\x82\xd5\xca\x83\xe4\x00\x7f\x22\x69\xfc\xcd\xe9\xaa\x3e\x8d\x0d\xe1\x6b\x68\x06\x0e\xb9\x5e\xc3\x62\x81\xe8\xb4\xf8\x07\x89\xbf\x59\xd5\x52\xf1\xd1\x23\x59\xf4\xaa\x8b\xdc\x7c\x6f\xdc\xe3\x9d\x22\x34\x5f\xea\xe2\x80\x41\xb7\x77\x52\xe2\x09\x49\x7e\x2c\x26\x8a\x56\xb2\xa2\xef\x28\xc3\xbb\xf1\xa0\xda\xe1\xb5\x8b\x40\x9f\x2c\x9a\xea\xb6\xd5\xb0\x9e\xad\x11\x47\xfa\x46\x96\x25\x48\x77\x67\x25\x76\x57\xd5\x0f\x43\xd4\xe3\x9c\x24\x19\x8a\xe9\x14\x5c\x6f
\x0a\xe8\xc5\x83\xa7\x9a\x7a\x37\x5a\xa1\x49\x73\x66\x80\x70\x4d\xd4\xc7\xb3\x3d\xe9\x96\x65\x14\x8d\x1f\x8f\xd8\x50\x7d\x25\x91\x54\x82\xb3\x99\x56\xa6\xe3\x27\x2a\x8b\x42\x6e\xfa\x40\xe6\x29\x11\xb6\x0b\x2a\x8d\xd4\x6d\x8b\x20\x61\x77\xb1\xe9\xb1\xe9\xab\x0f\x04\x1f\x57\x6c\x50\xff\x68\xf4\x0a\x3d\x4a\xe9\xa2\xa6\x6a\xc2\xde\xed\xe6\x56\x78\xe7\x2b\x9b\x2e\x7f\x0c\xad\x93\x28\x2d\x0c\x99\x8e\x5f\x9e\x54\xcd\x98\x76\xd5\x57\x98\x30\x37\xbe\x10\xda\x5e\x04\xae\x6a\x40\x6e\xd2\x9c\x74\x3f\xce\xb1\x65\x06\xb7\x91\x8a\x55\x99\xa0\x1d\xb5\xd1\x9e\x42\x13\x26\xa1\x60\xff\x90\x0a\x2b\x1a\xd9\x5b\x80\x0b\x6b\xc5\xb5\x7a\x75\xf3\xd6\xee\xaa\x93\xc8\x08\x27\xcb\x3b\xbc\xc2\xa7\x6e\xde\x5f\x6d\xe8\xb4\xc7\xcd\xb4\xbd\x12\xd8\x24\xe2\x49\xb2\x49\x99\xb6\xca\xcc\xcf\x8a\xcf\x57\x8f\xa8\xe8\x47\x6f\x80\xdb\x0b\x38\x35\xc6\x40\x81\x13\xeb\x2e\x92\xca\xee\x52\xf8\x92\xb9\xd4\x16\x56\x7d\x1c\x31\x3e\x85\x42\x7e\x49\x53\xe4\x7a\x26\x78\x4a\x37\xa9\x24\x61\x82\xb9\x6f\x9d\xef\x7f\x8d\x27\xc5\x45\x08\x80\xf9\xcd\x90\x97\xed\x11\x30\x09\xb0\x35\xa9\xad\x38\x43\x29\xce\xb6\x5a\xf0\x75\x91\x2f\x7d\x94\x9a\xb0\x23\xbb\x7a\x80\x29\x4d\xa0\x26\x1e\x2c\xf2\x33\x5e\x14\xf0\x2f\x4d\x35\x02\xd8\x46\xe4\xf0\xa0\x5f\x1f\xb2\x29\xdf\xe0\x70\x16\x70\x2d\xf6\xf4\x61\x47\xb3\xc1\xf9\xf3\x91\x18\x66\xf7\xcd\x9a\xb6\x39\x8f\x67\x75\x44\xbd\xf1\xc9\x74\x2b\xf8\x92\x7e\xd8\x90\x89\x04\xdf\xfc\x6b\x93\xbb\xb5\x7c\xb4\x82\x16\x11\x0c\x67\xf5\x52\x5d\x96\xe8\x70\xef\x6b\x54\x69\x07\x9e\x15\x09\x63\x37\xf5\xad\xbe\xc2\x9a\xd9\x43\xd2\x6a\xb1\x76\xc4\xa7\xda\xac\xd6\x81\xeb\xd1\x57\x36\xd8\x59\x93\x5b\xb7\x18\xc0\xcd\xa4\xd5\x1a\x8a\xec\x13\x9b\x86\x3f\xa5\x09\x91\xc7\x68\x58\xe3\xc4\x75\x49\xf0\x3e\x68\xdc\xa4\x03\x3a\xe9\x29\x17\x34\x28\x7e\xee\x64\x24\x44\xa1\x08\x5b\x18\xc8\x12\x38\x2d\xc0\x7d\x3a\xe7\xcf\x26\x03\x4f\x50\xcd\xb3\x8c\xb0\xaa\xc0\xa5\xa5\x79\x01\xb5\x1e\x21\xe3\x50\xf3\x1f\x70\x93\x17\xa1\xd5\x1c\xef\x0c\x0b\x2d\x10\xd5\x2d\xdd\x47\x19\xcb\xf6\x18\x03\xae\xd7\x7b\xfd\x45\x1b\xa5\xc0\xbd\xbb
\xc3\xe8\xbc\x94\xbf\xb9\x3d\xf2\x23\x7c\xea\x0c\xbb\x18\x4d\x05\x01\x2d\x3b\xf5\xb8\x61\xa6\x70\x00\xe7\x70\xdf\xdd\x9d\xff\x70\xf2\x30\x44\x44\x45\x28\xa1\x8f\x64\xc4\x22\xf9\x04\x4a\xdf\x3f\x72\xa2\xf4\xcf\x0d\x46\x20\x9a\x12\x26\x81\x13\x50\xd5\x52\x5f\x73\x0b\xa3\xff\x7b\x5e\xfe\xbe\x8d\x56\xee\xb1\x2e\x35\xed\xba\x9a\x7e\x40\xa6\x50\xb6\xcc\x2c\x6d\x8d\x5d\xf3\x3b\xe3\x6f\x1d\xd4\x55\xfc\xde\x22\x25\x9a\xfd\x3d\x67\x1b\x0a\x5d\x67\xc5\x47\xc1\x28\x1a\x64\xba\x34\xc3\x50\xcf\x63\xb3\x5c\x6b\xf3\x4d\x6d\xeb\xeb\x98\x48\x01\x3d\xe3\xfc\xe7\x45\x71\x74\xa4\x04\x21\xc0\x42\x3c\x3d\xd9\xbb\xde\xa2\x8d\xf9\x89\x05\x1f\x1d\x8f\xd8\xa5\x8b\xaa\x2b\x7e\x95\x85\xaf\x21\x9d\x04\x65\x4e\xca\xad\x40\xb3\x31\x95\xfe\x07\x28\x5a\x27\xf3\x44\x99\xaa\xbd\x53\xca\x70\xe2\x07\x6a\x9e\xd4\x71\x09\x81\x59\x34\xdf\xd5\x4d\x4e\xa7\x63\x92\x6c\x22\x89\x0e\xa7\x83\x44\x6a\xfa\x8e\x1e\x1b\x4e\xe7\x36\x75\xa9\x8b\xc9\xd8\x6a\xfb\xa6\xb6\x25\x2a\xdc\xec\x38\x31\x55\x73\x09\x82\x38\xac\x6a\x86\xbc\x01\xc1\xd2\xbb\x68\x25\x75\x13\x86\x65\x52\x53\x7d\xda\x19\xf4\x82\xb0\x1a\x31\x91\x33\x28\xa8\xe5\xa3\x32\x31\x2a\x6a\xa2\x44\x2e\x46\xc2\x46\xac\xcc\x34\x9b\x30\x25\x47\xcc\xcb\x5a\x3f\xe3\xb9\x04\x7f\x54\x4a\x94\xbe\xa0\xbe\x86\x5a\xf7\x26\x2c\xba\x87\x32\x41\x53\x70\x29\xcb\x6f\x6a\xb6\xee\x0c\x2b\x9c\xf0\xd9\xbe\xad\x4a\x5b\xa6\xd8\xb8\x61\xa0\xe1\xb9\x5e\xfc\x19\x61\x44\xc0\x44\xc1\x96\x5d\x7b\x84\x5b\x58\xb9\x1b\x38\x37\x78\x12\xad\xf3\x57\x7a\x8b\x05\xce\x15\x4f\xb5\x7e\x8b\x93\x64\xd1\x33\x5e\x67\x82\xe6\x58\xce\xdd\x46\x1b\x87\x61\x9b\xbb\xc9\x2e\xee\x19\x8e\xe6\xe4\x4e\x61\x95\xd7\x46\x66\x55\x46\xf9\x81\xb0\x3c\xfd\x70\x8a\xfe\xa7\x98\xe3\x59\xff\xec\xfb\xc1\xf8\x7c\x78\xd7\xff\xee\x62\x70\x1e\xcc\xc7\x3e\xb9\x1c\xde\xdd\x2d\xff\xfa\xfd\xf0\x7e\xf9\xc7\x9b\xeb\x9b\x87\x8b\xfe\x7d\x5d\x2b\x17\xd7\xd7\x3f\x3c\xdc\x8c\x3f\xf6\x87\x17\x0f\xb7\x83\x9a\x4f\x1f\xee\x9b\x1f\xde\xfd\x30\xbc\xb9\x19\x78\x2b\xfd\xdf\x82\xd3\x05\x1e\x72\x3d\xd1\x86\x69\x54\x0f\xe0\x11\x2a\xbf\x78\x8a\x1e\xaa\xe5
\x9d\x6c\xbe\x95\xc1\xea\x7a\xc6\x52\xf3\x30\x48\xf7\x03\x4b\x6b\xb1\x28\x4d\x9f\x9a\x90\xe4\x68\x4e\x50\xc2\xf9\x63\x9e\x59\xd6\x66\x8c\xea\x8c\x1b\xc3\x0f\x91\x41\x6b\xdf\x0f\xef\x4f\x97\xcb\x4c\xf9\xc6\x02\x54\x50\x6f\x43\x7e\xc6\x06\x21\x00\xd8\x29\xd8\x52\x5c\xf9\xa1\xc2\x43\x1d\xf4\xe0\x77\x66\x55\x3f\xa6\x35\xcc\x54\xa5\x9b\xd8\x44\x4b\x17\x13\x0b\x1a\x2e\xef\xeb\xaa\xd5\xf4\xcb\x61\xea\x6b\xa2\x09\x89\x70\x6e\x02\xb7\xf5\x3d\x25\x04\x17\xe1\x80\x0b\x7a\xd8\x5f\xa3\x96\x8e\x6a\x1b\xac\xec\x99\x9e\xb8\x7c\xa4\x59\x46\xe2\x0f\xcb\xf2\x4b\xb9\x02\xbe\x84\xd3\xa7\xfb\x0c\xce\xa4\xd6\xeb\x41\xe7\x77\xc5\xe1\xe6\x0b\x1f\x2d\x04\xc1\xa9\x45\xb8\x2e\x14\xab\xd0\x77\x82\x2f\xde\x45\x21\xfc\x07\x2b\xf4\x4c\x00\x36\x26\xb7\xd5\x31\x8d\xee\xad\xcf\x36\x74\x67\xfc\xf6\xae\xd6\x6d\x09\x4e\xa6\x91\x19\xef\x43\xe0\xd6\xdf\x4b\xb2\x99\xb3\x6d\x2d\xf6\xc7\xb9\x69\x14\xb8\xb3\x8b\xeb\x87\x11\xef\xd3\x39\x57\x73\x23\xad\xb9\x2c\x34\xdb\x6e\x33\x1e\x87\x77\x56\x2a\xe2\xd1\x7e\x60\xa5\x42\x0f\x6b\xd7\xea\x9e\xc7\x78\xa1\x89\x03\x82\x13\x64\x9e\x65\x5c\x28\xd4\xd0\x86\x09\x55\x34\xe3\x83\x3b\xc7\xce\xc3\xf3\x38\x68\x44\x4b\x18\xb2\xa6\x5e\x58\x3b\x08\x28\xbb\xae\x81\x8f\x2b\x48\x02\x02\x45\xd0\xd7\x76\x4c\x4b\x2a\x75\x89\x42\xeb\x84\xdf\x5d\xb2\x28\x33\x7d\xc1\xb7\x2d\x35\x5c\xd7\xfb\xb5\x6b\xa1\x76\xcb\x13\x32\x55\xe3\x0d\x9d\x52\xd0\x22\x6b\x42\xcd\xa3\xb3\xf9\x1e\x5a\x6c\xaf\x25\xfc\xd1\x06\x2f\x6b\xd5\x20\xb0\x10\x08\xce\x95\x91\x4f\x0b\x1d\x06\xb9\xd5\x04\xf3\x82\xed\xd4\xe6\xbb\x7b\x21\x50\xcb\xfc\x26\xe6\xcb\xa7\x86\x1f\x8f\xd8\x00\x82\x44\x0b\x45\xc4\xa5\xc1\x83\x16\xb0\x56\xfe\x2f\x15\x56\x7f\xd5\x8c\x94\x66\x14\xfb\x82\xee\x4d\x68\x21\x49\x16\xa8\x28\x9e\x5f\xfa\xae\xcd\xe9\x31\x56\x6f\x27\x02\x9a\x09\x9b\xa3\x23\x15\xc9\xac\x65\xde\xcc\xb3\x88\x66\x06\xaf\xb0\xee\xea\x18\xfd\xe4\x2c\x3f\x90\xdc\xe3\x93\x5d\x5c\x7c\x6a\x82\x17\x0e\xf8\xba\x6e\x61\xf7\x81\x25\xbd\xef\x74\x9f\xd5\x0b\xec\x41\x2b\x6b\x56\xb9\xa4\x80\x33\x66\x2c\xb2\x1b\x84\x0b\x9d\xf9\x8f\xee\xc8\xea\xc8\xc7
\x8f\x50\x6a\xdc\x46\x8f\x83\xd0\xc1\x92\xc5\xff\x32\x9b\x65\xd0\x36\x5c\x30\x85\x2d\xfd\x6c\x3d\xa8\xfa\xfc\x80\x07\xd0\x80\x71\xa0\x29\x4d\x12\x90\x03\x8e\x51\x9f\x2d\x1c\x58\x85\xbe\x0a\x5d\x10\x29\x9d\x31\xbe\x2e\x8f\xbe\x81\x98\xa2\x80\x98\xee\x9a\x89\xc9\xc4\x69\x14\x58\x45\xfb\xa1\xa8\x3d\xe0\xd6\x69\xde\x82\x97\xab\x7e\xb4\x47\xab\xdb\x40\x79\x0f\x6f\xf3\xd7\xca\x00\x5b\x1a\x6e\xf0\xe1\xbf\xea\x87\xfe\x29\xc7\x02\x33\x05\x79\x4d\x56\x74\x17\x24\x48\xaf\x26\x9f\x21\x06\x95\x19\x43\x30\xfc\x14\x6e\xae\x73\xf9\x9b\x30\x31\x1a\xf7\x10\x3d\x26\xc7\x50\x81\x56\x68\x59\x62\x52\xbc\x39\xd7\x92\xc3\x88\x2d\xe5\x6b\x1c\xa3\x7e\x22\xb9\xfd\x82\xb0\x28\xe1\x12\x42\x70\x27\x21\x38\x38\x50\xbe\x75\x2b\x4d\x16\xa0\xa0\xc0\x56\x16\xcd\x73\xfb\x20\xf8\x10\x0a\xa9\x82\x4f\x3c\x81\x93\x5e\xfc\xfe\x7b\x9e\x19\x6f\x45\x53\x9c\xc4\x0b\x96\xac\x5a\xba\x86\x5e\x6c\x93\x4c\x39\xe4\x55\x1b\x04\x6f\xc0\xc6\x14\x79\x34\x01\xca\x1c\xfa\x1a\x2b\x94\x10\x2c\x15\xfa\xc3\x37\x1b\xc5\x86\xb8\x09\x16\xdc\xd5\x1e\xdf\x22\x19\xde\xa5\x53\x86\xc2\x9d\xef\x18\xea\xe3\x62\xa1\x10\x46\x8c\x3c\x87\xd9\x33\x1c\x12\x9e\x5c\xd1\x5b\x12\xe0\x77\x98\x98\x79\x83\x3e\x04\x19\xa9\x46\x65\x6a\xe0\x23\xae\xa4\x83\x75\x9f\xda\x61\xd5\x50\x56\xcf\x47\x9f\x41\xb8\xb9\x7e\xa9\x48\x6c\x9c\x63\x35\x62\x96\xb3\xba\xb0\x91\x20\x95\xbd\x9f\x24\xe5\x64\x42\x0c\xf9\xb2\x4c\x4f\x58\x8f\x3e\x3e\xf6\x0b\x74\x05\xea\x97\xcf\xe8\x2a\xd9\xe9\x8a\xc3\x62\x72\x0e\x3c\xa6\x63\xd8\x76\xad\xb4\x53\x67\x5f\x7e\x45\x21\xb8\xa6\xfb\x0b\x3e\xa3\x11\x4e\x5a\x08\xc3\xa4\x6e\xc8\x6b\x0e\xd6\xb2\x4d\x7f\x85\x6c\xbc\xef\x0e\xda\x8b\xca\xf5\xf6\x71\xb8\x66\x9f\x79\x8d\xb9\xbd\x61\x73\x03\xd9\x62\x17\x05\xdc\xa7\x16\xbe\x96\xc7\xb7\x34\xf4\x61\x0c\xc0\x06\xeb\xb9\x60\x01\x14\xe0\x58\x87\xc9\x2f\x8b\x83\xbc\xe5\x20\x4d\xd2\x06\x7b\x1a\xc6\x67\xdf\x6c\xf0\xbc\x66\xef\x7b\xfa\xbd\x62\xfe\x6e\x2a\x3e\x08\x6e\x79\xe2\xcd\xc2\x5e\x3f\xfe\x3b\x8e\x20\x9b\x11\x7a\x72\x79\x94\xcb\xa0\x93\xae\x54\x07\x06\x63\x7e\xad\x78\x98\x09\x1e\x11\x29\x8f\xd1\x00
\x2e\x1a\xfb\x4f\x84\xa7\xce\x21\x11\xbc\x3c\x62\x5a\x33\x71\x18\x75\x41\xfb\x65\x12\xaf\x3b\x01\x06\xf0\x76\x27\x5f\x4e\xba\xbe\x0e\x5b\x93\x36\xe1\xf0\x76\xa1\x0d\x28\xdd\x84\x06\xb3\x53\x14\xf3\xe8\x91\x88\x13\x41\x62\x2a\x4f\xc1\xb7\xae\x1a\x9d\x7a\xa9\xd6\xb6\x77\x96\x34\x9a\x02\x05\xd6\x24\xfe\x9f\x99\xfe\x6d\xe8\xbf\x4b\x21\xea\x21\x3a\x05\x75\xc2\xe5\x9d\x9a\x44\x2b\x07\xe9\x47\x98\x12\x0b\x13\x95\xec\x4c\x59\x95\x85\x70\x9a\x86\x16\xda\x9a\x32\xa6\xc5\x3e\x62\x70\xb6\x9c\xb6\xc9\xcc\xb1\x01\x07\x66\x52\x8a\xdb\x7c\x2d\xc3\x2e\x32\xac\xe6\x12\xe0\x39\xca\x6b\x60\x95\x2e\xf8\x54\xaf\x10\xce\x20\x5e\xc1\x58\x29\x8a\x8f\x3c\x88\x84\x54\x34\x49\x46\xcc\x24\x58\x00\x92\xc6\x57\xb5\x28\x40\xfa\xd3\x1e\xc2\x71\x8c\xfe\xf7\xd7\x1f\x2f\x7e\xbe\x1f\x8c\x87\x57\x60\xb4\x1e\x5e\x0c\xbe\xe9\xf9\x1f\xaf\x1f\xee\xfd\xaf\xc6\xc2\xf2\x44\x04\x4a\xf1\x23\xa8\x78\x4c\x12\x9b\x20\x4a\x46\x2c\x1c\xa9\xc3\x47\xd2\x4f\x24\x71\x91\xae\x56\x4c\xf1\x30\xd1\x76\x0f\x9b\xc0\x55\x2d\x6c\xe6\x06\xca\xef\xad\xff\x64\x35\x0d\x3a\xe2\xf1\x5d\x38\x31\x10\xf2\x80\xb1\x0c\x00\x73\xac\xee\x5b\x10\x1c\x61\x33\xca\x9a\xe2\xf1\x08\x7b\x7a\x49\x21\xfe\x07\xb2\x80\x80\xf0\x1b\x4c\x45\x6b\xda\xab\x47\x3c\x74\x27\x46\xeb\xe9\x58\x56\x0f\x95\x34\xb2\xb0\xc9\x28\x6e\x8c\xf9\xac\x03\xbb\x7d\xf3\xe9\x5a\x08\x4d\xf2\x59\x09\x87\xc4\xe5\x73\x56\x1d\x5c\xa5\xbf\x68\x0a\x1a\x1c\xb1\xfb\xeb\xf3\xeb\x53\x44\x12\x3c\xe1\x90\xae\x68\x43\x82\x5c\x13\x76\xc1\x22\x9e\x06\x0d\x95\x50\xd8\x7a\x28\x2b\x50\xd8\x42\x23\xda\xb1\x69\x63\x0d\x1a\x5b\xc6\xc5\x32\x86\xd9\x7e\x55\x40\x3b\xd9\x1b\x2e\xda\x5c\xff\xfa\x35\x93\xbf\x91\x69\x45\xae\xc2\x79\xed\xdd\x3c\x25\x18\x10\x3a\xac\x5b\xc8\xda\xf2\x6d\x00\x6b\x92\x94\x6a\x46\xeb\x83\x23\x8f\xad\x0b\xbe\x78\x93\x33\xf4\xc3\x5f\x24\x9a\xe4\x6a\xc4\xca\x6d\x70\x86\xfa\x3f\xdd\xa1\xef\xb0\x8a\xe6\xdf\x8c\x18\xe4\x0f\xfe\xf0\x97\x06\xb8\xc8\x8d\x11\x98\xf5\x9a\x9c\x63\x85\x2f\x38\x8e\x29\x9b\xd5\xc1\x2f\x17\x35\xf2\x06\xf7\xfd\x53\x74\x6d\x75\xf8\x22\xdb\x55\x39\xd8\x93\xa0\x21\x60
\xc8\x30\x11\xc7\x45\x80\x95\xb3\x32\x44\xad\xd1\xcc\xe0\xc2\x1a\xb1\x7b\x83\x3b\xad\xb9\x2a\x55\x28\xe3\xb6\x4e\xa3\xd6\xca\x0c\x22\x37\x76\x59\xe0\x24\x59\x20\xbd\x3a\x40\xc6\x7e\x33\xac\x3c\x06\xf2\xcc\x32\xb3\x1f\x31\x50\xd0\x7d\xfe\x6d\xc2\x23\x9c\x40\x4c\xde\x51\x60\xd3\xd3\x6a\x3b\xcf\x01\x03\x07\x82\x61\xd8\xa2\x1c\x3a\xeb\x61\x99\xbc\x50\x16\x6e\x14\x18\x00\x60\x1f\xad\x37\x36\xe5\x9a\xe3\x18\xbc\x59\x30\xbe\x25\x66\x75\xf4\x87\x1e\x7f\xd6\x2c\x8b\x7e\xea\x53\xd3\x79\xce\x1c\xde\x5a\x04\xe6\x7b\xb6\x80\xf0\x6d\x28\xac\xc6\x21\xf4\xa3\xe0\xce\x96\x28\x97\x76\xd1\xdf\x89\xc1\x67\x23\x66\x22\x05\x4b\xfb\x12\x22\x14\x06\xbd\x73\x06\x81\x8c\xcb\xf9\xf0\x79\x66\x03\x1b\xad\xac\x9f\x09\x72\xe4\xb3\xbc\xe3\xd2\x9a\xea\x1b\xf6\x18\xdd\x86\xea\x75\xcc\xa3\x3c\x75\xd5\x23\x20\x43\xdc\x46\xc0\xd9\x4b\xd4\x53\x88\xb9\xd8\xd7\x51\x3c\x20\xd1\x29\x02\x10\x39\xad\xf5\x63\x43\x30\xfd\xf0\xd3\x65\x49\xbd\x59\xf0\x05\xde\xb1\x5b\xd4\x9a\x69\x68\x9c\x95\x5b\x2a\xb5\xb6\x33\xf6\xc2\x55\x81\x70\xcf\x05\x08\x5b\xe4\x73\xc6\xc1\xc8\x6d\x12\xa0\x79\xfc\x95\x44\xc3\x1b\x2d\x01\x69\x8d\xd7\x9f\xc1\x5c\x2a\x13\x5c\x66\xf2\x94\xe1\x6b\x93\x2e\xd0\x43\xdf\xa2\x51\xfe\xed\xb7\x7f\x8a\xd0\x67\xf7\xc7\x9f\xff\xf3\x3f\xff\xf4\xe7\x4d\xd2\x49\x9c\x42\x0e\xed\x16\x6b\xe4\x4b\x66\x96\x45\xa2\x70\x07\x96\x39\xd5\x0e\xbb\x60\x0f\x60\xd3\xf2\x6f\x83\x64\x1d\xc4\x0e\xe1\x99\x3d\xe1\x32\x3c\x99\xa8\x74\x34\x8b\x48\x02\x49\x54\xaf\xcc\x21\xbc\xb0\x6b\x25\xfa\xff\xb5\x02\x90\x75\xac\x8f\xca\x76\x31\x4e\x34\xf1\xe2\xb5\x6e\x04\x7d\x6d\xed\x7f\x0a\x1c\x88\xdf\xb8\x0b\x8e\x27\x31\x11\x66\x4c\xde\x64\xe7\x0d\x89\xc0\x1c\xc8\xe7\x2c\xe1\xb1\x83\x80\x2f\xf0\x0e\x28\x08\x08\x83\xcf\x58\x73\xee\x9e\x85\x0a\xb5\xf9\xa5\xe0\x79\x99\xe2\x88\xd8\x5c\xe8\xaf\x3f\x9f\xea\xdf\x7a\x68\x71\x0a\x41\xa4\x3d\xf4\xeb\xa9\x45\x04\xc4\x42\x8d\xf5\x4f\xdf\x38\x59\xdb\x36\x01\x83\xa6\x12\x7d\x75\xf2\x84\xc5\x09\xb0\xe7\x13\x33\xa2\xaf\x2c\x67\xf5\xb5\x7f\x43\xd9\x3c\xe1\xfc\xd1\x06\xd8\x2e\x7d\x78\xe2\xc0\x65\x81\xbc\xbd\xdf
\xc4\x6c\xbd\x07\x1f\x52\xe8\x08\x5e\x20\xe8\x38\x9b\xa0\xe3\xbf\x4b\xce\xd0\xf1\x02\xa7\x89\xfd\xd5\x3d\xb5\xf1\xbf\x58\xda\x9c\xb8\xd8\x07\xf9\x24\x0b\x63\x29\xfd\x2e\xe1\x13\x98\xd5\xa5\x9b\xa9\x89\xa0\x85\x81\x16\xb7\x4f\x71\x61\xd9\x89\x38\xb0\x0d\xc0\x48\x4c\xb9\x32\xaf\xd8\xf4\xd6\xe5\x59\x7d\xf6\x43\xfa\x6f\xe3\x17\x86\x45\x71\x49\x7c\xc6\x38\xec\xa3\xd7\x74\xa3\x9f\xd1\xd7\x96\x05\x7d\xa3\xef\x18\x1b\xae\x6c\x96\xa1\xae\x83\x85\xef\xe0\xe7\xa0\x03\xca\x90\x49\xcb\x5c\xf1\xe5\xaf\x27\xc7\xc7\xc7\xfe\x6b\x40\xe6\xf9\x7f\x11\x55\x92\x24\x53\xd3\x92\xbb\xc1\x16\x23\x76\xe9\x8a\x4b\x39\xe3\x75\x01\x5b\x9d\x09\xae\x78\xc4\x13\x74\x54\x18\x74\x63\x1e\x49\xf4\xef\x5a\xac\x0d\x96\x12\x7e\xd4\x7a\x5c\x03\xd4\xbd\xa9\x66\xf1\x4a\x87\xca\x1a\xc4\xab\xc7\x2a\x44\xaa\xf5\x8a\x2d\x96\x61\x32\x32\xd0\x82\xa6\x9c\x13\x8b\x66\x2b\x84\x7e\x99\x7c\x56\xf0\xa8\x01\x2c\xb8\x36\x94\xbd\xfe\xa6\x5c\x62\xb7\x05\x66\xb0\x21\xeb\x86\x05\xb0\x98\x9e\x96\x33\x98\x79\xf6\x42\xf7\x89\xbe\x5c\x58\x58\xee\x48\xe6\x69\x8a\xc5\xe2\xa4\x38\x6d\xcb\xc4\x59\xa0\xc9\x02\x8f\x49\xdc\x02\x80\x0b\x37\xb1\x47\xcb\x46\x31\x58\xf1\xd2\xdd\x68\xfe\xec\x46\x50\xaf\x39\x40\x65\x22\x2c\xe2\xb1\xa5\xeb\x22\xfb\xb4\x2c\xb1\xf8\x77\x96\x65\x15\x17\x11\x23\x0b\x63\x1c\x53\x06\xa6\xcc\xbe\xe1\x3e\x6e\x60\xdf\x7c\x0c\x95\xff\xc9\x6c\x03\xf7\xe8\xf0\xfa\xce\x7d\xd3\xfe\xd2\x85\x75\x28\x8b\xec\x38\x09\x31\x80\xd9\x0c\x09\xfc\x5c\x5c\xbf\x10\xdb\x61\xac\x33\xb9\xcf\xcd\x35\xff\x3e\xe3\x37\x34\xd1\xb7\x16\xd0\xf8\xf1\x88\x95\x7e\xee\x21\x92\xd0\x94\x32\x1f\x5b\x67\x98\x3b\x9f\x1a\xe9\xf9\x91\x2a\xbd\x65\x32\x7e\xd4\x1c\xcc\x61\x57\x06\x2a\x55\x9f\x2d\x1c\xe9\x78\xc7\x94\xb5\x40\xe4\x52\x8f\xab\xd0\xd1\x21\x6b\x9f\xc6\xe4\xc8\x0a\xa4\x34\x20\x3c\x38\xbf\xff\x1f\x7b\x6f\xda\xdd\xc6\x91\xa4\x8d\x7e\xef\x5f\x91\xd7\xef\x07\x49\xef\x80\xa0\xe5\x9e\x99\xe3\xd1\x1c\x9f\x73\x61\x8a\xb2\xd9\xa6\x28\x36\x17\xbb\xe7\x36\xfa\x40\x89\xaa\x04\x90\xc3\x42\x66\xb9\x16\xd2\xe8\xe5\xbf\xdf\x93\x11\x91\x4b\xad\xa8\x22\x48\xc9\xd3\xe3
\x0f\x33\x6d\x11\x40\xee\x4b\x64\xc4\x13\xcf\x33\x57\xa6\x34\xbb\x97\x3c\x5c\x38\x28\x2f\x28\xee\xc8\x8a\xfe\x04\x27\x00\xd4\x51\xc1\xfc\x3a\xfb\xb7\xc5\x40\x39\x55\xe5\xf6\xd0\x64\x13\x82\x0f\x7f\x2e\x37\xdd\x65\x26\xec\x4d\x45\x89\x4b\x42\x95\x5b\xbb\xa1\x46\xac\xb8\x53\x32\x7f\x62\x11\x25\x1c\xd9\xf8\x4c\x41\x80\x7c\x9c\x60\x80\x34\x0d\xea\xc2\xeb\x05\xab\x41\x1d\xc1\x44\xa8\x97\xf8\xef\x57\x8c\xee\x86\x2f\x27\x74\x9f\x67\xb9\x63\x39\xc3\x39\x07\x1d\x6a\x11\xa3\x0f\x1d\x94\x17\xd6\x3c\x8b\xd1\x5b\x1e\xbe\x2a\x30\x83\xd7\xd8\x5f\x3b\x5d\xb2\x07\x99\x6f\xe6\xea\x46\x5b\x87\x23\x53\xda\x69\x57\x4c\xe0\x31\xda\xa8\x8f\xe7\x70\x08\x40\xab\xdb\x56\x80\x39\x84\x0f\xca\x35\x02\x14\xec\x42\xe9\x58\x1c\x46\xd2\x78\xe3\x63\x15\x36\x7e\x9d\x09\xcc\x07\x83\x9b\xa2\x2b\x9d\x56\xe4\xf9\x48\xdf\x7c\x7d\xe2\xe1\x1e\xa2\x72\x4c\xad\xfa\x61\x94\x82\x48\xc8\x7f\xea\x6e\x35\x28\xc5\xbe\x38\x83\x6c\xe0\xca\xd8\x3b\x45\x88\x43\x27\x21\x6a\x61\x64\x1c\x74\xf7\x63\xdf\x23\x18\x76\x07\x30\xe6\x6c\x9d\xe9\x32\x75\x29\xf3\x36\xdd\x0f\xa7\x81\x6c\x9a\x33\xb5\xd2\x6f\xe8\x4d\x75\x2e\xd5\x1d\xae\xf8\xe7\x9a\x23\x14\xfd\x10\x71\x85\xaa\xd6\x2a\xd1\x43\x1f\x8e\x98\x54\x51\x52\xc2\xc5\x97\x17\x3c\xba\x43\xe1\x92\x2e\xa7\xaf\xf9\xcd\x62\x7f\x32\x65\x87\xc5\x54\x26\x09\x55\xeb\x2f\x50\x20\x83\x03\x17\xd0\xbd\xe4\x8c\xb3\xdb\xab\xb3\xf6\xba\xef\x64\x33\x98\xd3\x7e\x7b\x56\x17\x08\xfc\xbf\x1f\xe4\x28\xdc\x65\x8d\xfa\x57\x54\x96\xba\x73\x2e\x75\x11\xcb\xe3\x22\x2d\xcc\x03\x22\xbe\x6a\x71\xed\x8f\x5e\xa7\xeb\xb4\x5c\x98\x81\x4a\xc6\x00\x04\x4c\x2b\xbe\xbb\xbc\x9d\x05\xbf\xeb\x5b\x2a\xdf\x5d\xde\xb2\xa0\x0e\x24\x75\x4e\x44\x54\x38\xa4\xf1\x94\x9d\x78\xad\x85\xba\x65\x1e\x8b\x7b\x19\x61\x8a\xeb\xc4\x58\x45\x73\x05\x14\xe6\xe6\xad\x73\x64\x79\x2f\xd9\x77\x97\xb7\xc4\x96\xe9\xf9\x6d\x50\x36\x02\x28\x2c\xc6\x5d\x3b\x35\xf2\x70\xa5\xd5\x11\x52\xfb\x64\xb1\x8f\x76\x4c\xe0\x71\x1d\xf1\xb4\x28\xc9\xc0\xb8\x7f\x3d\xb5\x73\x72\xe5\x23\x21\xa6\x59\x7a\xae\x8c\xad\x84\x59\x06\xa0\x70\x66\x3a\xdd\x9c\xda\xda\xa0\x1e
\x02\x0e\x80\x41\x3b\xe8\xf0\x97\x2e\xc3\x8f\xab\x1d\xe3\xd9\x52\x16\x99\x79\x86\xe1\x8f\x27\xc8\x44\xb6\xb1\x2a\x56\x38\x6f\xde\x32\x22\x51\x3a\x98\x60\xa9\x8a\x7c\xae\x82\x0c\x16\x97\x15\x8c\xc9\x0b\x52\x31\xa0\xfc\x05\xec\x8d\xa5\x20\x8d\x12\x5d\xc6\xf6\x5a\xcd\x9c\xc8\xdd\x2e\x45\x23\x6a\xae\x80\x99\xc4\xdc\xad\xda\x98\xa1\xfe\xee\x7f\xc3\x3e\xaa\x7b\x19\x4b\x7e\x54\x88\x3c\xe1\x47\xc5\xbf\x7e\x9c\xd4\xfe\xc4\x5f\x7f\xf9\xe5\x47\xd4\xeb\xeb\xa2\x5d\x08\xd8\x95\x0e\x74\xf0\xb4\xc7\x29\x1c\x4f\xa1\x59\xa5\x07\xcc\xd3\xb9\xbc\x13\xec\x23\x4e\xf7\x47\x22\x29\x7e\xdc\xb4\xcd\x55\xdb\xbc\xb1\xc7\x4c\x1b\x50\xc6\xb7\xcf\x1b\xeb\x99\xb6\xd7\xeb\xe9\xbf\xad\x97\x66\xb6\xbe\x5a\x4f\x5f\x7f\x09\xff\x59\x9b\xa3\x7d\x9b\xd7\x65\xcf\xb4\x35\xbb\xe5\x20\x6a\xd9\x96\xee\x2c\x9a\xab\xfd\x87\x11\x1b\x77\x16\xc1\xaa\x6d\xdb\xf8\xbc\x10\x87\x66\xb7\x22\x77\xf5\x08\xf4\x75\x83\x14\xbc\x37\x22\x78\x20\xa3\xb6\x67\xc3\x06\xb8\x67\x37\xb5\x77\x08\xc0\x85\x0f\x47\xf0\xf1\xc0\xf7\x87\xf5\xa7\xf6\xdd\x3d\xdd\xe9\x6f\x66\x22\xc4\x08\x06\x99\x6b\xf3\xf5\x81\x8d\xac\x7c\xb5\xaf\x8d\x0f\x1c\x55\x03\x9b\x62\x35\x31\xbd\xd6\xc7\xec\x22\xbb\x1c\xd1\x65\x92\xbb\xbc\x3f\xd7\x12\x0b\xad\x74\xef\x6b\x5b\xef\x9a\xf6\x52\x28\x4a\xe8\xa2\x6e\x2d\x0b\x3f\x70\x45\x1c\x08\x85\x33\x4f\xea\xc5\x76\x30\x11\xba\xaf\xf8\x2d\xfd\xf8\x7d\x83\x16\xdd\x99\x97\xef\x21\x33\xdb\x91\x61\x6d\xb9\x32\xd6\x9a\xad\xb5\x23\xb0\x84\xaf\xfc\x47\x35\xe9\x36\x7d\x54\x83\xb0\xc6\x81\x7a\xfd\x54\x95\x2d\xe5\x01\x63\xab\x3c\xc1\xd8\x41\xb1\x01\xb7\xb2\xd7\xb9\xb5\xc7\x9c\x77\x2f\xa3\x26\x6e\xc2\xb3\x35\x3a\xbd\x72\x51\xe4\xaf\x5a\x66\xd8\xe7\xb1\x1d\x30\xc3\xd6\xec\x5a\x8c\xe3\xf9\xb0\xf6\x18\xb8\x54\xfa\x76\x9a\x6b\x65\x55\x14\xc3\xbd\xb4\x6c\xfd\x21\xe3\xbb\x4f\xae\x8b\x74\x86\x0a\x52\xc0\xc7\xda\xcd\x83\x75\x20\x1d\xec\x05\xdf\x3a\x96\x17\x2a\xcd\xe6\xec\x62\xe3\x96\x02\xf4\x5c\xba\xdb\x30\x88\xeb\x75\x68\x13\x88\x91\xb6\xab\x05\x73\x35\xb3\x5f\xf1\xac\xd4\xb9\x44\x2f\x0b\xa6\x23\x96\x4b\xcc\x70\x01\x9f\x19\xf7\xa3\x4e\x9d\xeb
\xe8\xc4\xd8\x84\xfc\x5a\x17\x6e\x73\x91\xf9\xdb\xc8\xb3\x96\x86\xfd\xe8\xa8\x79\x18\x6b\x71\xef\x89\x6e\xbb\x48\x45\xd9\xb1\x6c\xab\x78\xf8\x43\x85\x0a\x22\xc2\x6a\x44\x31\x40\x5e\x40\xb2\xf3\xcb\xd4\x93\x9b\xd7\x2a\x6b\xee\xd6\xe2\xa0\xd3\x58\xf2\xed\x22\xd3\xdd\x32\xcc\x03\x86\xc9\x16\x51\xf1\xd9\x6f\x50\x96\x71\xc7\x7e\x2e\x79\x82\x97\x9b\xa2\xe5\x68\x9b\x0d\xee\x8f\xaf\xfe\x9d\xcd\xe0\xf6\x61\xef\xe1\x5c\x04\xd0\x16\x94\x56\x68\x26\xb7\xa9\xc8\x72\xad\x78\xa7\x1e\xf9\xdd\xd7\xf9\x82\x34\x55\xcd\xd3\x58\x97\x4d\xfd\xd4\x11\x3d\x69\x29\x2d\xec\x14\x67\x77\xe5\x52\x64\x4a\xa0\xe6\x3a\x7c\x8f\xd9\xef\x0d\x6a\xae\xe6\x65\xb1\xf9\x6a\x11\x25\x72\xb0\xd0\x2b\x64\x8c\xce\xcc\xcf\x4e\xf0\x57\x7d\x1d\xa8\x94\x5f\x69\xba\x62\xf8\x19\xc3\xcf\xa6\xec\x5b\x1e\xdd\x09\x15\xb3\x34\x29\xd7\x92\x08\x62\xd0\xdc\x97\xd5\x87\x7d\xb5\x63\x68\x5b\x60\xf9\xe6\x1a\x9a\xab\x2d\xbf\x43\xf1\x15\x32\x22\xcd\xcb\xa1\x8b\x5e\xd0\xb9\x4a\x16\xb2\xb9\x76\xf7\xce\x96\xbb\x0f\x9b\xc5\xd4\xd7\x5e\x5e\x62\xbe\xdc\xc3\x46\x13\xca\xa8\xe2\xa9\x19\xb1\x71\xdd\x6a\x6d\xf0\x78\x59\xae\x15\xa7\xbe\x4f\x8d\xc1\xdd\x0b\x21\x3c\x10\x10\x2a\x15\xe3\x40\x05\xf6\x22\x67\x65\x6a\xed\x33\x88\x2d\x25\x80\xf4\xc1\x29\x30\x1f\xa4\x32\xba\x43\x6c\x29\x64\x4f\x30\xd7\xbd\x86\x48\x33\x13\x1e\xe4\xd8\x76\x34\xac\x90\x08\xe7\x30\xdc\x4a\x43\x7f\x68\xcf\x3a\x1d\x98\x19\x52\x6c\x84\x5a\x3c\x42\x06\x67\xf8\xa4\x55\xb2\x40\xc8\x0c\x76\x31\x3a\x37\x84\xa5\x92\x44\x7b\xed\xdf\xd8\x4e\xe3\x41\xae\x6a\x66\xb4\xcc\x59\xce\x0b\x99\x9b\xb3\xac\x75\xc4\x3d\xfd\xd0\x21\xa3\xce\xc7\x71\x1e\xb5\xf0\x1d\xd5\xc6\xc2\x65\x9a\x4d\xd9\x3b\x88\x6c\x04\x2f\x03\xed\xd8\x83\xba\x0e\xac\x62\x23\x3a\x69\x74\x9f\x02\xa2\x69\x7b\x10\x7c\xbf\x37\x60\xe5\xb2\x0a\xa7\x6c\xe6\x23\xca\xc8\x9f\x84\xb1\xe2\x3d\x3d\x12\x49\x2e\x1e\xb3\xf8\x06\x05\x5f\x00\x75\x05\x0b\x88\x81\x25\x95\x9b\xbf\x7b\x3e\x75\xd7\xcc\x07\x48\xdc\xe7\x77\x42\xf5\x79\xd8\x87\xb7\x10\x43\x20\xbd\x2e\x01\x17\x5b\xd1\x18\x5e\x79\x4c\x03\x87\x6f\x3b\x4f\x59\x25\x57\xc7\x66\xc8\xcd\x33
\x24\xba\xa3\x74\x41\x8c\xb0\x11\xe9\xd5\xc3\x46\xe7\xe1\x3e\xb3\xf3\x87\x2f\xd9\xac\x74\xea\x56\x90\x6e\xe9\x06\x18\x71\x96\x4a\x87\x9c\x58\xd0\x6a\xb7\x49\xd1\xad\xe3\xe6\x9b\xd9\x23\x14\x86\x01\x90\x09\xb6\xa8\x96\xdd\xac\xd2\xf2\xa9\xd4\x53\xf6\x13\x51\x37\x47\xb8\xd1\xa0\x1f\xbe\xce\x3f\x40\x7d\x4f\x41\x07\x83\x7e\xbe\xa7\x4f\xc5\x7a\x64\x10\xda\x81\x8c\xad\xff\x51\x43\x9a\x06\x5d\x94\xa9\x8e\x99\x5f\xef\x5d\xb9\x2e\x4a\x69\x04\x99\xfe\x0a\xbb\x15\x34\x6e\x70\xdf\xf6\x6d\xb5\xf7\x01\x52\x8d\x2d\x4b\x99\xc4\xc8\xe7\x17\x58\xa8\xda\x9a\x40\x20\x24\x04\xf6\x88\xcc\xdd\x05\xd7\xb2\xe8\x7f\xf8\x3a\xbf\xd4\xf1\x21\x0b\x6b\x3c\x67\x6b\x73\x5d\x0f\x48\x64\xc9\x43\x34\xd1\x76\xff\x48\xa4\xba\x3b\x05\x21\x5e\xe4\x55\xe5\xdc\x9e\x06\x03\xe6\x6c\x59\xae\xae\x41\xa6\xb3\x8b\x16\x29\x50\xb0\xb3\x79\xce\x66\x9e\x4d\x35\x2e\xeb\xae\x6b\x52\x08\xc2\xe4\xed\x11\xce\xfe\x70\xfd\xe1\xe2\x68\xcb\xb3\x7c\xc3\x81\x76\xc2\x96\x35\xb1\xca\xe7\xf8\x5e\xb7\xd0\x0a\xa9\xe6\xea\x88\xad\xf5\x04\x81\x3c\x6f\xd8\xa6\x28\xd2\xfc\xcd\xf1\xf1\x5a\x16\x9b\x72\x39\x8d\xf4\xf6\xd8\x0f\xcd\x31\x4f\xe5\xf1\x32\xd1\xcb\xe3\x4c\x40\x2a\xc7\xd1\xeb\xe9\x57\xaf\x61\x66\x8e\xef\x5f\x1f\x03\x7c\x63\xba\xd6\xff\xe7\xfc\xab\xff\xf8\xfd\xbf\x9b\x82\xd3\x5d\xb1\xd1\xea\x0d\xa1\x84\x7a\xcb\x3e\xc2\x67\xc2\x31\xfe\xa4\x56\xcb\x7f\x4c\xbf\x0c\x9b\x41\x5f\xdd\xea\x58\x24\xf9\xf1\xfd\xeb\x85\x9d\x98\x69\xda\xa1\x2d\xf1\x5b\xf2\xc3\x27\x48\x7e\xb8\x93\xc5\x6f\xc9\x0f\x9f\x35\xf9\x61\xb8\xc9\xe5\xce\x18\x60\x93\xf6\xe7\xa3\xf9\xbb\x3b\x23\x6d\x2c\x60\xdf\x39\xd4\x72\x39\x84\xa9\x69\x07\x5c\x11\x23\xa5\xde\x6a\xdd\x75\x6f\x99\x0e\x9f\xdf\x58\x45\x97\xce\xd7\xc5\x28\x26\x0e\x80\x1a\xca\x08\xd4\x02\xd0\x47\x99\x72\xd9\x96\xd2\x10\x28\xdc\x1c\x30\x84\xa8\xb8\xd1\x4e\x3b\x36\x44\x98\x8a\xb4\x99\x44\xbc\x78\x12\x89\xaa\xd6\x3a\x10\x95\x39\xba\xfc\x86\xdd\x3d\xc0\x34\x26\x9c\xf2\x41\x23\xfa\x8c\x62\x22\x4f\xad\x22\x42\xdd\x7d\xa4\x82\x48\x82\xbf\xb6\xa8\x6a\xfd\x60\x95\x43\x9e\x42\x6f\xc3\x23\xc6\x87\x69\x6d\xe0\x22\x85\xb6
\xd8\x76\x75\x34\x63\xc3\xf3\xc7\xc1\xf3\x67\x48\xd6\xeb\xa2\xb1\x88\x6d\x96\xb9\xad\xd0\xde\xc6\x96\xff\xc8\x5c\xee\x96\x66\x31\x2d\xb3\x54\xe7\x22\x9f\xb2\x77\x3a\x43\x62\x2d\x62\xbd\xf1\x29\x07\x57\xef\x4e\xd8\xeb\xaf\xff\xe3\xf7\x73\xf5\xb2\xc5\x18\x82\x4b\x54\x67\x6b\xca\x80\x00\x13\x68\xcb\xf3\x42\x64\xc7\xd9\x2a\x3a\xc6\xab\xe3\xd8\xfc\xfe\x88\x2a\x3d\xd2\xab\x23\x27\x26\x70\x44\xbc\xea\xd3\x6d\xfc\xaa\x0b\x1b\xd8\x6e\x70\x7f\xb6\x47\xcf\xac\xc3\x30\x6f\x9b\xdf\xfd\x07\x6b\x65\x0b\xa1\x21\x42\x56\x48\x0e\x16\x0b\x92\x21\xea\x95\x93\xbf\xc1\x4c\x5b\x54\xca\xd2\xab\x96\xff\xf8\x36\xd1\xcb\xfc\x95\xa3\x60\xe5\xb9\xad\xc3\x73\x22\xb6\x9d\xdb\x8d\x3d\x77\xc8\xeb\x9b\x86\xe2\x39\xdd\x6a\xf6\x4c\x0c\xa7\x6d\xcc\xc0\xb7\x1f\x1a\xde\x16\x44\x46\x28\x9e\xe9\x52\x59\x7d\x09\xad\x84\x5e\x01\xd0\x08\x9e\x49\x16\x27\x09\x91\x05\x40\xdf\x39\xf6\xa7\x4c\xa4\x68\x7d\x40\x0c\xac\x7b\xb8\x0f\xd4\x58\xd9\x37\xce\xcf\xa1\xb1\x72\xe8\xb8\xd3\xc1\xf8\x99\x06\xfc\xd0\x64\x06\xdc\x4a\x63\x30\x40\xe6\xfb\x7b\xe3\xfd\xee\x1c\xf0\x1a\xcf\x5e\xce\x20\xe5\x19\x58\xf0\xe2\xa8\xd0\x47\x40\x9b\x07\x64\x6c\xa8\x7a\xd4\x05\x02\x02\x9c\xc4\x98\xeb\xde\x7c\x7f\x40\x3b\xf1\xd5\xf6\x4b\xd0\x50\x32\x58\x73\x24\x11\x27\x50\xb8\x54\x4a\x64\x14\x01\xde\x6b\x19\x8c\x44\x51\x84\x53\xd9\x8f\x09\xf7\x6e\x8a\x50\x91\xc6\x65\x04\xf2\xe0\x10\x98\x32\x78\x9a\x6c\xf4\x56\x1b\x5b\x57\x97\x79\xf0\x21\x3e\x6d\xc1\x98\xe8\x34\xcc\xb7\x3c\x45\x7b\xf5\xf3\xf5\xc6\x6c\x2d\xf3\x11\xba\xa0\xc3\x2f\x8d\x12\xf9\x5a\x56\x65\x8d\xf6\xb4\xdf\xe9\xd1\xf4\xaf\x1b\xc0\xe8\x6c\x21\xe4\xb7\xe1\xf7\xc2\xaa\x4c\xc8\xbf\x9a\x47\xaf\x59\x52\xee\x19\xe9\x2c\x10\x84\x94\x21\x1b\x74\x08\xa0\xb4\xb7\x6e\x27\x5f\x4b\xb9\x1d\x39\x07\x2e\xcd\x69\xc8\x04\x70\x85\x89\x3f\x36\xe3\xe7\xa8\x35\xe5\xa7\x6b\x5f\x82\x5f\xad\x34\x2f\x13\xcb\x58\x3e\xae\xa9\xd7\xae\x00\x22\x27\x6f\xb6\xdb\x13\x3e\x42\x7e\x18\x8e\x31\x1e\x08\xd6\xb6\xe8\x82\x19\x8f\xdf\x8c\x20\xf1\x36\x66\xec\xa0\x12\x5c\x9c\x8d\x11\x0c\xf6\x42\xd7\x00\x8e\xf3\xbf\xf6\xb9\x33\xdb\x10
\xe6\xc8\x91\xeb\xf3\x87\x4d\x2b\x1b\x9e\x05\xf7\xc3\x7b\x2f\x23\x0c\x00\xdc\x65\x09\x9f\x5f\x7c\xb8\x09\xb1\x45\x12\x7b\x7b\x14\x6d\x44\x74\x07\xde\x34\xbc\xf2\x70\x33\x50\x3a\x3c\x00\x9e\xbd\xf8\x68\xa1\x2d\x50\x66\xe7\xf4\x58\x9c\x26\x91\xce\x58\x2c\xf3\x34\xe1\x3b\x80\x24\x28\xcc\x14\xf4\x70\x06\x97\x62\x6b\x8e\x82\x7d\xc1\x84\xe1\x33\x6d\x66\x65\xe6\x7f\x37\x76\x2c\x3d\xf4\xdb\x0f\x66\xf3\x3c\x60\xb9\xd8\x72\x55\xc8\x68\xae\xb6\x82\xab\x10\x43\x4a\x90\x0c\x33\xc8\xb1\x16\xa4\x58\xb0\x5a\x89\xa8\xf0\x94\xc7\xf0\x08\x71\x23\xb5\x6f\x0f\x8e\xeb\xbb\xdb\x79\xbd\x5d\xff\xde\x0a\x24\xcb\x2d\x20\x94\x69\x0d\xd1\xd5\xf8\xc8\x50\x23\x88\xd5\xd2\x95\x6b\x1f\xb5\xf0\x2f\xbb\xa6\xd8\x52\x14\x0f\x02\x18\x7d\x88\x82\xa0\xcd\xc6\x3f\x58\xb0\xe8\x90\xf4\xbd\x99\x63\x00\x24\x82\xf7\x06\x85\x2f\x6d\xb0\x10\xfa\xe8\xa8\x07\x55\x8d\x43\xf0\x05\x91\x22\x80\x2b\xf0\x05\x39\x35\x5f\xc0\x35\x6d\x5e\xc1\xd9\xbd\x88\xe7\xaa\x4a\xec\x48\x36\xa3\xdf\x70\xcc\x4b\x71\x3e\xcd\x69\x63\xc7\x78\x50\xa0\xe7\x14\xc8\xac\x3c\x8d\xb5\x4b\xfb\xef\x91\x06\xc5\x4e\x3f\xe7\xab\xca\xaa\x12\x0f\x7d\x0c\x7b\xb5\x4e\x92\xda\x23\x65\xde\x0a\xfa\xc7\x2d\x4a\x47\x5b\x87\x9c\xb6\x0e\xae\x4d\x4e\xeb\x86\x1b\xbc\xad\x8c\xb9\xb2\x7c\x2e\xab\x32\x41\x9e\xf2\xae\xac\x19\x62\xb1\xb4\xb9\xa7\x9f\x2f\x07\xd9\x39\x5d\x59\xa0\x6e\xea\x40\x3a\x01\x74\x1e\xcf\x3a\xbb\xea\x85\xca\x4b\x30\x29\xac\xb0\x21\x44\x25\xd6\xa2\x80\xdb\x3c\x2e\x13\xa4\x27\x81\x70\x0a\x30\x62\xf2\x24\x61\xb2\xc8\xe7\xca\x11\x78\x62\x6a\x0c\x9c\xb0\x36\xde\x12\xd3\x93\x0b\xaa\x80\x62\xe1\x63\xae\xc0\x0e\x93\x91\x2c\x1a\x09\x07\xbb\x50\x0c\x2c\x4d\x05\xc7\x6c\x7a\x9c\xb6\xb9\x0a\xdf\x5c\xf5\x49\xa0\xd4\x73\xd0\x8f\x7f\x8a\x2c\xf0\x1e\xc7\xad\xa9\xe2\x51\x28\x1b\xec\x9d\x79\x70\x59\x9d\x6f\x6c\x2d\x31\xf8\x10\x2e\xd8\xbc\x6a\x8a\xdc\x06\x50\xfc\xbb\x15\xb2\x6a\xa2\x32\xe1\x19\xa6\x13\xad\xca\x84\xc9\x55\x20\x59\x0e\x73\x80\xf4\x8d\x66\xba\x22\x0d\x77\xb5\x0d\xa1\xe4\x7c\x2b\x02\xe6\x18\x72\xef\x24\x01\xe2\x07\x35\x29\x10\x4a\x62\xca\x7a\x35\x65\x6f\x3d
\x41\x2d\xce\x30\xec\x89\x80\xf6\x59\xe6\x78\xfc\xb9\xf6\x06\xa4\x07\xa8\xe5\x2f\x57\xe6\x49\xf9\x22\xd8\x75\x1d\x33\x08\xf2\x31\xe3\xe0\x44\x56\x3c\xa8\x1f\xe3\xde\x4a\x7a\x62\x7e\x5a\x03\x19\xb9\x0d\xd1\xd1\x40\x7b\x2b\x8c\x6c\x64\x48\x99\xfd\x88\x86\x3a\x4a\xf2\x96\xc6\x6e\x7b\x14\xd2\x61\x1e\x47\x36\x35\xd0\x1b\x1c\xdf\xd0\x60\xe5\x84\xe0\xb1\x21\x23\xbb\xe6\xc5\x58\x24\x99\x4b\x1d\x1b\xdf\xd0\x56\xd4\xde\x90\x66\xc2\xe9\x31\xb2\x9d\x33\xf3\x9b\x47\x36\x34\x2f\x97\x47\x78\x40\x3b\x45\x22\x38\x2a\x04\x8f\x36\x55\x16\x07\xcb\xb5\xec\x7a\x00\x59\x7c\xb0\x1f\xc7\x13\x50\xcc\xfc\x9a\x03\xc9\x45\x66\x9a\x3f\x65\x1f\x94\x40\x9c\xa7\x5e\x05\x97\x0a\x35\x80\xb4\x19\x41\xee\xc6\x9d\x72\x4b\xd3\x30\x75\x67\xc9\xad\xcc\x96\x9b\x30\xee\x4b\x87\x53\x0f\x97\x0d\x9e\x22\x1d\xb6\x64\x9b\x38\xd4\x01\xe6\xe5\x30\x8a\x88\xf6\x37\x7f\x00\x97\x1e\x7f\x02\xb4\xf5\x63\xf8\xb4\xf4\xe6\x3d\xb8\x57\x9c\x4d\x76\xa8\xae\x1b\x86\xe0\xe7\x7d\xe3\x7b\xb9\xa9\x62\x66\x47\x48\x29\xde\x5e\xbc\x3d\x7d\x77\x76\x51\xd5\x3f\xfc\xe3\xed\xe9\x6d\xf5\x2f\x57\xb7\x17\x17\x67\x17\xdf\x85\x7f\xba\xbe\x3d\x39\x39\x3d\x7d\x5b\xfd\xde\xbb\xd9\xd9\x79\xed\x7b\xe6\x4f\xd5\x2f\xcd\xbe\xfd\x70\x55\x53\x5c\xb4\x72\x89\xc1\x9f\x6e\xce\xde\x9f\xbe\x5d\x7c\xb8\xad\x88\x36\xbe\xfd\xaf\x8b\xd9\xfb\xb3\x93\x45\x4b\x7b\xae\x4e\x4f\x3e\xfc\x78\x7a\xb5\x47\x73\xd1\xf7\xb7\x75\x48\x9f\x02\x5b\xf8\x68\x05\xce\x19\x5b\x65\x52\xa8\x38\xd9\x61\xa6\x88\x7d\xd9\xd6\xa0\xdf\xe1\xdd\x2b\xb7\x42\x97\x87\x24\x7c\xdc\x6c\x04\xd3\xf7\x22\x03\x1e\x2e\x2c\x8d\x48\x3b\x7c\xce\x7f\xbd\xd6\x4c\x14\x59\x33\x2a\xd0\x9b\xd7\x56\x64\x3b\x97\x39\xd9\xd7\x1c\xcf\xe1\x48\x95\xb0\x54\x64\x7d\x6d\x01\xcb\x28\x2b\xd3\x42\x2e\xbb\x53\x78\x46\xa7\xbe\x0f\x7d\x7b\x23\xe3\x70\x3b\x3d\xdb\x45\xfb\xc1\x58\xc9\x64\x39\x04\x26\x0f\x25\x3c\x56\x58\xd6\xfd\xda\x42\x8b\xd3\x72\x99\xc8\x88\xc9\xb8\xee\x4f\x21\x46\x0a\x70\x19\xd7\x89\xc9\x53\x91\x81\xa9\x6a\x5e\x00\x69\x26\x8e\x78\x59\x6c\x90\x44\x93\x12\x67\x48\x46\x66\xae\x72\x11\x65\x02\x63\x01\x22\x07\x27\x2d
\x2a\x8a\x06\x35\x41\x63\x88\x43\x26\x06\xba\xba\x69\x20\x12\xd3\x11\x23\xc0\x5f\x62\xe9\x23\x9c\xa4\xf8\xfd\xde\xa1\xa1\x16\x4b\xd4\x2c\x0d\x60\x61\x70\xc3\xe3\x87\x56\x97\xd4\xf4\xdb\x9c\xd4\x4e\x97\x13\x27\xd9\x66\x1a\xb5\x77\x63\xdf\x1a\x0b\x17\x4a\x35\xf5\x86\x4a\xa7\x8f\x4e\x32\x01\x97\x08\x41\x1a\xac\xff\x02\x70\x4d\x94\x99\x04\x09\x49\xe6\xa9\xb6\x14\x1b\x9e\xac\xd0\xe2\x30\x53\xd3\xce\xeb\x81\xe5\xdf\xe8\x3b\xa1\xae\x70\xc2\x3e\xcb\x71\xa8\xf0\xe5\xe3\x59\x85\x9c\x47\xc8\xbb\x30\x4d\x1b\xed\xaa\xb2\x99\x99\x60\x4c\x15\xf8\x4e\x08\x3e\xc6\x04\x24\xaf\x19\x60\x93\x3a\x57\x2b\xf9\x8b\x29\x70\xae\x44\x2b\x6b\x3a\x80\xc9\x2c\xbf\xa3\x3b\x97\x01\x38\x87\x24\x79\x77\x42\x81\xa2\x29\xd0\xf3\xed\x5f\xb3\xe3\xfc\xe7\xcd\xb9\xe8\x71\xe8\x83\xcf\x4f\x56\x84\x5e\xc3\x28\x8f\x1d\xa7\x02\x33\xc2\x1c\x0b\x06\xac\x9b\x93\xf3\xb3\xd3\x8b\x9b\xc5\xc9\xd5\xe9\xdb\xd3\x8b\x9b\xb3\xd9\xf9\xf5\xd0\xed\xf7\x14\x59\x7c\xb5\xdd\x57\x4f\x66\x73\x27\xc4\x31\xed\x3c\x9f\x4c\xee\x3a\xe5\xb7\x1d\x4c\xc9\xfe\xd6\xcb\x38\x5d\xc4\x32\x8f\xcc\xf5\xb7\x5b\x08\x15\x83\xdc\xc4\xa3\x96\x6a\x7b\x51\xf5\x5e\xb8\x6f\x30\xf7\x0d\x7b\x82\xe0\x6d\x77\x6f\x57\xb4\xfb\x1c\x20\x99\xe0\x86\xcc\x84\xd9\xfc\x71\x85\xe5\x63\xba\x5f\x63\xcc\x14\x77\x58\xdf\xaa\x45\xd4\xfb\x84\xed\x95\x79\x5e\x02\x99\x88\xfd\x1a\xe0\x51\x3b\x46\x85\x38\x80\x43\xcd\x0b\x19\xe8\xb5\x33\x99\xcf\xd5\x96\xab\x98\x17\x3a\xdb\x75\x74\x71\xd8\xe1\x19\x6e\x9b\xea\x11\x1a\x5e\xd9\x4a\x88\xd8\xce\x02\x7e\x95\xab\xfa\x52\x42\x65\x8c\x9b\x0f\x3f\x9c\x5e\x5c\x2f\x4e\x2f\x7e\x5c\x5c\x5e\x9d\xbe\x3b\xfb\x93\x83\xc9\xa6\x3c\x6f\xd3\x67\x4e\x33\x61\x4e\x17\x4b\x34\xd6\x7a\xbe\xa0\x68\xb2\x2d\x87\x84\x32\xe5\x6a\xae\xec\xc9\x92\xf9\xe2\x37\x99\x2e\xd7\x9b\xf6\x82\xea\xad\xbc\x9c\xdd\x7c\xff\xa8\x66\x02\x0d\x24\x2a\xab\xe2\x6e\x6b\xc2\x85\xe5\x8a\xce\x3d\xc4\x18\xd7\x9a\x07\x64\xa6\xf0\xd5\xb6\x28\x43\xc7\x89\xf6\xa8\xd7\x4b\xf3\xd0\xea\x35\xfe\x5b\xbe\xde\xb5\x80\x6e\x82\x73\xb3\x72\x8d\x00\x7c\x1d\x05\xba\x1b\xa5\xbd\x69\xf9\x5b\xe5\x06\xfb\xea
\x28\x11\xeb\xb5\x88\x71\x79\xd5\x0b\x26\x1f\x1c\x1d\x81\x91\xbf\xd7\xdb\x46\x91\x24\x74\x0f\xb8\x98\x1d\xde\x6b\xf8\x01\x7e\xe9\x7e\xd2\x7e\x56\x9c\x10\x95\x13\xc4\x37\x0b\xae\x3a\x02\xc9\xfb\xf3\xc1\xda\x8b\xff\x90\x31\x97\xaa\x47\x0e\x13\x1b\x32\xf0\xfb\xa0\x0b\xf0\x72\x38\xbe\xd5\xb5\xe3\x4a\xa4\x09\x8f\x84\x4b\x70\x41\x0e\x5e\x78\xd7\x3f\x26\x80\x47\x42\xc5\x8a\xfc\x2d\x81\x80\xb1\xd7\x66\x6b\x5b\x02\xe0\xb9\xbd\xb2\xe7\xf1\xf3\xbb\x56\x7a\x1f\x6e\xc4\xbc\x09\x8e\x66\x54\x8a\xa4\xbc\x08\xf4\x45\x81\xfc\x6a\x27\x66\x7d\xd4\x72\xa8\xd5\xfc\x23\x4d\x3c\xbe\x99\xab\x8e\x6e\x6e\xb9\x6d\xdd\xf2\x70\xa6\x63\x9f\xbf\xb0\x28\xb2\x5e\x3a\xec\xa7\x08\x47\x5c\x66\x7a\x2b\x73\x31\x2b\x8a\x4c\x2e\xcb\x50\x0f\x78\x24\x60\xae\xf2\x38\xf1\x1d\x4e\x33\x1d\x97\x91\x25\xb0\x82\xde\x7a\xd8\x0f\x79\xf9\xac\xd5\x11\xb3\x23\xb3\xfa\xe8\xe5\x26\xe2\x23\xc8\xf6\x40\x86\xb5\xb6\x18\x9b\x3d\x18\x3b\x7c\x7f\x97\xf6\x2a\x7f\xe2\x9c\xd1\xee\xc1\xb4\x6b\x60\x58\x1a\x38\xb3\x5f\x07\x0b\xb8\x03\x35\x45\xcb\x65\xc9\x31\x80\x5e\xb5\x51\xba\xf8\x6a\xdc\x55\x33\x0e\xdc\x35\x0c\x1b\x53\x4d\xa7\x42\xbb\x61\xc3\x73\x34\xe7\x8b\x68\x53\x6d\x38\xf4\xa6\xca\xdb\x5b\x6f\xae\x33\x8f\x0f\x73\x9b\x0c\x0a\xa3\x4d\xd0\xd1\x20\xc9\xb1\x5d\xd1\x60\x75\x82\xd2\x9d\xfe\x7b\x4c\xb9\x58\xfc\x5c\x8a\x31\xba\xca\x36\x55\xe3\x8f\xf0\xb3\xbd\x80\x14\x89\xd8\x2d\xe7\x7b\x2d\xe4\xd6\x58\x40\x3c\x8b\x36\x6c\xc9\x73\x22\x04\x0c\xd9\x12\x50\x00\x9e\x49\xf3\x2b\x1e\x15\x24\x88\x6b\xab\xb5\xa2\xb8\x37\x16\x0a\x69\xcc\x5a\xef\xf5\x68\x5b\x6e\xfb\x06\x60\x8c\xf7\xda\x36\xe3\xec\xed\xa8\x18\x42\x68\x87\xbb\x77\x32\x5e\xb1\x70\x3b\x25\xbc\x54\xd1\x86\xa5\x09\x47\x42\x89\x0d\xcf\xf1\xa0\xb0\x08\x1d\xbe\x94\x89\x2c\x80\xa9\x0b\x03\xc7\xb5\x75\x6b\x1e\xcf\x3c\xbb\xb3\x82\x07\xdc\xd3\xb2\xf5\x1d\x25\x07\x22\xa1\x5d\xaf\x3e\x29\x16\xda\x1f\x84\xe1\xe1\x3e\x6c\xb3\x13\x0e\xda\x4f\x87\xb9\xde\x60\xb3\xfb\xbe\x8c\x8b\x0e\x51\x89\x97\xf5\x9f\xd7\xc6\x1b\x29\x20\x0f\xa2\xe9\xed\xcd\xcb\x7a\x16\xd0\xb9\xcf\x2a\xeb\xbf\x46\x9b\x1d\x6e\x31\x82
\xc7\x03\x9f\x48\xba\x68\x84\x11\x53\x17\x36\x6a\xdd\xf7\xab\x44\xf3\xa2\x3f\xcb\x0d\x75\x8a\xba\xca\x8e\x75\xb9\xec\x52\xc6\xc0\x56\x3d\x3e\x87\xce\x1e\xff\x4f\xe5\x73\x0f\xef\x51\x5e\x08\x73\xfa\x3e\x6e\x40\xcd\xaf\x8f\xe0\xe7\xed\x85\x53\x16\xf3\x68\x46\x0a\xb7\x0c\xbc\x5a\x9e\xb3\xfd\x01\x92\xda\xb2\x9d\xea\x46\xde\x41\x39\x8f\x87\xcd\x97\x54\x7b\x96\xd2\x7e\x01\xae\xdf\x7f\x35\x24\x1b\xf1\x8f\x25\x37\x17\xc0\x87\xd5\x35\x12\x84\x1d\xd2\xe9\x42\x36\xb7\x55\xfb\x31\x50\xaf\xf5\xa6\x1a\xa5\x0d\x17\xfe\x60\xb6\x83\xb6\xde\x5c\x9b\x5f\x0f\x3f\x76\xcf\x2a\xde\xd8\x34\x93\x1a\x88\xb2\xf4\xaa\x62\x6b\xb4\x9c\xc4\xad\xf5\x1e\x30\x92\x3f\x97\xa2\x14\x66\x01\x2d\xcb\x78\xdd\x0c\x96\x8c\x78\x70\xf9\x2e\x6d\xf4\x03\xdb\x96\xd1\x86\xd9\xc2\x59\x2c\x12\xbe\xab\x9a\x51\xe6\xad\x51\x68\x20\x31\x1e\xc5\x17\x18\x50\xcf\x47\x65\x5e\xe8\x2d\xe0\xd4\x7d\xb9\x59\xa9\x60\x97\x33\x6e\x77\x57\xdb\x85\x56\xa1\xd4\x7c\x64\x84\xfc\xfa\xf2\xf4\xe4\xec\xdd\x59\x2d\x3c\x3d\xbb\xfe\x21\xfc\xf7\x4f\x1f\xae\x7e\x78\x77\xfe\xe1\xa7\xf0\x6f\xe7\xb3\xdb\x8b\x93\xef\x17\x97\xe7\xb3\x8b\x4a\x10\x7b\x76\x33\xbb\x3e\xbd\xd9\x13\xa7\x6e\xd6\xda\x3d\x11\x3c\x60\xfc\xb4\xc8\x79\x2b\x67\x63\xdd\x55\x54\xeb\x1b\x36\xb3\xfc\xa7\x15\x86\x5e\x8b\x35\x00\x70\x52\x82\x18\x4b\x84\x24\xbc\xe5\x05\x3f\xe1\x05\x4f\xf4\x7a\xca\x66\x8c\xf2\x0a\x30\x5f\x24\x37\x26\x21\x91\x43\x9a\xd9\xc1\x22\x8c\x5d\x18\x79\x57\x90\xd7\xeb\xd6\x2b\xa2\x65\x4d\x44\xa8\xec\x64\x93\x3c\xe7\xea\xf4\x5e\xa8\xa2\x04\x43\x9b\x27\x09\xa3\x6a\xed\x17\x02\x56\x10\xdb\xca\x5c\x6e\x65\xc2\x33\x2f\xad\xfc\x81\xca\x82\xc7\xae\x6d\xab\x63\xa5\x6b\x52\x4e\x58\x7f\xc0\xed\x19\x83\x76\x9f\x9c\x9f\x81\xa1\x1b\x15\x56\x37\xd0\x56\x3e\x57\x48\xfb\x49\x35\x6e\x39\xe4\x30\x15\x9a\x1c\xf4\x58\x3d\x7d\xb9\x7b\x21\x1e\x64\x58\xd9\x50\xd6\x73\x39\x26\x5c\x23\xed\x7f\x9c\xaa\x22\xdb\x0d\xb6\x5e\x6f\x80\xd1\x21\x87\x77\x1d\x41\x22\xab\x72\xcb\xe8\x3f\x65\xb6\xf4\x0b\x30\x69\x2d\x5e\x97\xc2\x7b\x2e\x8a\x87\xf0\xa8\x8e\x27\x51\x62\x6e\xde\x5f\xeb\x38\x84\x2c\x60\x30
\x0a\x4b\x5d\xaa\x38\x27\xf0\xe6\x56\xaa\xe3\x2d\xff\xe5\x95\xed\x29\x92\xd8\x38\xd1\x33\x60\x4c\x14\x89\x79\x0f\xee\xcc\x21\xd7\x3f\x5c\x73\xd5\x33\x5e\xfb\xdf\x04\xf6\x64\x05\x97\x81\xf7\xef\x20\x0c\xf5\x5e\xec\xda\xe6\xaf\x21\x5c\xc9\x42\xf5\x05\x28\x24\xcd\x84\xf9\xa2\xc3\xb8\x26\x08\x5d\x76\xff\x86\x5c\x96\x8a\xb8\x76\xfb\xd9\x1d\xc2\x46\x0e\xda\x36\xad\x80\x95\xe1\x86\xcf\x60\xe5\x51\xaa\xc9\xcc\x19\xc2\x57\x6c\xe4\x84\x72\x77\x28\x2e\x6f\x26\xeb\xbf\xf5\x92\xad\x20\x91\x8d\xfc\x04\x99\x80\x48\x19\x4c\x85\x95\xca\x01\x5e\xbd\x06\x26\xc6\x2e\x81\x44\xe4\x10\x3f\x52\xe6\x51\x2d\x7e\x2e\x09\x02\xf0\xfa\xcb\x71\xf7\x6c\x81\x7a\x0b\x48\xb0\x5d\x57\x22\x70\x77\x39\xb4\xab\x54\xb2\x8d\x6c\xf3\xaa\x54\xe6\x2a\x7e\x0a\xf4\xd4\xf0\xf0\x78\xad\x52\xfa\xe7\xde\x5c\x33\x1b\xd9\xc9\xf0\xfb\xcf\xc6\x9d\xfc\x63\x8d\x32\x99\xaa\x83\xcc\x06\x2a\x3d\xbc\xd0\x96\x3c\xba\x7b\xe0\x59\x8c\xee\x7f\x80\x33\x4d\xd9\xf7\xfa\x41\xdc\x8b\x6c\xc2\x22\x91\x15\x9c\xf8\x0a\x73\xc0\x73\xc0\x86\xa2\x72\xe6\x0a\x12\x7d\x90\xfc\x51\xe5\x65\x26\x58\x21\xd7\x9b\x42\x64\x21\x1a\x47\x67\xe6\x38\x2a\x90\xaa\x36\x15\x11\x11\xb2\x75\x0c\xc0\x2a\xe1\xf7\x4d\x02\xc6\xc7\x30\xc9\xb0\x33\x97\xad\x6c\xc3\xdd\x56\x7e\xac\x0f\x3f\x45\x03\x46\x87\x26\x52\x68\x4d\xd8\x5a\x27\x5c\xad\xa7\xd3\x29\x48\x6d\xbc\x1a\xb5\xd0\xa9\xc0\x30\x80\xee\x50\xfa\x89\xd6\xb9\x48\x76\x8e\x44\xcc\xe5\x51\x01\x70\xf7\x97\x42\xa8\x5c\xa2\x63\xab\x65\xf9\x5f\xd7\x83\x4b\x9f\x36\x16\xd7\xfe\x3c\x1f\x9d\xa5\xdb\x51\x0e\xa8\x99\x8e\x28\x09\xbf\xdf\xfe\xf2\x7a\x54\xd6\x79\x7b\x59\x4a\xab\xb1\xa9\xd4\x3f\x6a\xd9\x01\x05\x79\x14\xd9\x68\x6b\x49\x44\x84\xf4\xa8\xf4\xd3\xf6\x31\x6b\x64\x04\x1f\x90\x0c\xdc\x93\xd7\x3b\x32\xa5\x77\x88\x23\xe0\xba\x3e\xdd\xa3\xb7\xc5\x7e\x81\xb5\xd6\x0e\x8d\x4c\x99\xf6\xdc\x06\x63\x4c\x27\xcc\xba\x4c\x76\xf0\xe2\x72\x09\xd4\x10\x1e\x88\x83\xa8\x52\x25\x68\x06\xa9\x7c\x3e\xea\xe6\x08\xea\x82\x20\x5b\x5e\xe8\x8c\xaf\x05\xdb\x8a\x58\x96\xdb\xd6\xc3\xc6\x35\xf7\x10\xf8\xa8\x4e\xca\x6d\x37\x55\xe8\xa1\x06\xb4\x6f\x24
\xfe\xd7\x09\x54\x37\x9c\x43\xc7\x65\x46\x58\x9d\x4b\x6a\x2f\x86\x90\x68\xac\xcd\x4d\x99\xc9\x1c\x58\x76\x1f\x93\x39\xeb\x8a\xc1\xa2\x21\x00\xbf\x4b\xd1\xc9\x5e\x99\xdd\x23\x1b\x19\xa5\x9f\xe4\x38\xab\x10\xb5\xef\xbe\x14\xea\xa0\xd4\xf1\x6a\x77\x99\x2e\x1b\xdc\x53\x83\x80\x12\x60\x36\x06\xda\x17\x84\x9a\x83\x02\x09\xda\x53\x68\xb6\xb2\xb9\x98\x77\x22\xa0\x3e\x8c\x41\x15\xe3\x01\x29\x9f\x7e\xf8\x3a\xb7\x20\x20\xc2\x69\x79\x8b\xa5\xf0\x95\x60\x04\xe8\xfe\xb5\x85\xe7\x61\x0f\xb1\x08\x20\x28\x8c\xb9\x2a\x5a\x0b\xf0\xe8\x55\x28\x0b\x7f\xf2\x23\x2f\x93\xf6\xaf\x53\xf9\xf0\x55\x54\x4d\x9d\xfd\x74\xcd\x70\xa8\x49\x3f\x21\xeb\x6b\x68\x50\xc8\x7e\x80\x20\x0c\xd7\xe2\x11\x96\x60\x65\x1e\x70\xd0\xad\x80\x86\x19\x76\x51\x44\x1b\x6f\x79\x00\x41\xa3\x23\x96\x24\x49\x6c\xea\xe7\xd6\x2b\x42\x20\xf6\x3a\x04\xb1\xca\xb5\xd2\xa1\x98\x91\x56\x02\x42\x71\xe6\x00\xd2\x61\xb1\x4c\x16\xfb\x91\x82\x23\x59\x09\xf7\x2d\xb5\x42\x23\x02\x8c\xfa\x59\x89\x53\xc3\x93\x42\x22\x5d\x95\x85\x59\xe3\x9b\x88\x14\x96\xeb\x4a\x01\x55\x02\x90\xb9\xaa\x56\xd5\x18\x24\x0b\xe5\x93\x99\x40\x82\xef\xdc\x58\x6f\x85\xbc\x37\x1b\xb5\xb9\xac\xdd\x02\x85\x13\xa0\xb9\xf6\x28\x6c\xcb\x02\x96\xf0\x3b\xb1\xcb\x43\x39\x67\x5a\x51\xac\x6b\x41\x4a\xd3\x1f\x9a\xaf\xfd\x53\x01\x03\xb7\xc8\xbc\x28\xe3\xb0\xbb\x0c\x2b\x7d\x6f\x7e\xdc\x83\x11\x6e\x14\x6e\xd6\xa0\x4f\x76\xf5\x3e\x45\x3a\x26\xfc\x38\xd3\x1c\x7a\x18\x20\x80\x3c\x43\x18\x67\x98\xb9\x04\x0f\x5f\xf3\xbe\x9d\x2b\x12\x12\x08\x2e\x39\x73\xe0\x34\xa7\x8d\x32\xf0\x91\xbe\x7c\x57\x61\x0f\x02\x6a\x55\x4b\x33\x5b\xad\xd2\x46\x97\x41\x19\x0f\x96\x07\x54\x8d\x39\xca\xd6\x87\xd7\x5a\xe1\x23\xb1\xa5\x34\xb9\x9d\x78\xd2\x20\x11\x10\xbf\x49\xec\xa2\xa8\x0b\x8e\xaf\x9f\x48\x98\xe1\x9b\xa9\x56\x28\xa7\x05\x72\x5e\x9f\x9e\x5c\x9d\xde\x7c\x32\xbc\xa9\x05\x7b\x8e\x06\x9c\xda\x76\xbe\x3d\x7d\x37\xbb\x3d\xbf\x59\xbc\x3d\xbb\x7a\x0e\xc4\x29\x7d\xf4\x08\xc8\xe9\x35\xe9\x93\x9c\x68\x55\x88\x5f\x0e\xba\x93\xb3\x52\x2d\xf8\x88\xd4\x27\xa7\x50\xd4\x67\xee\x60\xa1\x4d\x7d\x15\x27\x7e\x42\xdc\xb6
\x84\x3a\xb1\x72\x2a\x2b\xef\x34\x5c\xc9\x24\x81\x4c\x70\xe7\x5e\xa7\x2c\x43\x33\xa8\x70\xfe\x58\x3a\x5f\x3a\x53\xe7\x6a\x59\x91\xbf\x01\x97\xdf\xc6\x3c\x82\x31\x07\x3c\x35\x03\x90\x49\xc8\xb0\xed\x93\x60\x59\x4b\x25\x7c\x33\x60\xd6\x4c\xfb\x3a\x69\xea\x69\x12\x9f\x13\x59\x47\x86\xd7\x50\x5b\xd3\xae\xb8\xca\xfa\xb4\xe6\xa7\xfd\xd0\xf5\x10\x37\xb1\x54\x68\x98\x56\x76\xf3\x75\xfb\xd2\x3d\xf6\x5b\x00\xc6\xdd\xcc\x24\x87\x18\x44\x5e\xf0\xac\xf0\x13\x49\x13\x81\xd2\x6c\x3e\x38\x71\x27\x11\x81\xa6\x57\xb5\x71\x36\x47\xa1\x19\x6b\x09\x91\x0a\x4e\xe4\x36\x51\x52\xe6\x85\xc8\xc8\x6d\x32\xfb\xe9\x7a\xae\xbe\x35\xd7\xd7\x2b\xba\x85\x48\xbe\x0b\xab\x40\xa4\x8e\xae\xd4\x6f\x2d\x94\xf0\x04\x7b\x89\x3e\xea\xad\xe0\x2a\x67\xb0\x35\x92\x44\x64\x7e\x65\x60\x7b\x84\x88\x49\xc6\x1a\xa8\x9e\xfd\xef\x5f\x31\x02\xb7\x9a\xa1\x30\xed\xa5\x4f\x33\xb1\xd5\x45\x73\x3d\x75\x11\x0d\x00\xe2\xfc\x39\x57\x4e\x4b\xe2\xd3\xd0\x55\x44\x60\xfd\xd6\x45\x54\x4d\x43\x1a\xb4\x96\x6e\xb0\xb8\xdf\x96\xd2\x13\x2e\xa5\x01\xf7\x7a\x78\x4b\xb0\x8d\x36\x07\xa8\xd3\xb6\xf2\x61\x66\x47\x74\x92\x00\xca\xcd\x0c\x63\xeb\xad\x53\xd3\x77\x3d\x04\xfb\x01\x45\x1d\x86\xd0\x9e\xb5\x30\x2a\x79\x21\x41\x1b\xdb\xe9\x95\x8e\x7d\x1e\xe6\xc2\x99\xc5\xaa\x2a\x5d\x58\x0e\x12\x07\x0f\x25\xac\xab\xf9\x82\x23\xbf\xe9\x6d\x23\x11\xca\x58\x2b\x65\x71\xa0\xfc\xe2\x4d\x88\xa9\xad\x64\x65\x63\x2b\x42\x3e\x07\xcb\xe1\xe0\x38\x60\xc6\x2c\xbe\xc7\x0b\xfc\x56\xd7\x9c\xe3\x13\x7d\x14\xd8\xe1\xe2\xc3\xc5\x69\x08\x55\x38\xbb\xb8\x39\xfd\xee\xf4\xaa\x92\xcf\x7f\xfe\x61\x56\xc9\xc9\xbf\xbe\xb9\xaa\xa5\xe2\x7f\xfb\xe1\xc3\xf9\x69\x03\xf3\x70\x7a\x73\xf6\xbe\x52\xf8\xdb\xdb\xab\xd9\xcd\xd9\x87\xca\xf7\xbe\x3d\xbb\x98\x5d\xfd\x57\xf8\x97\xd3\xab\xab\x0f\x57\xb5\xfa\x6e\x4f\xfa\xd1\x13\x95\x6e\xb4\xbb\x7f\x7c\x70\x36\xa0\x56\x6d\xdd\xc6\x55\x01\xe4\x03\x76\xf1\x40\xe4\xd9\xbe\xe5\x68\xd3\xf5\xe3\x50\x8e\x03\x37\x86\x69\xea\xa8\x55\xf7\xf4\x8a\xcd\x95\xa1\x4b\xf9\x61\xc7\x9e\xb9\xd5\x16\x4f\x81\x04\xec\x35\x00\x5d\x2d\x35\xc7\x2d\x09\xa4\xe3\xd0\xa6\x10
\xc1\x5a\xf3\x4e\xbd\x32\x15\x3f\x7b\x4b\x6d\x1d\xfb\xda\xe9\xa9\xbc\xf6\x30\x22\x3d\x15\x1b\x4a\x5f\xa3\x83\xca\x2c\xd9\x80\x8c\xad\xa1\x60\x3f\x0c\x61\xf7\xa6\x1b\x66\xe5\x04\xcb\xb1\x4b\x5a\xb7\x3d\x6d\xa9\x9f\x7d\x6f\x6c\xfb\xa9\x92\x66\xdb\x6b\x54\x2d\x23\xda\x0d\x94\x59\x63\xda\x7d\xc3\xf3\xbb\xb1\xed\xa6\x4a\x9a\xed\x06\xb3\xef\x51\xed\x06\x87\x77\xd1\x4e\xa3\x33\xe2\x10\x0b\x8b\xa9\x36\xcf\xe5\xf8\xbb\xaf\x04\x0a\xd6\xc3\xda\x68\x36\xc0\xf3\x3e\x2f\x53\x3e\x3c\x90\x01\xad\x71\xdb\x95\xd7\x58\xe5\xaf\xe1\x53\xe8\xe1\x32\x13\xfc\x2e\xd6\x0f\x34\x1f\x75\x64\x28\x1b\x74\x9a\x57\x07\xc8\x9c\xe1\xf6\x8a\x28\x32\x8a\x40\x21\x4a\xcd\x17\x0f\x30\x39\x49\xbc\xe8\x68\x83\x05\xd2\xcb\x75\x22\x22\xa0\x7e\x52\x7e\x76\xe6\x0a\xad\xf9\x36\xf9\x66\x33\xab\xa6\x45\x44\x1d\x02\x5d\x75\x36\x34\x06\xd7\xf3\x60\x62\x29\x0f\xa8\xcc\x00\x4c\xb7\xcc\xe0\xcd\x04\x03\x22\x15\x38\x93\x33\xf3\xe0\xc9\x44\x24\x73\x11\x28\xc6\xb5\xde\xd8\x3f\x1f\x26\x85\x52\xf0\xa2\xd5\xed\x3a\xd8\x1f\xce\xa3\xa2\xe4\x09\x83\x74\x25\x62\x60\x44\x5f\x25\xfe\x25\xe2\x0a\x53\x63\x0a\xb1\x4d\x21\xab\x3f\xcc\xe9\x98\xab\x9f\x00\x28\x81\x53\xf0\x22\x67\xdf\x01\xe4\xc1\x7e\x99\x2e\xe1\x2d\x2f\xe0\x2e\xfe\x23\xd6\xe1\x3e\x9b\xce\x55\x45\x81\x29\xf8\x55\x45\x8c\x69\x3a\x57\x56\xad\x23\xd6\x51\x3e\x85\x17\xdf\x54\x67\xeb\x63\x52\x33\x37\x8b\x5d\xdf\x2d\xb5\xbe\x3b\x16\xea\x18\x7c\x52\xc5\x31\x2f\x0b\x7d\x0c\x70\x29\x9c\xff\xfc\xd8\x8a\x1e\x5b\xd5\xe8\xfc\x78\x23\xef\x05\xfc\xbf\xe9\xa6\xd8\x26\xff\x27\x4f\x37\xbf\x1c\xad\x93\xec\xc8\xfc\xf6\x28\xfc\xed\x91\xfd\xed\x91\xfd\xed\x91\xf9\x19\xfe\xbf\x74\x87\xe1\x1d\xf1\x0b\x37\x77\xd9\x64\xae\xa4\xca\x45\x56\x80\xf5\xf3\x90\xc9\xc2\x4b\x5d\xed\xd8\x8b\xbf\xfd\x8d\x4d\x33\xfe\x80\x19\xb1\x6f\x79\xc1\x2f\xd1\xbf\xf8\x8f\x7f\xbc\x80\x80\x2a\x66\x31\xa5\x3c\xfb\xb9\x14\xc5\x5c\xe5\xc2\x6c\x42\xf6\x7f\xe7\x0a\x22\xb0\xdb\xdd\xa2\x40\xbf\x2b\xfa\x20\xe3\x9c\x7d\x83\x65\x9e\x21\x1b\x69\x9c\x9b\x92\x3a\xd2\x09\x24\x4f\x5a\x74\xf2\x3b\x5c\xf4\x3f\x27\x6f\xe9\xfb\x23\xb6\xf5\xcf
\x49\x75\x57\x5b\xb1\xa5\xfc\xe7\x04\x2e\xd0\x44\x73\x0b\xd6\x62\x6e\xf1\xc2\x3b\x99\x1a\xd7\xb6\x47\x1a\xd0\x80\x67\x0d\xd3\xb7\xef\x95\x6b\x64\x44\xb7\x9e\xfb\xc6\x31\x02\xb1\x02\x1f\x87\x80\xe8\xb9\x34\x3b\xe4\x1a\x3d\xa1\x60\xb9\x61\xcf\xc1\x26\xa5\xd0\xb9\x2b\x0f\x1d\x17\xf9\xef\xdf\x1c\x1f\x4f\xd8\x3a\x87\xff\x59\xfe\x0c\xff\x03\xe8\xa1\xa7\x22\xf5\x6d\x0c\xa6\x03\xc2\x35\x67\x79\xff\x4c\x3c\x05\x8a\xee\x53\xf0\xc8\xd7\x96\xe9\xb7\xa5\x8a\x13\xe1\x53\x1b\x2b\x21\x91\x44\x9b\x99\xb4\x13\xd5\x54\x1e\x82\x39\x5e\x8a\x88\x9b\x83\xaf\x51\x37\x82\x4b\xf5\xaa\x10\x0a\xbd\x61\x99\x57\x7b\xe4\xe8\xb9\x02\xb3\x18\xa0\x90\xbc\x20\xc8\xb9\x80\x3f\x42\x25\x40\xcc\x3e\xa9\x7f\xc4\x76\xba\x24\x8e\x71\x60\xce\x8d\x45\x94\x80\x90\x83\x65\x0f\x62\x99\x28\xca\x4c\x31\xce\x52\xae\x62\x9e\xc3\x0a\x5c\x65\x10\xed\xcc\x18\x6f\x36\x74\x82\x70\x5c\x5d\x16\xc0\x89\x85\xc8\x82\x70\x24\x90\x04\x3e\x68\xf3\x24\x68\x04\xde\x09\xc0\x45\xdd\xf8\xe1\x74\xae\xac\x1e\x21\x61\xe1\xd0\x53\x16\xe9\x74\x47\x8c\x47\xf5\x41\x97\xd6\x73\x46\xc3\x3d\xf1\x78\x93\xfa\x77\x27\x4c\x56\x43\x6b\xc0\x37\x5f\x04\x12\xef\x56\x24\xff\xa5\x50\x91\x8e\x45\x96\xbf\x32\xdb\x50\xba\x77\x07\xda\x0f\x32\xf7\x93\x01\xa7\x94\xb9\xdc\xc8\x5b\x68\x8a\x77\x02\x53\x66\x74\x2a\x0c\xe5\x6d\x76\xce\xfe\xad\xf2\x6b\x47\xc1\xb4\xb5\x97\xfe\xf3\x93\x22\x62\x42\x5c\xa7\x7d\x73\x3e\xde\x05\x81\x5b\x36\x3c\x71\xb1\x50\xb4\x71\xc8\x38\xb1\x7a\xda\xb2\x00\x85\xcc\x4c\xe4\xc5\x5c\xd1\x0d\x3c\x61\x2b\xc1\x8d\x9d\x37\x61\x51\x7e\x8f\x87\x31\x5e\xf7\xc5\x83\xf6\x18\x1c\x2b\x6f\x03\x60\xd8\x4a\xe1\xde\x49\x8c\x5f\xe3\x94\x81\x8d\x00\x83\xae\x17\xba\x33\x55\x60\xb0\x5a\x0f\xc4\x47\x8c\x83\x55\x4b\xa9\x2b\xac\x85\x62\x3d\x30\x12\x3b\x0c\x14\xb3\x7a\x3b\xf0\x03\x73\xf0\x60\xef\x10\x06\x12\x1c\x8e\x60\x71\x13\x96\x16\xf7\x99\x8f\xe1\x86\x94\xf5\xe0\x9b\xe9\xda\x54\x3d\x03\x01\x0d\x78\x9c\xdf\xc2\xfc\x74\xaf\xc3\x2a\x17\x99\x95\x72\xc1\xbe\x22\xc1\xe4\x46\x66\xf1\x51\xca\xb3\x62\x67\x97\x6f\x22\x97\xa0\x00\x91\xc8\x3b\xc1\x66\x59\xa6\x1f\x9e\x7a
\x14\x3a\x8f\x96\xae\x17\xf6\x21\x48\xf6\xb1\xaf\xfc\x56\x7a\xd9\xba\xbb\xe3\x71\x54\xb6\x5d\x8e\x8f\xd6\x7a\x32\x51\x64\xbb\x85\x59\x88\xdb\xb4\xf3\xa4\x18\x94\x34\x31\xdc\xc8\x1d\xc7\x92\x5b\x73\x61\x74\xb2\xe4\x56\x66\xf5\xd7\xc3\x92\xdb\x42\x80\xdb\x64\xc9\x3d\xbb\x38\xbb\x39\x9b\x9d\x9f\xfd\x7f\xb5\x12\x7f\x9a\x9d\xdd\x9c\x5d\x7c\xb7\x78\xf7\xe1\x6a\x71\x75\x7a\xfd\xe1\xf6\xea\xe4\xb4\x9f\xf6\xaa\xd9\x7a\x6f\x82\x1f\xb1\xb0\x9e\x37\xec\x26\x00\x6a\x60\xb2\x01\xd9\xdf\xa4\x8f\x0b\xab\xca\x6c\x66\xa9\xd6\x13\xd8\xa8\x6f\xd8\x69\x96\x9d\x6d\xf9\x5a\x5c\x96\x49\x02\x70\x2a\xcc\xec\x39\xc9\x04\x3c\x3c\x27\xec\x52\xc7\x67\xc1\xef\x20\x1d\xb1\xb5\x1b\x50\x3f\x8f\xe3\x4c\xe4\x39\x56\x3f\xa1\xfa\x03\xf0\x90\x4b\x75\x24\xf0\x1c\xbf\xe7\x32\x31\xef\xb7\x37\xec\x5b\x1e\xdd\xe9\xd5\x0a\xd3\x67\x26\x2e\x71\x8a\xfd\x5c\xea\x82\x33\xf1\x4b\x04\x54\x6f\xed\xeb\xe4\x5c\xaf\x3f\x03\x54\x79\x40\x78\xaa\xe3\x91\x02\x52\x77\x8b\xf6\xeb\xbc\xfd\x20\xa0\x5e\xbe\xc7\x9f\xbe\xc3\x5f\xb6\x3b\x28\x8b\xe4\x09\xd2\xe3\xcf\xf5\xba\x5d\x78\x08\xac\x6b\x52\x4b\xa2\x40\x42\x44\xec\x22\x7a\xcd\x72\xa9\xee\xe6\xea\xa7\x8d\x50\x4c\x97\x19\xfe\x09\x9e\xf9\xc6\xcc\x4c\xca\x7c\x23\x40\xa6\x7a\xc2\x1e\x04\xdb\xf2\x1d\x9a\xcd\xf0\x26\x70\x6a\x29\xb0\x64\xe0\x16\x31\xbf\x4e\xa4\x32\xa7\x45\x2a\x6d\x5e\x42\x7d\xea\x9f\xe2\xc5\x65\x89\x0e\xf9\xe1\x3c\xc4\x7d\xf7\x69\x05\x9f\x07\xae\x32\x8f\x9b\xb4\x00\x21\x3a\xb9\x41\x54\x56\xeb\xbb\x32\xf5\x94\xa8\x2f\x6c\x70\x12\x86\xfb\x5e\xcb\x98\xc5\x65\x9a\xc8\xc8\x9d\xbb\x0f\x3a\xeb\xe4\x7d\xc6\x04\x9a\xe1\xb7\x4e\x3d\x2d\xac\xaf\x63\x2d\xd9\x39\x01\x92\xae\x87\x01\xfa\x99\x39\xb0\x99\x54\x51\x52\x82\xcc\x5c\x99\x8b\xec\xc8\x49\x47\xbb\x5c\xbf\x5f\x3f\x49\xb6\x27\xe1\x3c\x3c\xad\x2d\x4c\x3a\x4f\xf4\x5a\x46\x3c\x09\xc1\xcd\x1e\x15\xe1\x58\x78\xed\xb6\x27\x31\x61\xc8\x83\xb0\x0d\xea\x24\xd2\x4a\x33\x01\x44\xd0\x0b\x38\xca\x17\x74\xdc\x1d\xd2\xee\x15\x33\x0f\x74\x6c\x57\xc8\x91\x6b\xc3\x0b\xf6\x86\xf3\x75\x5b\x25\x36\x30\x31\x51\xc2\x9f\xe9\x07\x25\x32\xb0\x60\x01\xf6
\x61\x7a\xaa\x34\xd8\x26\x4e\x9d\xcd\xe1\x93\xad\x3a\xe1\xca\x01\xb1\x31\x73\x76\x2d\xef\x85\xfa\xf4\xa4\xe6\x41\x05\x11\x8f\x36\x62\x61\xed\xf2\xa7\x3e\xb2\xdc\x05\x30\xf2\xb0\xb2\x32\x29\xe1\x51\xea\xc2\x9b\xf0\x74\xc2\x16\x37\xcf\x2e\x0c\x24\xf6\x64\x64\x99\x46\x2c\x62\x11\xdd\x7d\xf2\xa3\xd9\x83\xac\x6c\x43\x18\x67\x6f\x45\x74\xc7\x6e\xaf\xce\x30\x1b\x58\x16\xcc\x1c\x05\xf9\xc6\xcb\x3e\x75\xbe\xdd\x0a\xbe\x7e\x06\x0a\xab\xa1\xba\x55\x5e\xaa\xc0\xa9\xf5\x99\x06\x11\x20\x0a\xf2\x25\xcd\x21\x49\xb9\x34\x00\x04\xe3\x85\x55\x33\x02\x47\x3c\xcb\xb7\x20\x5e\x54\x16\x81\xe2\x5f\xc2\x97\x22\xe9\x20\xee\x4c\x75\xbc\xb0\x71\x92\x43\xc1\x3c\x8d\xb2\xac\x1f\x83\xa2\x8e\x36\x8f\x81\x1b\x8b\xf5\x86\xbe\xc8\xee\xbe\xce\x03\x7a\x0d\x1d\xf2\x87\xc3\xbb\x9e\xe7\x90\xde\xbd\x92\x6b\x1b\x6d\x93\x2b\x92\x58\xc2\x84\x7e\x63\x07\xc3\x79\x69\x4a\xba\xd4\x31\xc1\xf4\x1c\x17\x9e\xb1\x82\x04\x79\x4f\x3c\xae\x22\x6c\x82\xc5\x01\x42\xbd\x66\x47\x08\x1e\x33\xbd\x22\x6f\x62\x9a\x26\x12\x98\xa1\x63\x24\xa1\x07\xf6\x8c\xbc\x8a\x8e\x0f\x4b\xb3\x8d\x0d\x48\x3e\x2e\x2d\x10\xaf\x4b\x8c\x17\x0e\x0c\xcc\x60\x58\x00\x1b\xdc\xe2\x9e\x77\x93\xa9\x3d\xbb\x62\x5a\x47\x7b\x5c\x34\xb9\x4a\x09\x5b\x21\xed\x23\x5f\x01\x5e\xeb\x36\x21\x3f\xe2\x49\x54\x52\x9c\x0c\xe4\xf2\xad\x0a\x7e\x3f\x82\xd0\x47\xfd\xcc\x44\x57\xbd\xfe\x75\x23\xf3\x50\x75\x45\x97\xa0\xf5\x58\x9f\x42\xbf\x7b\x71\x9d\xe8\x25\xac\x9c\x6e\x94\x60\xcf\x8d\x65\x8e\xeb\x4c\xc6\x63\xec\x1d\x3b\x26\x1f\xdc\x4f\xfb\x1a\xf8\xc1\xba\x7e\x5c\x4d\x76\xdd\x33\x12\x32\xa8\x31\x37\x8e\xa3\x40\x58\x91\xaa\x6a\xf5\x79\x52\x90\x8c\x07\x2c\x2b\x77\x3f\x75\xf8\x19\xaa\x7d\x39\x68\xa2\x9b\x4c\x31\x7b\xc6\xd2\x93\xcb\xf4\x4f\xf2\x01\x74\x1f\x78\x94\x39\xce\x8f\x6e\xcf\xa2\x8a\x45\xbc\x78\x44\x1f\x4e\xe9\xb7\xc3\xfa\xe2\x46\x1a\x9b\x07\x3e\x40\x75\x64\x4c\x85\x98\x67\xb1\xef\xc7\x04\xf6\x7b\xc4\x53\x70\xc3\x43\x58\xe3\xfe\xf5\xd4\xd6\x71\xe5\xb3\x8b\xcc\x79\x89\x39\xff\x88\xdf\xd6\x2d\x1a\x38\xfb\xd6\x91\x5b\xa4\x08\xef\x36\x2b\xc7\x2f\xd7\x4a\xde\xcd\xa0\xb5\x5b\x5f
\x61\xf6\x00\x3f\x64\x71\x3d\xc7\xd9\x51\x16\xda\x47\x7b\xa0\x3f\x67\x40\x3b\x1c\x66\xf4\xc1\x01\x79\x16\x77\x20\x45\xac\xf9\x6d\x0f\xa1\x11\xf8\xe3\x51\x08\xe8\x34\x13\x36\x6e\xb8\x13\x85\xe3\x75\x48\xac\xae\x20\x84\xc5\x5c\xaf\xab\xc4\x36\x96\xbb\xc2\x91\x91\x41\x10\x8b\x4c\xfd\x48\x6f\x53\xad\x00\x96\x84\x59\x6a\x73\x45\x85\x5b\x75\x78\x17\x59\xab\xa4\x3a\x4e\xc8\xa1\x89\x89\x33\x22\xd7\xc9\x3d\x85\x50\x03\x11\x13\xd0\x95\x34\x0d\x3c\x31\x6f\x43\x9d\x21\xc1\x96\xbd\xd9\x21\x13\xa0\x26\x91\x9e\x89\xb5\xcc\x0b\x11\x66\x87\x86\xbf\x7f\x32\x35\xdb\x8a\xf3\xa4\x6f\xe8\x3b\xd5\x6c\xf7\xbd\x82\xcc\xf9\x34\xa2\x3d\xbb\x54\xc4\x67\xee\x77\xfd\x8b\xa1\x96\xc0\xef\x8f\xc3\xca\x7d\x87\x6b\x00\x5f\x7f\x39\x52\x7d\xe5\x4e\x7e\xc4\x4d\x12\x91\x30\x71\x0f\x68\x34\x53\xb4\x2e\x79\xc6\x55\x21\x44\x3e\x57\x14\x78\x46\xca\xba\x90\x95\xa5\x06\x84\x74\x6f\x9b\x48\xe7\x05\x32\x40\xc1\x4f\x56\x5c\x26\x65\xd6\xe9\x6e\xc0\x55\xf9\x28\xda\x89\xbe\x51\x3a\x81\x62\x59\xdb\xa4\xb9\x04\xe6\x60\x17\x39\xd6\x94\x7a\xd8\xb8\x9a\xdf\xdb\xd1\x05\x7b\xb9\x0c\x9f\x6f\xe7\x6b\xee\xc8\x69\xfe\x3a\x5f\xa4\x7a\xc4\x89\xf7\xc3\xd7\xf9\xa5\xee\xc8\x06\xcf\x7f\x6e\xf8\x44\x7b\xe0\x13\x3f\x77\x09\xb2\xf0\xfc\x0e\x22\x8f\xfb\x5c\x31\x83\xd8\x38\xf7\xc6\x27\x3b\xcf\x2e\x58\xb5\x1b\xae\xe2\xc4\x98\xbc\xbc\xa8\xf3\x5e\x3b\x9c\xb7\x79\x12\x15\xf6\x70\xec\x4e\xea\x83\x1c\x99\x45\xd4\x48\xb0\xdc\x37\x4e\xb5\xcc\xcc\x5e\x2c\x65\xad\x96\x6a\xbe\x64\x5b\x9e\x8e\xb7\x61\x48\x06\xd9\x6d\xd8\xcf\x6e\xbf\x9c\x86\x6d\xff\x44\xe6\x4b\x75\xaf\xad\xe4\xfa\x57\xe0\x48\x78\xdf\xbc\x12\x22\x3a\x73\xe8\xa2\x76\xd9\x0d\x07\x9e\x3a\x90\x48\x66\x4e\xed\x90\x71\x7c\xae\x48\x0e\x1e\xd1\x05\x10\x56\x46\xbe\xb5\x9c\xbd\x76\xd9\xc5\xaf\xff\xcd\xb2\x6d\xed\xd8\x0a\x16\x15\x50\xda\xe9\x28\x2a\x33\x08\xfd\x93\x7b\x92\x09\xbc\x84\xf3\x51\x44\x32\x60\x7a\x38\xc0\x16\xda\x89\x6d\x66\x92\xf3\x47\x57\x3a\x75\x03\x6e\x48\x14\xb6\x77\x97\x3e\xe9\x95\x65\x79\xc1\xf2\x42\xa4\xad\xc7\x6f\xc5\xba\xdc\xa5\x62\xa6\x94\x2e\xea\xf9\x29\xa3\xed\x4b\xee\x4a
\x19\xb8\x75\x46\x5c\x46\xb3\xc0\x65\xf4\x87\xeb\x0f\x17\x2c\xe5\x3b\xc0\x3e\x16\x9a\xe1\x57\x81\x70\xb4\x7e\x50\xed\x9b\x81\x6a\xe7\xab\xa7\x0a\x8e\xa9\x05\x51\xb7\xc7\x27\xa8\xc6\xa6\xb1\x08\x6b\x86\x96\xa4\x39\xb3\x32\x9d\x1c\xa5\x09\x57\x01\xbc\x3d\x9f\xb2\x5a\xf5\x21\x9e\xc1\x45\x36\x09\x31\x06\x0d\x00\x7f\x05\xad\x85\xac\x6c\x05\x40\x03\xef\x8e\x5d\x50\x87\x41\x18\x3a\xcf\x88\x5e\x60\xe7\x7b\x54\x81\x41\x4d\x04\x64\xcf\xb0\xb0\x0c\x87\xec\xe1\x39\x80\x6e\x3b\x19\xc0\x79\x94\xf0\x3c\xef\x45\xe9\x3c\x0b\x95\x7c\x90\xb5\xb8\xff\xf8\xaa\xb6\x13\x61\x84\xc0\x6d\x82\xef\x52\xf7\x31\xb0\x25\xd8\xa3\xcb\x8b\xbe\x05\xf6\x7e\xa0\x06\x41\xd0\x07\xe2\x8b\x82\xdf\x23\x13\xe4\x9d\xd8\x59\x0f\x17\x1d\x55\x7c\x2b\x26\xce\xd9\xea\xbc\x89\x01\xe8\xaf\x59\xf0\x5c\x01\x2a\xf6\x5d\xd8\x3c\xf6\x4e\xeb\x09\xe2\x33\xa9\x72\x8e\xc5\xf2\x10\xe1\x34\x57\xef\xb4\x9e\x72\xf7\x88\xa5\xf6\xd3\x71\x53\xaf\x90\x50\x51\x80\x39\xac\x4d\xe7\xf0\xbd\xf9\xbd\x54\x28\x4f\x28\xb7\xe6\x01\x45\xe3\x04\x2b\x0a\x1a\x64\xd5\xf0\xf5\x43\xce\x62\xa4\x94\x29\x65\xbe\x81\xb0\x0b\xc6\x39\xa1\x7e\xba\x52\x10\x90\x95\x71\x95\x9b\x3d\x0c\xa1\x1a\x71\x2f\xc8\x5f\x5b\xc1\x18\x9c\xbd\x3d\x77\xb0\x25\xdc\x97\x24\xdd\xd1\xb1\xdb\x82\x47\xc7\x21\x8f\x73\x80\x9b\x8f\x20\xb4\x23\x07\xe7\x7b\x9e\xf6\x25\xc3\x1e\x5c\xe2\xbe\x59\x72\x84\x5a\xf5\x17\x15\x28\x99\x83\x86\x61\x25\x23\x36\x1c\xbd\x5b\x75\xe0\x8d\xd3\xca\x69\xbf\x5f\x72\x67\xb0\x83\x61\xe4\x51\xb1\xff\xba\x09\xb8\x2d\x1d\x64\xd0\xbd\x05\xcd\xc1\x0e\x0a\x71\x40\xca\x87\x5b\x7a\xca\xae\x85\x60\x1f\x61\xa4\x4c\x65\x1f\x49\x81\x14\x50\xd0\x05\x97\xad\x02\x71\xf0\xed\x33\xb5\xd2\x87\x9d\xff\xd9\xba\x81\xb2\x3d\x68\x54\xda\xdb\x79\x28\x8e\x17\x3c\xfd\xea\x79\x69\x45\x06\x5d\x0c\xb5\xb9\xbe\xf4\xfe\x26\x4a\x36\xb6\x2d\x35\x26\x19\x4c\xf1\x63\x88\xeb\x6a\x8b\xc4\xf4\x72\x82\x64\xec\x77\x4a\x3f\x28\x3c\x8f\xa9\x26\xf6\xd2\xec\x3f\xb0\x59\x30\x2e\x84\x96\x60\x89\xa7\xe1\x2b\x60\x87\x9f\xb9\x7f\xb3\x6b\x0c\x81\x63\x9b\x41\x3a\x2c\x07\x7b\x97\x44\xbf\xe0\x02\x7f\x39\x9b\xb0
\x6f\x27\xec\x64\xc2\xa6\xd3\xe9\xab\x09\x13\x3c\xda\xd8\x16\xe1\x4f\xf0\xe8\x2f\xf8\xda\x94\x4d\xb2\x3f\xab\xa0\x02\x90\x07\x34\xf6\x89\x25\x41\xe4\xfe\x5b\x81\x57\xcd\x76\x01\x53\xb3\x29\x8f\x8c\xe0\x42\xd1\x46\x4b\xdf\x28\x40\x9e\x8b\x48\x67\x16\xbb\x9e\x17\x3a\xb3\x38\xdc\x7b\x9e\x71\xa9\x80\xb1\x82\x37\xb3\x10\xa8\xe6\x80\xb3\x5e\xfc\xc2\xb7\xd0\x7f\xa9\x1c\x6d\xaf\x19\xa6\x1b\xd7\xfe\x62\x97\x52\x9c\xed\x21\x93\x45\x61\x0c\xb2\x7c\xae\xae\xd9\x9b\x6f\xd8\x2c\x4d\x13\xc1\x66\xec\xef\xec\x5b\xae\xb8\xe2\xec\x5b\xf6\x77\x76\xc2\x55\xc1\x13\x5d\xa6\x82\x9d\xb0\xbf\x9b\x61\x33\xe5\x5d\x68\x63\x01\xed\x26\x8c\x33\x55\x26\x68\xe8\xbd\xb4\x18\xd7\x57\xae\x5f\xdc\xcf\xce\x52\x14\x0f\x42\x28\x96\xeb\x2d\x5d\x85\x7f\x72\xb7\x7f\x2e\xd5\x3a\x11\x05\xad\x87\x2a\x1a\x19\x2b\x38\x82\x9e\xbe\x99\x2b\xe7\xa7\xfe\x93\x69\xf1\x9f\xd8\xdf\xd9\x45\x99\x24\xa6\x49\xe6\xa0\x31\x0b\xe9\x0d\xb3\xd9\x61\x42\x4d\x1f\xe4\x9d\x4c\x45\x2c\x39\xe4\x87\x99\x7f\x1d\xdf\xc0\x6c\x2f\x4a\x4f\x05\x1a\xee\x69\x27\xc7\x76\xc8\xd1\xf3\x2c\x5c\x13\x4e\x2c\x30\xb4\x56\x3a\x41\x28\xe1\x4f\xc7\x1b\xc1\x9e\x00\x99\xf6\x03\xbd\x51\x50\x4a\x2f\x0c\x50\xb6\xd7\xef\x54\xbf\x52\xf3\x5f\xad\xf4\x1f\x83\xd4\xbf\xfa\xc6\xc3\xb7\x11\x8c\x53\x9c\x1c\x9f\x9c\x09\x0f\x19\xc8\x25\xc4\x7d\xb7\x03\xc9\x0f\x5b\x36\x3e\x3b\x31\xbc\x6d\x9e\xd2\x68\x8d\x16\x7c\x3d\x61\xa9\xd3\x91\xb2\x9b\xca\x05\xb6\x71\x1f\xa3\x66\x02\x19\x9b\x2f\x2d\x80\xc8\xac\x65\xca\x3f\x3c\x8e\xf5\x96\x4b\xf5\x0a\xea\xb0\xd4\x79\x7b\x06\xaa\xe5\xb9\xb2\x7f\x84\x6e\x78\x2f\x9a\xb1\x9b\xda\xbf\x6a\xec\xd4\x24\xdc\xda\xb6\xc3\x81\x1a\x66\x5e\xe1\xf4\x13\x3e\x87\x7e\x6c\x2c\xd1\xc1\xda\x07\xa4\x37\x56\x61\x4f\x01\x5b\xde\x33\xc8\x0d\x8a\xad\x3b\xe5\xb2\x1f\xab\x12\xaf\x95\x21\xd6\x72\x90\x16\x6e\xad\xb1\xb7\xf4\x12\xc3\xbc\x67\x73\x4c\xca\xe4\xd8\x1c\x95\xc7\x17\x5a\x09\xc6\xf3\x5c\xae\x91\xf5\x0e\x1c\x6a\x28\x22\x6b\x8d\xb2\x9b\xea\x93\x21\x38\x82\xc0\x3e\x33\x4d\x42\xc4\x74\x61\x4e\x61\x33\x05\xc9\x6e\xae\xcc\x2f\xc8\x22\x80\xec\x29\xe9\xc8\xd1\xb1\x36
\xe2\x1e\xb7\x75\xd1\x85\x18\x14\xde\xb2\xc0\xfa\xa8\x19\x0e\x58\x70\xb4\x13\x0f\x88\xb8\x5d\x04\xc4\xa0\x54\x9a\x65\x8d\x42\x38\xcd\x52\x24\x5a\xad\xcd\xaa\xe8\x3a\x84\xe1\x14\x78\xa2\x26\x60\x61\x9d\x2d\x30\xc6\x0a\x7d\x85\xa6\xc4\xd8\x29\x32\xf6\x2e\xb5\xbc\x5c\x1a\x3b\xce\x45\x7b\x9c\x35\x42\x9d\xeb\xe2\xa9\x38\x0c\xb6\x74\x6b\xce\x60\x9d\x59\xe0\x9c\x8b\x24\xa2\xe1\xe2\x39\x9c\xb0\x47\xc3\x36\x55\x6f\xbe\x45\xbb\xf7\x91\x22\x95\x0d\xc6\x8e\x01\xeb\xf1\x73\xa6\x5e\x0c\x49\xc8\x78\x37\x3b\x3b\xaf\x7d\xaf\x99\x90\xd1\x92\xb5\x71\x73\xf6\xfe\xf4\xed\xe2\xc3\xed\x4d\xe3\x7b\xa6\x34\xfa\xd3\x9e\x9c\x8c\xce\xd1\x7b\x0a\x54\xfa\xcf\xa8\x22\xb6\xd0\x2b\x9b\xa0\x3f\xfc\x82\x6c\xe8\xb8\x0d\x03\x3f\x16\xc1\xfb\x36\xd4\x3b\x6b\x2e\x9c\x4e\x9a\x11\xb5\xa0\x68\xe7\xb0\xc6\xd6\x07\xec\x83\x7a\x87\x3f\xbf\xd4\x89\x8c\xfa\xb1\xd4\xf6\xba\x32\x76\x4d\x13\x9c\xba\x14\x90\x5c\x40\x2e\x57\x6a\x14\xbe\x90\x0a\x11\x15\x3e\x9a\xdf\xec\xdc\xff\x6a\xfc\xe6\x7e\x1f\x08\x7a\x42\xdd\xb0\x81\x3c\xb8\xc3\x07\xc0\xdd\x0a\xbc\xcd\x20\x57\x82\x76\x26\xf8\x56\x01\x37\x13\x71\x8a\xfa\x54\x46\x1e\x0e\xe8\x87\x8d\x4e\xc8\x23\x8a\x1c\xd8\x73\x95\x8a\x2c\xd2\x80\x7b\x44\x7a\x15\xcd\xa2\x8d\x4c\x62\xaf\x09\xf6\x12\x12\x45\x00\xce\xfd\x8a\xe4\x6d\x85\xc3\xaf\xd8\xe2\x7b\x6e\x5d\xbb\xec\xde\xe2\xee\x3e\x08\xfb\xf5\x94\xc8\xef\xbe\x65\xff\x13\x21\x94\x71\x28\x88\xb5\xae\x86\x44\x00\xc3\x3b\x6c\xcf\xa8\xa0\x8a\xb9\x6e\x49\xee\x29\xf2\x0f\xd7\xa2\x36\xaf\xb4\xcc\xea\x43\x09\x5c\xe6\xe8\xc9\x46\x18\x5e\x2e\xa0\x39\x5b\xc1\xd1\x16\xf3\xcc\xc2\x34\xa9\x73\xe5\xb1\x17\x2f\xf2\xd0\x2e\x6b\x9d\x67\xf4\x7f\x5b\x6c\xf9\x84\xbd\xa8\x74\xf4\x05\x70\x5d\x2b\x0d\xf5\x51\x7c\xbc\x32\x34\xb0\x5c\x27\x4c\x16\x73\x65\x5e\x4d\x66\x65\x66\x22\x11\xf7\xa6\x75\x61\x7c\x86\x10\x83\xd6\x77\x61\xbb\x0d\xe9\x49\xdc\xb2\x5a\xd0\xb2\xa1\x4d\x98\x85\x9c\xc9\x18\x18\x8e\x45\x6e\xec\x46\x50\x7b\x12\xbf\x98\x0d\x20\x21\xfc\x88\xd0\xb2\x58\x28\xdb\x3e\x40\x9c\xa1\xd2\xfe\x5c\x9d\xad\x80\x5a\x00\x08\x0d\xe2\x18\xbd\x00\x56\xff\xc7\x11\x58
\x4a\x8a\xc7\x68\xf2\x89\xd8\x89\x20\x75\x66\xdc\x49\xe2\x5e\x64\xbb\x02\x9c\xea\x30\xae\x4a\xf0\x62\xc3\x64\x31\x01\xe6\x51\x7b\x52\xce\x15\x8f\x63\xca\xc8\xc6\xe2\x82\x07\x65\xe7\x3c\xd3\xe7\x4b\x7d\xdf\x67\xd8\x1e\x8a\x9d\xc5\x5d\x9d\x26\x5c\x2d\xf0\x06\xf9\x0c\xe8\xd9\x40\x38\xbb\x0b\x46\x51\x2e\x17\x8e\x2d\xed\x49\xda\xe9\xce\xfb\x2b\x0b\x1e\xa6\xc7\x45\xb9\xb4\x15\x4d\x2a\xe0\xe8\xa5\x27\xd6\x70\x7e\x32\x42\x2e\x65\xcc\xa2\x3b\x86\x9f\x02\x1e\x58\xcb\x6b\x28\x27\xbb\x5a\xf7\x21\x6b\xed\x0a\xf8\xb5\x62\x1f\x87\xcc\x7c\xed\x0e\xa9\x4f\xfb\x78\xd8\x5d\xc3\x42\x7c\x14\xf4\x6e\x4f\xb3\x9e\x17\x7e\xd7\xe9\x47\x69\xc2\xf0\x6c\x6f\x83\x08\x3b\x81\xf7\xd1\x0f\xea\x5c\x58\xed\xc2\xe8\xe1\x3b\x4c\xb7\x20\xd4\x9f\x32\x46\x00\xe7\xd4\x50\x4f\x89\xa7\xf4\x80\x76\x4d\xd9\x99\x62\xd6\xdc\x9b\xb0\x17\xb8\xb0\xf2\x17\xe4\x02\x26\x75\x7d\x82\xab\xc4\xb4\x7b\x88\x04\xa1\x0e\xf3\xc2\x54\x34\xbf\xdd\x30\x12\xd7\xcb\x98\xfb\xac\xe3\xf2\xad\x84\x54\xb8\xc7\xb0\x9d\x60\x14\x77\x89\x05\xd8\x4c\x8e\xc0\x19\x49\xdd\x85\x68\x82\xef\xb0\x8d\x37\xb2\x6f\xed\x0f\xcd\x10\xa5\x25\xdd\xa7\xf6\x73\xa6\xb3\xb9\xb2\xa5\x91\x4b\x38\x47\x89\xbe\x7a\x51\x41\x66\x0e\xd9\xfc\xc1\x4a\x85\x60\xbc\x55\x65\x04\xb1\x4f\x4f\xeb\x5d\x3f\x05\x00\x87\xb4\x74\x18\x50\xd0\x81\xf0\xb5\x19\xc3\xc3\x2c\xf0\x2d\x5e\xf3\x75\xea\xdf\x24\x31\x83\x22\x0b\xcb\x34\x1c\x64\xcd\xe5\x25\xf0\x65\xaf\x4a\x73\x18\x05\xa4\xe2\x73\x65\x06\x8f\xad\x24\x64\x4f\xd0\xb8\xcc\xd5\x7b\x9d\x5b\x92\x96\xdc\x8f\x87\x0d\xed\xd3\xb0\xbd\x70\xe2\x94\xf4\x87\xb7\x70\x69\x53\xcc\x05\xe9\xd6\xdc\xd5\x02\xe9\x92\xc4\xb4\xb4\xd3\x65\xe6\x3b\x15\x71\x35\x57\xff\x6d\x86\x07\x9e\x53\x5c\xd9\x69\xd5\x2b\xdc\xc2\x30\x83\x10\xac\xfa\x88\x85\xbe\xfc\xb7\x57\x1f\x5f\x61\x7a\x53\x99\x83\x1e\xf0\xa4\x7a\x81\x38\x7d\x89\x32\x49\x00\x09\x60\x7b\xe0\x38\x8e\x7c\x15\xbd\x48\x38\x7a\xd4\x2d\x54\xd5\xc4\x18\xb2\xd1\x87\x39\xd6\x67\x2c\xe2\x45\xb4\x39\xb2\xb6\x1c\x1d\x63\xf6\xf6\xa3\xe9\xc3\x3c\x24\x63\x69\xb5\x4b\x2c\x98\x07\x67\xb6\x75\xa4\xaf\x95\xf5\x62\xba\x00
\x0e\xf8\x9b\xba\xde\x98\xe3\xa4\xc6\xc5\x89\x48\x9c\xaa\x9d\xe7\xbe\x6e\xd5\x3e\xfd\x8b\x93\xa2\x14\x8a\x6f\x45\xcc\x5e\x40\x22\xee\x0b\x3b\xf9\x73\x95\x2e\xa7\xc9\x6e\x55\x10\x73\xa0\x19\x94\x29\xe8\xe2\xed\xb9\xe5\x16\x71\xf3\x99\xb4\x67\xb0\x3b\x1f\x5a\xed\xb6\x8e\x1b\x1b\x57\xd3\x70\x83\x05\x7d\x5c\x6e\x74\xae\xab\xa8\xbc\xaa\x40\x07\xcf\xef\x26\x6c\x99\x71\x05\x92\x46\x71\x68\x54\xf9\xdd\x09\x8f\x67\xa4\xe5\xb3\x99\x79\x8a\x27\x3b\xc8\xc0\x99\xcc\x15\x72\x18\x02\xd9\xfd\x2e\x4a\x64\xc4\xd6\x19\x4f\x37\x35\x3b\x48\xdc\x0b\x55\x80\x32\xf6\x95\xe0\xf9\x61\x68\x89\xac\x5e\x02\x1b\x1c\xcf\x9a\x29\x78\x7d\x70\x55\x63\x9d\x86\xe6\x75\x5c\x2d\x80\x90\x14\xf1\x62\x1c\xe3\xd4\x5e\x5e\xe4\x0a\xdb\x26\x51\xbf\x41\x04\xd8\x74\x8e\xd9\x5a\xf7\xc1\x0f\x70\x5c\x89\x0c\xc9\x62\x6a\x0f\x85\x4c\x38\x72\xa5\x83\x28\x72\xcf\xaa\x56\x24\xf7\xac\x51\xde\x73\x4d\x81\x37\xf4\x54\xd8\x44\x04\x77\x70\x4c\x48\xb9\x14\xe8\x33\xd9\x1f\xcb\xa5\x4e\x2c\xff\xe8\xd9\x5b\xa6\x33\x90\xfe\x29\x34\xfd\x49\xc6\x5d\xd6\x81\x54\xb1\xf8\xe5\x20\x12\xa0\xfe\x8b\xde\x9a\xcd\xa6\x9a\x40\x61\xa6\xde\x59\x38\x9d\x32\x61\x2e\xe1\xc2\xbe\x8c\x1b\xdf\xca\xeb\x60\xe1\x59\x52\x6c\x00\xc1\x8b\x49\x32\x7e\x50\xb7\x7c\xc7\xa2\x0d\x57\xeb\xc0\x35\x01\x80\x4a\x91\xea\x0c\x25\x72\xef\x81\x6d\x53\x67\x96\x64\x81\xa8\x03\x28\x53\xc7\x05\x12\x10\x20\xaf\x2d\x3f\x00\x5f\xaf\x33\xb1\x86\x44\xd2\xb9\xaa\x90\x9f\x00\xd3\xa8\x55\xe7\xc1\x7a\xfa\xb8\x23\x9e\x86\x80\xa9\xeb\x35\x58\x64\x3b\x97\x79\x4f\xfa\xd2\x7e\x3f\xd7\x87\x75\xc2\xa4\x98\x4e\xd8\x57\x3e\x29\x40\x44\x5a\xb9\xd4\xfd\x8e\xbc\xed\x9a\xcb\x9f\xed\x79\x3a\x34\x99\x9a\xda\xdb\x0e\x9f\x35\x54\xaa\x5b\x17\x4d\x2f\xf7\x41\xc1\x8b\x72\xc4\x1d\x74\xc2\x0b\x9e\xe8\xf5\x89\xf9\xf1\x35\xfe\xb6\x6f\x5d\x9f\x20\x62\xdf\xb2\xe4\x99\xef\x9b\x9b\xd3\xd4\xed\x59\xf4\xdb\xc6\x7a\xaf\x03\x39\xd1\xdd\x0e\xe4\xa7\x30\xd5\x2d\x15\xd2\x7e\x1f\x72\xd2\x41\xef\xd3\xd3\xa7\xb1\x2e\x62\x8b\xab\xa7\xd4\xa0\xbc\xfe\x8c\x6d\x39\x01\xd2\x4c\xc7\x65\x24\x62\xb3\x73\xe1\x3d\x84\x88\x24\xc7\x32
\x54\x39\x24\xdb\x2e\xda\x0a\x55\x1a\xdc\xba\x9f\xca\xe7\x30\x88\x9d\xde\x0d\xff\x6d\x87\xbf\xc1\x5a\x7c\x6d\x83\x1e\xee\x4f\x1c\xa7\x6c\xe4\x3d\xe5\xaa\xaf\x72\xca\xeb\x4c\xae\xa5\xe2\x85\xce\xd8\x4b\xc7\x25\xf0\xca\x09\xd1\x75\x5b\x08\x23\x8f\x89\xca\x10\xe1\x31\xf1\x49\x0d\x8f\xb6\x45\x6a\xbe\x95\x17\x7c\x9b\x86\x2c\xcd\x4e\xe6\x9f\x46\x26\xc1\x41\x70\xb6\x09\xf8\x4e\x65\xee\xf3\x66\xe7\x8a\x22\x0e\x38\x6f\x3a\x0b\x65\x06\x3a\xef\xe6\xb4\x2c\x16\x8f\x64\x1e\xc3\x1f\x8f\x73\x3c\x11\x0c\xe1\x3d\x4f\xfb\xb9\x9c\x38\xb9\x1c\x30\x71\x90\xdc\x11\xde\x52\xa9\xae\xcf\x7e\x21\x9f\x91\xdc\xd2\xf5\xd0\xf9\xd5\xb9\x0d\x14\xf9\xf7\x60\xe5\x81\x05\x13\x81\xa4\xb6\x98\x88\x85\x4f\x7b\x77\xac\x99\x5b\xdc\x12\x40\x9d\x24\xba\x8c\x19\x1d\x6a\x14\x86\xcf\xa6\x78\x3b\x02\xcb\xf4\x74\xda\x95\x58\x36\x52\x60\xdc\x9d\x3f\xf0\xbb\xf6\x1d\x08\x9f\x75\x9c\xc0\xbd\x5b\x9f\x46\xf6\xd9\xa6\x9e\x46\x1a\xe6\xde\x1d\xc7\xa3\xe6\xde\x79\xc1\x81\xf2\x72\x9c\x83\x14\xde\xa3\x32\x4e\x60\xbf\x85\x01\x84\x16\x52\xee\x4a\x60\x36\xbf\x3b\xb8\x3a\xcb\x03\xd1\x5f\x55\xca\x33\xa1\x8a\x05\xd4\x38\xae\x32\xa8\xe4\x12\x7e\x5e\x31\x98\x06\x39\x82\xff\x7c\xa3\xd1\xbf\x6f\xf9\xad\xfe\xc2\xae\xc9\xa7\x65\xce\x2b\x09\x20\xde\xfc\x8e\xbd\x94\x80\x39\x0a\x62\xa1\x6e\xe2\x3a\xa6\x8b\x3a\xf4\x88\xd1\x0b\x3a\x54\x39\xda\x07\x75\xc8\xb7\x1e\x42\xd5\x50\x0a\xb9\xf7\x28\x2b\xdf\x1c\xb5\xf6\x6f\x81\xe6\xc5\x45\xe5\xdf\xc0\x4f\x6c\xe6\x2f\x61\x7f\x15\x99\xf6\x19\x58\xe8\xac\x0a\x0b\xee\xb5\xd7\x1f\x2f\xd7\x8d\xf6\x38\x0a\x45\x87\x4a\xa9\xf0\x17\xa2\x10\x43\x8f\xc2\x72\x67\x9f\x23\x1d\x21\xa4\x54\x44\x8b\x0e\x59\x9c\x41\x4d\x09\x1e\x9e\xa1\xcc\x8d\xac\x5d\x66\x76\x83\x1e\x83\xbf\x82\x52\x9b\xb6\x3c\x25\x7c\x1f\x41\xb9\xeb\xc1\x9b\x29\x74\xe2\xcf\x7f\xfa\xcb\x54\x76\x24\x59\x43\xd3\xc7\xc2\xa5\x5c\xe3\xdf\x65\x52\xa8\x18\x82\xb1\x3c\x6e\x2a\xb6\xa9\x8a\x77\xbe\x72\x3c\x9b\x65\xf8\x24\x19\xd9\xed\x57\x6d\xbe\xc0\x45\xf4\x09\x22\xfa\xfe\x90\x75\xdb\xb7\x12\xef\xeb\x32\x25\xf2\x45\xbc\x53\x7c\x2b\xa3\x4f\xda\xc6\x9d\x14\x49\x0c
\x4d\xa4\xda\xf7\x45\xa5\x62\x11\xdd\x8d\xb5\x09\x1e\xad\x37\x21\xa2\x3b\xf6\xfd\xcd\xfb\x73\x94\x17\x96\xf9\x5c\x5d\xf0\x42\xde\x8b\xdb\x2c\x71\xe1\x00\x82\x49\x67\x89\xdd\x23\x55\xfe\xf3\x80\x6b\xcb\x92\xa5\x5b\xc3\x21\x94\xa7\xd8\xee\x8e\x96\x65\x74\x27\x8a\xe3\x8c\xab\x58\x6f\xb1\x1b\xc7\x79\xb9\x5a\xc9\x5f\xa6\x05\xcf\x3a\xb4\x2a\xd0\x8f\xf0\x19\xed\x5c\xaf\x40\x56\x78\x9b\x17\x4d\xdd\x07\x48\xb4\x26\x5d\xfb\x8a\x71\x8b\x79\x81\x7c\x2b\x80\x6c\x94\x55\x75\x5e\xa0\x14\xcc\x5d\x06\x39\xd4\x3c\xa7\x0c\x06\x4d\x62\xeb\x1f\x03\xe3\xfe\x63\xd0\x2a\x1f\xc2\x0e\x1b\xe5\x25\x46\xb7\xfc\x0e\xdf\x87\xeb\x4c\xe4\xf9\x84\xe5\x1a\x5a\x3c\x57\x36\x17\xc0\xe6\xab\x01\xee\x05\xe8\x8a\x93\x1d\x8b\x74\xea\x40\xeb\xd8\xaf\x8d\x7e\x00\x3f\x7d\x98\xa9\x0b\x22\xda\xa5\x2a\x64\xc2\xf8\xaa\x20\x27\x3e\x68\x33\x58\x2d\xb6\x7c\x3a\x57\x10\x8a\x8d\xa0\xfb\x00\x91\x70\xe1\x17\xd7\x89\x9c\xad\x78\x24\x13\x59\x10\x63\x1c\x24\x79\x71\xd3\x5f\x73\x1f\x98\xb1\xcc\xf8\x8e\x27\xfe\x61\xc5\x93\xd2\x27\x27\x1f\xe5\xa2\x87\x91\x54\xe6\x0b\x74\x10\x3c\xdf\x06\xf7\x28\x40\x19\x06\x1f\x90\xbd\x7d\x66\x2a\xbf\xa8\xdd\xa2\xbf\x0b\xff\xb7\xf2\x0e\xef\xb3\x0a\x0e\x78\x90\x1f\x72\x39\x36\x9f\xdc\x4e\xc0\xdc\xdb\x19\x32\xb6\xf8\xe0\x8a\x29\xee\xd3\x7f\xdd\xf5\x08\x31\x93\x8e\x47\xff\xd4\xca\xce\x35\x6b\x18\x31\x7a\xed\x46\xe2\x27\x72\x67\x74\x51\xea\x0f\x69\xbe\xf5\xc6\x5f\x6a\x9d\x1c\xea\x91\x27\x52\x0c\xa9\xd5\x02\x94\x98\x0f\x79\x4e\xe2\x02\x70\x8e\xad\xb3\xb7\x2e\xe6\xee\x38\xea\xab\xfa\x6d\x04\x07\xa3\x26\xc0\x41\x06\x8d\xe8\x41\x8a\xe7\x69\x0b\xe8\x62\x24\xe2\x1d\xca\x40\xb4\x96\x35\xed\x9b\x21\x82\x80\x1f\x85\xfb\x36\x02\x8f\x6f\xad\x85\xa3\x9c\x75\xa8\x9b\x5c\xab\xca\x39\xee\x42\xbe\x6f\x37\x8e\x41\xdd\x76\x3c\xb7\x5c\x91\xe7\x8f\xac\xf8\xb9\x0a\x2c\x76\xe4\xa4\xb3\x29\x05\x6e\xd4\xda\xfc\x79\x95\x65\x78\xb0\x3f\xef\x10\x51\x87\xde\x93\xf3\x6d\x28\xcf\x08\x58\x90\x48\x6f\x97\x52\x59\x56\x08\x72\x72\xc3\x53\x63\x66\x39\x73\x5d\x40\xc2\x3e\x19\x50\xb4\xa7\x36\xf6\xce\xcc\x09\xe9\x87\xc3\x23\x6b\xdf\x73
\x3c\x7c\xdf\x3d\xad\xfe\x44\x47\xa4\xb1\xde\x03\x73\x81\x24\x0f\x7c\x97\x83\x84\xb9\x30\xa7\xe2\x0a\x1d\xbb\xd5\xf6\x4f\x02\xf3\xc3\xf2\x31\xcf\x15\x8c\x10\xf2\x75\xd9\x83\xd4\x9c\xac\xb0\x00\x13\x2b\xd6\xee\xb9\xd6\x5e\xe4\xed\x83\xf3\x79\x62\x35\x59\x6f\xac\x06\x83\xd0\xff\x33\xc2\x33\x3d\x4e\xe0\x03\x7d\xd1\xc1\x35\x89\x16\x23\xc1\x84\x20\x71\xcb\x85\xa8\x27\x6c\xcb\xa5\xa2\x6d\x80\x82\x98\xb1\x58\x96\xeb\x75\xa7\x8b\xf4\xd7\x1f\x6b\xa9\xee\x93\x7f\x7a\x5f\x78\x2f\x5b\xe0\x53\x78\x8b\xcf\x6c\x4d\xe8\xbe\x36\xef\xbe\x4f\xe3\x20\xfe\x8c\xde\xf8\xd6\x90\x58\x63\x11\x3d\x8d\x37\xfe\x6c\x88\x37\xde\x62\xbb\x20\xc5\x8e\x9e\xd3\x16\x7f\xf3\x9b\x9b\xfe\xd3\xb8\xe9\x07\x2d\x0a\xa4\xd5\x59\xc8\xaa\x81\xde\xd3\xc2\x47\x32\x4f\x3a\x32\x66\x68\x15\xb2\xbb\x99\xd3\x3d\xce\xd9\x92\x47\xcf\x40\x45\x09\xb7\xe3\xe1\xfe\xc0\x3d\xe0\x97\x6b\xbd\x15\x0c\xaa\xca\x51\x4a\x89\x51\x16\xe3\x04\xd0\xaa\xa6\x83\x1e\x31\x42\x78\x14\xb8\x4e\x11\xb9\x12\x7b\xa3\xfa\xa5\x12\x0f\xcc\xdc\x56\x93\x10\xbe\x17\x4c\x0f\x68\xec\xbd\x32\xd6\x61\x05\xeb\xef\x28\x33\x32\xb1\xe6\x59\x0c\x19\x26\xb4\x25\x13\x1e\xdd\x99\xff\x86\xf6\x51\x8d\x04\x31\xb4\xd9\xfa\x08\x7b\xf5\xa5\x49\x15\x21\x19\xa1\x65\x55\x77\xed\xc3\x9f\xe7\x8c\x47\x99\xce\xd1\x69\xe4\xa4\xa9\x21\xc3\x19\x0c\xd8\x7b\x19\x97\x3c\xc1\x1a\x3b\x3d\xed\x63\xe1\x6b\x75\xc0\x51\xa0\x22\xd7\x44\xb3\xd1\x74\x20\x47\x14\x0c\xe3\x74\xae\xde\xba\x80\xc9\x1b\x76\x9b\x0b\x42\x99\xe5\x96\x87\xbf\xb7\xa5\xcf\x66\x3e\x34\x30\x81\x9d\x36\x44\xcf\x00\x58\x90\x75\x30\x10\x79\xf7\x48\xec\x21\x34\x3d\x64\x52\x46\x13\x33\x9f\x05\x52\xf6\x7e\x58\xf0\x9d\x90\x09\x1e\xef\x42\x36\x44\xa9\x18\x44\xe9\x18\x8f\xb7\x52\x99\x4d\x60\xe5\x52\xdd\x4d\x63\x95\x13\x10\x72\x0c\xaa\x62\x49\x52\x3b\x04\x73\xa6\x84\x31\x2e\x79\x26\x93\x1d\xbc\x27\xd2\x4c\x1c\x05\xf5\x04\xf3\x43\x19\x4f\xa0\x01\x41\x34\x2e\x65\x2e\x56\x65\x82\xaf\x0e\x78\x97\xbb\x0e\xd0\x89\x74\x7b\x36\x31\x06\x47\x41\x5a\x3e\x41\xc5\xa8\x90\xf9\x14\xd9\x23\x8d\x68\xe5\xb8\x88\x9b\x67\xeb\xcc\x00\xe4\xbe\xd1\x0f\x36
\xd5\xed\x81\x7b\x2c\x73\xd7\xed\xfa\x64\x51\x96\x7e\x3b\xd4\xbe\x00\xed\x39\x15\x50\xee\xb9\xd0\x1a\x7d\x26\x62\x77\x36\x49\x05\xdd\x21\x91\x69\xef\xb9\x2e\x73\xcc\x98\x33\x73\x09\xf7\x97\x75\x74\x54\x1d\xd7\xcc\xf5\x4e\xe6\x5a\xb1\x79\xf9\xe5\x97\xbf\x17\xec\x4b\x48\x21\xa4\xf7\x08\xc6\xc7\x80\xaf\x13\x4b\x87\x23\xdb\x55\x20\x90\xcc\xb3\x31\x23\xac\x0d\xa2\x6a\xf3\xf5\x01\xe4\xc9\xa3\x0d\xcb\xcb\x25\x22\x18\x39\x85\x58\xb8\x72\xbc\xdf\xe7\x1a\xc0\x88\x78\xb3\xdb\xd6\xff\x2f\x09\x28\xa0\xec\xca\x5c\xa5\x1a\xa9\xe9\x01\xfa\xb9\x14\x6c\xcb\xb3\x3b\x50\xd1\x45\xf7\x3c\x50\xf1\xbf\x94\x62\x5a\x0d\x2f\xbc\xaa\xb4\x87\x02\x3a\x48\x39\xcd\xb2\x52\x29\x2b\x0b\xc6\x8c\x61\xea\x7d\xfd\x93\xb9\x5a\x96\xe1\xdb\xb3\x12\x2c\xf0\x4b\x0b\x02\x06\x70\xd8\x6a\xe0\x0a\xa1\x46\xf1\xdc\xb7\x6b\xca\x06\x44\x0d\xe6\xea\x89\xc3\x06\xfb\x1c\x7e\x97\x64\x83\x59\x67\x5e\x90\xaf\x00\xdd\x0d\x95\xab\x61\x3a\x70\xd9\x83\x91\x73\x09\xf2\xd5\x13\xf6\xbd\xbc\x17\x13\x76\x9d\xf2\xec\x6e\xc2\xde\x62\xf8\xef\x0f\x7a\xd9\xe6\xc3\x6b\x10\x4a\x1c\xec\xc7\x7b\x9c\x1b\xab\x8f\x68\xa5\xdd\xfa\xff\xa9\x41\x0c\xc0\xba\x62\xdf\xff\x33\x11\x79\x1d\x5c\x1f\xff\xec\x9e\x88\x3d\x61\xea\xdf\xc0\x6b\xff\x94\xaf\xe2\x7e\x9a\x8f\xdf\x85\xff\x6b\xcf\x2f\x6b\x71\x81\xed\x49\xa7\x5c\x2b\x2a\xed\xd7\x95\xd8\x2c\xe3\xfa\xa5\xdc\xcc\x6f\x1e\xb6\x15\x28\x7d\x3c\x76\xa9\xed\x23\x40\xf7\xf4\x53\x3b\x5e\x27\x89\xce\xcb\xac\x7f\xf3\x5f\x55\x5b\x6d\x6b\x6f\xa1\x5a\x85\xc5\xb6\x5d\x0a\x60\x2d\x18\x0a\x3f\xc1\xaf\x2d\xfe\x5b\x2f\x17\x80\xb5\x3a\x6c\x87\xb7\x15\xe7\x08\x9c\x75\x54\x69\xaa\xbf\x21\xaf\x53\x01\x8c\x53\xde\x14\xf5\x01\x81\xda\x0a\x73\xae\x91\xb9\xb2\x9c\xf7\x98\x31\x9b\x65\x02\xc8\xb9\x33\x01\x52\x8b\x8c\x38\x06\x93\x5d\x60\x11\x05\x2f\x1f\x0f\x8a\x09\xb3\xdc\x20\x59\x95\xde\x5b\x4b\x21\x94\x1b\xed\x31\xa6\x04\x10\x51\xd7\x46\x9f\xd0\x6e\x0f\xc2\x4a\x1f\x74\xc8\xc2\x36\x7e\x17\xbc\x05\xc1\xe4\x5e\x8b\x22\x38\xcd\x6b\xa6\x45\x65\x6b\x56\x22\x54\xbf\x2a\xc4\x7f\x6b\x0c\xba\x46\xce\x55\x71\xa0\x0c\x8a\xe9\x3d\x85\xbf\xfc
\x92\x17\x1b\x7c\xd0\x6e\x75\x21\xf0\xcc\x44\x96\x20\x5c\x2f\xe8\x75\x5e\x26\x7a\x09\x1a\x87\x45\x0f\x87\x63\x44\x5b\x7b\xd0\xd0\x35\x27\x6c\xc8\xc9\x60\x4e\x13\xc8\xb4\xcd\x44\x0e\x84\x2b\xcd\x28\xd5\x50\x7c\xf2\xb8\x47\x77\xb3\xb9\xe6\xd0\x7f\xdb\x78\x6c\x37\x45\x31\xcc\xb6\x06\xb0\xea\xe9\x23\x32\x68\x1a\x12\x23\x44\x16\x4d\x61\x60\xe4\x8b\xad\xf5\xd7\x4a\xe9\xcf\xd5\x0c\x3f\x09\x2e\x01\xee\x55\xae\x1c\x1e\x94\x54\x93\xdd\xfe\xc3\xf4\x55\x36\x0b\x11\x88\xe4\x21\x98\x78\x5f\x26\x3c\x06\x26\x90\xd5\xa8\x0a\x99\x09\xa6\x00\x85\x30\x57\x79\xb9\x3c\xf2\xc4\x24\xe6\x15\x77\x0f\x64\x3a\xb9\x48\x39\x3c\x65\x80\xaf\xe8\xa8\xe5\x1a\x46\xcf\xa4\x57\xab\xb1\x04\x7e\x3c\xa1\xc3\x1f\x72\x25\x31\x33\xde\xf5\xdd\x95\x63\x1e\x6b\xf0\x8a\xb6\x70\x25\xbc\xec\xfa\xce\x0b\xd0\xd3\x82\x0c\xcc\x2b\x44\x51\x7c\xee\x0b\x3c\x8c\x86\x0e\xbd\xba\x21\x9e\x36\x57\xff\x62\xef\x86\x6e\x50\xf1\x88\x95\x6e\x46\xc6\x5c\x51\x9d\x60\xe7\x4a\xdb\xec\x13\x32\x30\x02\xbb\x1b\xd5\x58\xf2\x6d\xa5\x72\x8b\x6b\x09\x45\x55\x34\xa5\xcb\xc2\xa7\xf7\x32\x0f\xe8\xd6\xa1\xb6\x6b\x21\xd8\x9b\x4c\xac\xde\x7c\xcc\xc4\x6a\x61\x67\x7a\x0a\x1d\x9a\x9a\x1e\x35\x49\xd7\x07\x2e\x8e\x3c\xd5\xaa\x9d\xfc\x70\x0f\x39\x69\xad\x4b\x58\x4e\xd0\x27\xb9\x62\x5e\x5f\xd6\xf4\x07\x18\x20\x44\x5c\x67\x83\x6f\xb4\xec\x93\x5f\x73\x5d\x48\xb0\x01\x50\xab\x0e\x19\xd2\x7f\xfe\xeb\xad\x32\x66\x43\xae\xb7\x9b\x2a\x64\xc6\x1e\xf6\x5c\xb9\x0b\xaf\x1b\x17\xfa\x69\xd1\xe9\x30\x81\x79\xca\x1f\x14\xf1\xd8\x8c\x72\x3d\x0d\xbb\xd6\x6a\x00\xa2\xe0\x5a\x6b\x60\xe0\xfc\x2e\x53\xd6\xd3\x27\x9d\x92\xe5\x24\xd0\xff\xe7\x49\x12\x6a\x5a\xf8\x48\xdb\x5c\xf9\xbc\x54\x63\xb5\x26\x89\x75\xe1\x55\xec\x0d\x27\x39\x9c\x17\xbc\x10\x13\x4b\xba\x42\x74\x85\x14\x0f\x3b\x5a\x72\x10\x97\x76\x2a\x66\xfb\x76\xf3\x53\x3d\x22\x7f\x65\x79\xd1\x7b\x22\xcf\x58\xed\xe2\x4e\x34\xe0\xcc\x7b\xdb\xda\x1e\xe9\x08\x28\x25\x60\x33\xdb\x53\x36\xe2\x59\x66\x51\xfe\x54\x2b\xb3\x84\xe3\xe1\xab\xa4\xa3\x9d\x1b\x11\xdd\xa5\x5a\xaa\xd1\x67\x51\x85\xe2\x02\x16\x7b\xc1\x7c\x69\xee\x75\x38\xe8
\x72\xac\xd8\x93\xd8\x91\x1c\xe0\x15\x16\x1a\xea\xc9\xd8\x38\x73\x5a\xd5\xdd\xcb\xee\xa9\xfd\x17\xc2\xdf\x0d\xcf\xe0\x8b\x6d\x89\x0f\xd5\x6e\x15\xde\xe2\xd8\xa9\x30\x81\xf2\x46\xf6\xd7\xc0\xc1\xe6\xac\x42\x61\xd8\x3a\xa4\xe0\x82\xfc\xcd\x33\xf4\x9b\x67\xe8\x7f\xb8\x67\xe8\x53\xba\x85\x00\x1b\xf3\x9c\x3e\xa1\x9e\x00\xf9\x01\xdb\xd1\xd5\x3a\x3a\xc7\xb1\xd5\x3a\x9e\x04\xb2\xdb\x41\xa6\x63\x13\xe8\x6f\x89\x30\xcc\xf8\x2c\x79\x74\x27\x54\x67\x8c\xde\xd2\x17\x75\x2a\xa0\x3e\x2d\x82\xa5\x8d\x7d\x29\xf8\x75\x3f\x94\xc5\x43\x9d\x88\x34\xb8\x8d\x10\xc4\xec\x13\xb0\x3d\x4d\xc7\x8f\x00\x34\xa6\x33\x47\x6c\x9d\x53\x16\x1e\x06\x23\x91\x26\x09\xc1\x52\x35\x2a\xe8\xa1\x98\x38\x5b\xf1\x22\xd5\x3a\x69\x85\xc6\x3d\xe9\x00\x36\x12\x65\x86\x0e\xde\x19\x1a\xa3\x79\x08\x18\xb3\xa3\xe8\x93\x2e\x7c\x8a\x06\xe6\x63\x80\x16\x06\xac\xa6\xb8\x84\x5c\x4a\x3f\x1c\x81\xc0\x21\x77\x0e\x17\xc2\x88\x2d\x45\xc4\x41\x7a\xd5\x82\xf7\x22\xee\xb2\x4f\x42\x52\xa4\x46\x3a\x48\xde\xac\xa7\x23\x6a\x09\xe5\x2e\x64\x9b\xf0\xc5\xd8\xcd\x55\xb3\x10\x2c\xb4\x1c\x5b\x6e\x91\x24\x96\x76\x71\x9f\xa4\xb0\xe5\x98\x5e\x80\xfe\xe1\xb0\x1b\xae\xf5\xdc\x39\xa3\x82\x4e\xa0\x9c\xe1\x07\xe9\xf7\x90\x8e\xb3\x1d\x88\xdc\x99\xab\x99\x53\x9a\xf5\xd8\x2f\x87\xdc\xc3\x70\x29\x62\x16\x1b\x53\x83\x5c\x8e\xfe\xe5\x32\x61\x79\x19\x6d\x80\xad\xb2\x7a\x4e\x85\xe7\x56\x73\xc7\x4e\xe6\xca\x3c\x88\xc0\xd5\xb2\xe5\x90\x17\xff\x60\x8c\xd5\x5c\xfe\x55\x38\x78\x16\x91\x77\x85\x88\x2c\x7c\x38\x69\xd5\x8a\x5e\xb3\xc4\xa1\x08\xb0\xf0\x98\x92\x32\x8d\x79\x21\xa6\x73\x8f\xb6\x91\xe8\xe9\xb4\x28\x0f\x32\x99\xf3\xb0\x63\x21\x8e\xb1\x76\xd2\x26\x72\x25\xa2\x5d\xd4\xd0\x01\xea\xa7\x89\xf8\xed\xd9\xf6\xeb\x7a\xb6\x21\xcb\x2e\xe6\x0c\x8e\x19\x5a\x6a\xea\x95\xff\xf9\x61\x83\x2b\x58\xd0\x92\x7c\xc4\x38\x7f\xc2\x67\x67\x8b\x0d\x3c\xce\x9e\x1f\xfc\x0e\xea\xbf\xce\xfc\xc3\xd6\x5f\xd6\x01\x05\x42\xc3\x2c\x0c\x83\x8b\x45\xb8\x74\x8c\x41\x3b\x38\xac\xdf\xcd\x32\xf3\xab\x02\x27\x0d\x79\xb8\x1a\x8b\xdb\xc1\x95\x2e\xac\xa5\xad\x04\xde\x77\x3d\x16\x77\xc0\xea\xce\x8b
\x17\xb9\x1b\xf5\xea\x09\x68\xb1\xff\x33\xb5\x3b\x28\x01\x73\x97\x8a\x45\x99\x25\x07\xc1\x8d\x6f\xaf\xce\x8f\x9d\xb5\x01\x96\x73\xa7\xee\x51\x51\x13\x67\xb6\xaa\xc0\x22\x26\x38\x68\xa4\x13\xb6\x2c\x57\x2b\xd0\x2f\x21\x60\xa8\x3d\x8c\x40\x1b\xbe\xcc\x0b\x7b\x9f\x20\xd3\x0c\xcf\x8b\xb9\xd2\x4a\xb0\xf9\x17\xc7\xf3\x2f\xcc\x55\x96\xf1\xa8\x10\x19\x92\x0c\x24\x3c\x2f\x58\x2e\xd6\x60\x6a\x51\xa5\xb7\x57\xe7\x90\x95\x58\x6c\xb0\x38\xf7\x64\xc5\x7c\x4f\xe4\x7c\x06\xad\x1f\x20\xa8\x56\x81\xe6\x15\xb4\xfd\x25\xcf\x99\x54\x73\xf5\xd1\x14\x71\xbc\xd6\x7a\x9d\x88\xa9\x9d\x90\xe9\x5b\x72\x3d\x7e\x7c\x85\x2d\x80\x9f\x87\xb0\x7e\x73\x21\x72\xa5\x95\x8c\x78\x02\x09\x39\x73\x05\x56\xf3\xc4\x74\x06\x5c\xa3\xf3\x2f\xa6\xf3\x2f\x18\x84\x4f\x0b\xc6\xa3\x48\xa4\x85\x88\x51\x5c\xf4\x4c\xb1\x14\xf0\x8b\x91\x98\xb0\x42\xf0\x6d\x6e\x29\x9d\x59\x6a\xde\x98\xf0\x34\x64\x52\x11\xd2\x69\x29\x15\xcf\x76\x08\x66\x42\xb9\x70\x4a\xfe\xd8\xcd\x95\xf8\x05\xe8\x3f\x25\x30\x80\x96\xb9\xa3\xa5\x21\x61\x02\xd3\xe5\x99\xda\x4d\xd9\xf7\xc8\xd0\x80\x14\xa8\xb7\x57\xe7\x96\xde\x88\x72\x40\xe7\x2a\x8f\x36\x62\x2b\xd8\xc7\x4d\x51\xa4\x1f\x27\xf8\xbf\xf9\x47\x88\x38\x2a\xcd\xf0\xd3\x09\x33\x53\x64\x0c\x55\x8b\x97\x4f\x76\xa0\xe2\x5a\xa6\x24\xf9\x3e\x57\xc0\xc5\x9e\x85\xe8\x5e\x33\xda\x50\x63\xf0\x04\xaf\xe0\xc2\xcd\x29\x0e\xf2\x8a\x6f\xcc\xe0\xfc\x5f\x76\xb6\xf2\x55\x9a\x01\xb4\xea\x5e\xae\x55\x60\x90\xe4\x90\xb2\x35\x35\x3f\x98\x29\xf6\xfd\xcd\xcd\x25\xfb\xee\xf4\xc6\x1a\x3b\xb7\x57\xe7\xb8\x2e\x80\x4e\x85\x71\xf6\xe7\xfa\x14\xdf\xec\x52\xf1\x97\x3f\xff\x65\xae\x98\x55\x09\x57\x76\xa4\x71\x47\x4f\x90\x12\x16\xf0\x4e\x10\x98\x05\x2a\x67\xa8\x0f\x25\x77\xa8\xf9\x19\x5a\xe7\x0f\xe4\x2d\x80\x3b\x2a\xd1\xfa\xae\x4c\x9d\x9b\x3b\xb4\xc3\x4c\x85\xb7\x57\xe7\x50\x3a\xd0\x29\x15\x1b\x50\x30\x13\xce\xfb\x02\x13\xcf\x6d\x63\xcc\x7f\xdf\x6b\x19\x33\xae\x76\xe6\xb7\x58\x34\x2c\xcb\x4c\xac\x74\x26\x26\xf6\x9b\xa6\x00\x5e\xc8\xa5\x4c\x64\xb1\x83\x53\xca\x2a\xcb\xa7\x96\x23\xdf\x14\x60\x5e\x33\x04\xf0\x36\x0b\x0c\x85\x64\x5f\xde
\xe6\x21\x02\x1c\x26\xcd\xa9\x13\xe2\x43\xc7\xfc\x76\x99\x09\x7e\x67\x56\x37\x95\x30\x7d\x45\xaa\xad\xe2\x0d\xde\x31\xab\x52\x45\xb8\x34\x4c\x1b\x68\xf5\xd3\xcb\x29\xd9\x05\xf2\xfd\x36\x5c\xbe\x5a\xc9\x48\xf2\x84\x4e\x8e\x65\xb9\x02\xd9\x18\x9e\x93\x64\x11\x82\x0f\x4d\x21\xf0\xca\xb0\x92\xf9\xb8\xa0\x96\x62\x2d\x11\x70\xfc\x20\x8b\x0d\xe6\x15\x4c\x71\x9e\x79\x2a\xf3\x69\xa4\xb7\xb0\xdf\xae\x61\x29\xe5\xf4\xe8\x05\x1c\x78\x6d\x9d\xb3\x97\x16\x6a\xb7\x4d\x8b\x1d\xad\xbd\x57\x6c\x2b\xd7\x9b\x02\x84\x5c\xa0\x76\x80\x44\xc8\x6d\x9a\xc0\xa3\x8f\x22\x8c\x16\xef\x9b\x8b\x2d\x57\x85\x8c\xba\x62\x4a\xad\xa2\xdc\xc3\x30\x9e\xcb\x5d\xd1\xef\xc7\x7b\x4f\x3c\xfb\x1c\x29\xf4\x83\x13\x99\xd5\x0f\x64\x3a\x03\x41\x5e\x26\x20\xf0\xaf\x8b\xbe\xee\x7b\x42\x7d\x9c\xa9\xdd\x47\x4f\x42\xca\x55\xa0\x7d\xd5\x53\xbb\xdd\xff\x3c\xd1\x34\x6b\x8c\xcf\x15\xa0\x3a\xcd\x81\x41\x72\xb0\xbd\x77\x8c\xbb\x52\xcc\xcc\x5e\xda\x45\x93\xc8\x25\xd4\x4d\x67\x45\xce\xf2\x32\x85\x7c\x82\x42\xb3\x94\x47\x77\xc7\xa5\x32\xff\x63\x0e\x43\xdc\xee\x79\x48\x4e\x34\x57\x7a\xc5\xca\x02\x37\x8e\x5d\xc2\xe0\x14\x09\x5c\x01\xfe\x81\xb6\x15\xc5\x46\xc7\x2e\x2f\xcc\x94\x09\xe3\x67\x5a\x74\x4a\xf4\xd2\xaf\xdf\xb0\x4b\x53\xa1\x59\xc4\x54\x37\x77\xdd\x97\x8a\x9d\xfc\xcb\xbf\xc0\xf7\xcd\xe0\xbe\xd3\x9a\xad\xb4\x66\xdf\xb0\xe9\x74\xfa\x9f\xf8\x37\x53\x28\x57\x3b\xfa\x17\x57\xbb\xa9\x29\xee\x5d\xa6\xb7\x2f\x57\x5a\xbf\xa2\xbf\x83\x6c\xb2\xf9\x0f\xb9\x62\x2f\xcd\x97\x6e\xa1\xaa\x1b\xfd\x72\x5e\x7e\xf9\xe5\x57\xff\x6e\xbe\xfa\x8a\xfd\x0d\xbf\x13\x7c\xfd\x1f\x61\x53\xbf\xda\xd3\xd4\x3f\xf0\x7b\x3e\xa4\xad\xec\x1b\xb8\x6b\x4c\x01\xbd\x6d\x94\xf9\xcb\x77\x5a\x4f\xe1\xf5\x1f\xb6\x0e\x8b\x35\xdf\xc0\x56\x04\xdf\xfa\xcf\xa0\xd9\xcc\xb6\xfb\xf7\x7b\xda\x8d\xa8\x7a\xd7\x72\x2c\xfe\x9d\xd6\x2f\xa7\x53\x73\x6e\xd1\xb8\x62\xab\x5f\xbe\xaa\x0e\x34\x74\xa0\xd9\x7e\xf3\xf1\x19\x36\xff\xed\xe9\xf5\xc9\xd5\xd9\xe5\xcd\x87\xab\x57\x6f\x6c\x0f\xfc\x0c\x04\xbf\x67\x56\xdc\xda\x35\xfc\x5f\xf7\x34\xfc\x3b\x6d\xdb\x0c\x8d\x7e\xf3\x0d\xc3\xd9\x4c\x97\xd3
\x77\x5a\xff\x6d\x3a\x9d\xfe\x83\x3e\xe6\x6a\x37\x31\x17\x93\xf9\x4e\x8a\x47\xf9\x7b\x9e\xe5\x1b\x9e\x98\x3e\x05\x6d\x70\x9d\x68\x2d\xd1\x16\x27\x57\xb5\xc2\x6e\xd5\xd6\x17\x07\x95\xc1\xc4\xc2\xb7\xfe\x9f\x6f\x98\x92\x89\x9f\xbe\xa0\x0e\x98\xa7\x1b\xa0\x96\x88\xee\xdc\x76\x71\x2a\x9d\xcb\x1d\x4b\xeb\x1b\x17\xf3\xce\x76\x56\xa1\xc0\x1c\xf7\x73\xf5\xa2\xe5\x44\x3f\x36\xa6\xdd\x14\x3e\x30\x17\xd4\x0b\xab\xdf\x6e\xaf\x05\xa7\xac\x85\x23\x0b\x81\x68\xdc\xad\x8a\x72\xd4\xda\xec\x43\x77\xe1\x05\x64\x55\x60\x76\xbe\x38\x7e\x41\x89\x42\xbe\x8a\x2a\x91\xfc\xfc\x8b\x95\xd6\xd3\x25\xcf\xa0\x75\xbf\x1c\xef\xa6\x7f\x9d\x7f\x81\xfd\x41\xe3\x03\x0d\x23\x28\x7c\xfe\x05\x7c\x0a\xcb\x61\xae\xfe\x70\xfd\xe1\x62\xae\xbe\xf9\xe6\x9b\x6f\x70\xb4\xcc\xbf\x5b\x62\x2f\xe6\xba\x82\xe3\x16\xed\x94\x32\xb7\x92\x92\x62\x5d\x26\x3c\x9b\xab\xf6\x70\x4d\x2c\xfc\xa1\x39\xf1\xc1\x1b\x5a\x67\x13\xab\x6e\x01\x22\x65\xf6\x8c\x43\xdf\xe4\xc7\xff\xd7\x34\xf9\x23\x99\x88\xee\x90\x0f\x87\x60\x6a\x17\xf3\x1b\xbb\x54\xcd\x60\x9b\xf5\xeb\xed\xac\x95\x4c\x04\x6d\x5c\xbb\xb8\x2f\x45\x96\x6b\xe5\xd7\x0c\x3d\x08\x80\xdb\x0c\x02\x00\xec\x1b\xf6\xfa\x3f\x6b\x9f\x9a\x79\xb0\x1f\x7e\x55\x39\x09\x18\xf3\x45\xcd\xbf\x80\x56\xcf\xbf\x78\xc3\xe6\x5f\xb4\xad\x9b\x6a\xc3\xa6\xd8\x94\xf9\x17\x13\x5f\x00\x34\xe3\x82\x6f\xb1\x90\xf2\xcb\x2f\x7f\x1f\x61\x13\x30\x75\x2d\xf8\xa6\x69\x52\xf7\x17\x83\x26\x9e\xd5\x42\x67\x76\x20\x6c\x0a\xe4\x83\x48\x92\xa3\x3b\xa5\x1f\x50\xe9\x1b\xe2\x44\x94\xa5\xcc\x70\x79\x54\x27\x97\xb4\xc9\x6a\x33\x6e\x93\x36\x5d\x35\x4e\xde\x0e\x26\x74\xae\x3e\xc2\xd2\xb1\x33\x4a\x74\x44\x40\x07\xea\x6a\x82\x47\x0d\xad\x04\x9b\x63\x41\x0b\x61\xae\xa0\x18\x37\xe7\xec\x25\x00\xbf\xa8\x2b\x0d\xcb\xda\x3e\x9e\xfe\xf2\xe7\xbf\xbc\x7a\x73\xc8\x3c\x55\x8b\xab\x4c\x15\xf4\x07\xcb\x78\x3d\xfd\xea\xf5\x57\xf9\xfc\x0b\x1a\xf5\xf6\x27\xf6\xb9\xcc\x8b\x1f\x6b\x16\xd8\x23\xe4\xc6\x8d\xe1\xf0\x5c\xc1\x0b\xdb\x54\x6c\xe6\xd0\xa0\xc5\x55\x35\xac\xa0\x57\xd6\xad\x03\x8f\x33\x2b\xc4\x6e\xda\x3d\xca\xbc\x73\xe3\x85\x8f\x2d\xf6\x90
\xf1\x34\x15\x99\xf5\x95\x37\xc2\x19\xa0\x6a\x0e\xb5\xd8\xa3\xbf\xed\x30\x33\xcb\xa6\x56\x34\x7c\x0d\x86\x6e\xda\x3e\x73\x17\x65\x92\x74\xce\xdc\x7e\xb1\xe4\x8b\xdb\xf3\xf3\xc5\x8f\xb3\xf3\xdb\x53\xdb\xfd\x56\xf1\xe1\xe0\x6b\x9d\x63\xe2\x5a\x42\x63\x82\xb8\xaa\x02\xb0\x54\xe5\x56\x64\x96\x29\xcc\xf7\x1a\x71\x24\x65\x92\x54\x85\xa9\xe7\xea\x23\x95\x03\xc7\x40\xa9\xa4\x35\x53\x7a\x07\xae\x5a\x3f\x7c\xed\xa3\x29\xfc\x23\xfe\xf6\x88\xf9\x4e\xbc\x61\x17\xae\xd6\x8e\x71\x25\xc2\x89\x03\xb6\x03\xe6\xdb\x76\x6d\x87\xa7\x96\xde\x7f\xdc\xf6\xb8\x55\x20\xfa\x65\x4e\x5e\x54\xcc\x7f\x92\xdd\x81\x63\xf7\xb1\x0a\x05\x77\xee\xd2\x18\xa3\x86\x50\xee\x04\x05\xd3\xf3\x82\x38\x8b\x71\xcc\xe6\x0a\x0f\x62\xd3\xa6\x42\x77\xb7\x89\x9d\x51\x04\x29\xe1\x6a\x5d\xf2\xb5\xc8\x27\xcc\x56\x3e\x57\xf6\x75\x6a\xdf\x3a\x0e\x98\x03\x8c\xac\xb5\x25\x54\x4b\x01\x96\x6a\xae\xa8\x4f\x70\xc3\x52\xf1\x98\x8e\xfa\x87\x6b\xd7\x1d\xca\xfb\xc6\x82\x48\xf3\x5d\xcd\x15\x4e\x2e\xfa\xc6\x2c\xd8\x10\xcc\x8e\xe6\xdd\xc4\x01\x1e\x8c\xef\xba\x98\x15\x7a\x0d\xb0\xc7\xb9\x72\x2c\x58\x08\xce\xb0\xef\x35\xaf\x0d\x8a\x4d\xda\x7f\x9e\xd8\xc9\xb0\x7b\x82\xda\xd6\xbe\xea\x0f\xbe\x03\xcc\x86\x5b\xb4\xbe\xe5\xfb\x97\xad\x3f\xc6\x06\x02\x72\x78\x70\x70\x74\x51\x23\x02\xf5\x59\x7b\x6b\x6c\xbf\xf0\x3b\x9d\xd9\xa3\xba\x5c\x26\x23\x9a\x84\xdf\xef\x6d\x14\x1e\xc9\xfd\x8d\x1a\xe0\x91\xbe\xaa\x6d\x2d\xb3\x4c\xfb\xaa\x5d\x6a\xdd\x31\x2f\x4f\x88\xd9\xad\x34\x8a\x7e\xb0\x6f\x30\xca\xa8\x78\xcc\x7a\x19\xc0\x07\x54\x1f\x22\x7b\xfa\xf4\x35\x28\x91\xf9\xa3\x9a\xe3\xed\xa7\xc1\x2d\x72\x16\x02\x5d\x76\xa3\x4e\x58\xba\xe7\x2a\x07\x6c\xc7\x31\x69\x9f\x29\x98\xde\x22\x24\x1e\x2f\x66\xf3\x4c\x60\x13\x99\xf5\x3f\x71\x8b\x68\xe2\x67\x6e\x02\x8d\x8c\xca\x2c\x37\xc7\x25\x9d\x77\x74\x6a\xeb\x8c\xf1\xb9\xb2\x6c\x30\xf6\x38\x9e\x59\x7f\x70\xe6\xfe\x8a\x1c\x4b\x29\x4a\xd6\x41\x50\xa8\x00\x2f\x39\x9d\x86\x73\x75\xcf\x33\xc9\x15\x60\x9a\x97\x39\xe8\x0d\xc3\x93\x6e\xc7\xe8\x03\x47\xc0\x91\x87\x4e\xe6\x3d\x67\x5e\xcd\x0c\xa8\xdc\xf3\xbf\x33\xff\xf7\x8f\xdf\xfd
\xff\x01\x00\x00\xff\xff\x8b\xc2\xd9\xfd\x45\xd9\x04\x00") +var _adminSwaggerJson = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xec\xfd\x7f\x73\xe3\x36\x96\x2f\x8c\xff\xbf\xaf\x02\xd5\xfb\xad\xea\xc9\x8c\xed\xce\x4c\xe6\xee\x77\xca\x5b\xb7\x9e\x47\xb1\xd5\x1d\xdd\xb8\x6d\xc7\x96\x93\x4d\x5d\x6f\x29\x10\x09\x49\x18\x53\x80\x02\x80\x76\x2b\x5b\xf3\xde\x9f\xc2\x01\x40\x82\x14\x29\x51\x3f\x4d\xb9\x31\xb7\xea\x6e\xc7\x22\x41\xe0\xe0\xe0\xe0\xfc\xfc\x9c\xff\xf9\x37\x84\xde\xc9\x17\x3c\x1e\x13\xf1\xee\x1c\xbd\xfb\xdb\xd9\xb7\xef\x4e\xf4\xdf\x28\x1b\xf1\x77\xe7\x48\xff\x8e\xd0\x3b\x45\x55\x42\xf4\xef\xa3\x64\xae\x08\x8d\x93\x0f\x92\x88\x67\x1a\x91\x0f\x38\x9e\x52\x76\x36\x13\x5c\x71\x78\x11\xa1\x77\xcf\x44\x48\xca\x99\x7e\xdc\xfe\x13\x31\xae\x90\x24\xea\xdd\xbf\x21\xf4\x2f\x18\x5e\x46\x13\x32\x25\xf2\xdd\x39\xfa\xbf\xe6\xa5\x89\x52\x33\x37\x80\xfe\xb7\xd4\xcf\xfe\x37\x3c\x1b\x71\x26\xd3\xc2\xc3\x78\x36\x4b\x68\x84\x15\xe5\xec\xc3\x3f\x25\x67\xf9\xb3\x33\xc1\xe3\x34\x6a\xf8\x2c\x56\x13\x99\xaf\xf1\x03\x9e\xd1\x0f\xcf\x7f\xfd\x80\x23\x45\x9f\xc9\x20\xc1\x29\x8b\x26\x83\x59\x82\x99\xfc\xc0\xc5\xf8\xc3\xff\xd0\xf8\x8c\x8b\xf1\xbf\xe0\x1f\x33\xc1\xff\x49\x22\x65\xfe\x23\xe6\x53\x4c\x99\xf9\x37\xc3\x53\xf2\xaf\x6c\x50\x84\xde\x8d\x89\xf2\xfe\x53\x2f\x3d\x9d\x4e\xb1\x98\x6b\xf2\x7c\x24\x2a\x9a\x20\x35\x21\xc8\x7c\x14\x39\x7a\xf1\x11\xc2\xe8\x5c\x90\xd1\xf9\x6f\x82\x8c\x06\x8e\xea\x67\x86\xda\x57\x30\xb5\xdb\x04\xb3\xdf\xce\x2c\xcd\x60\x64\x3e\x23\x02\x16\xda\x8b\xf5\xe8\x9f\x88\xea\xc0\xb0\xf9\xf3\x7f\xf3\x1f\x17\x44\xce\x38\x93\x44\x16\xe6\x87\xd0\xbb\xbf\x7d\xfb\x6d\xe9\x4f\x08\xbd\x8b\x89\x8c\x04\x9d\x29\xbb\xb3\x1d\x24\xd3\x28\x22\x52\x8e\xd2\x04\xb9\x91\xfc\xd9\x98\xb5\xea\x6d\xc6\x0b\x83\x21\xf4\xee\xff\x27\xc8\x48\x8f\xf3\xef\x1f\x62\x32\xa2\x8c\xea\x71\xa5\xe1\xa6\x7c\xba\xef\x0a\x6f\xfd\xeb\xdf\xaa\xfe\xfd\x2f\x6f\x45\x33\x2c\xf0\x94\x28\x22\xf2\xfd\x37\xff\x2b\xad\x45\x6f\x92\xfe\xb8\xd9\xd1\xf2\xa4\x4b\x2b\xbd\x81\x7f\xe1\xe4\x04\x71\x31\x46\x4f\x64\x8e\x80\xa5\x48
\x8c\x14\x87\xbd\x13\x44\xf2\x54\x44\x8b\xab\xa7\xf0\xbe\x66\xb3\xf2\x2f\x82\xfc\x9e\x52\x41\xf4\x36\x29\x91\x92\xd2\xaf\x6a\x3e\x83\xe9\x49\x25\x28\x1b\xfb\x44\xf8\xd7\x49\xa3\x45\x59\xee\x5c\xb1\xb0\x6b\x3c\x25\x9a\xd3\xf4\x1a\xec\x1b\x85\xf5\xa0\x21\x49\x38\x1b\x4b\xa4\x78\x7b\x96\x66\xce\xda\x1a\x2b\x33\x2f\xd4\x2e\xec\x91\x75\xdc\x23\x11\x66\x68\x48\x90\x16\x37\x34\x26\x82\xc4\x08\x4b\x84\x91\x4c\x87\x92\x28\xf4\x42\xd5\x84\x32\xfd\xdf\x33\x12\xd1\x11\x8d\x1c\xcd\xda\x43\x1b\xf8\xe7\x72\xca\x3c\x48\x22\xf4\xc4\x9f\x69\x4c\x62\xf4\x8c\x93\x94\xa0\x11\x17\x45\x3e\x7e\x64\xfd\x89\xa6\xc3\x74\x48\x19\xc8\x13\x4d\x4b\xc7\x21\x7f\x71\xe4\xfa\x0b\xd2\xdf\x43\x29\xa3\xbf\xa7\x24\x99\x23\x1a\x13\xa6\xe8\x88\x12\x59\x1e\xed\x2f\xdc\x1e\x21\x74\x8a\x34\x9d\x89\x50\x40\x6f\xce\x14\xf9\xa2\x24\x3a\x45\x09\x7d\x22\xe8\xfd\x15\x95\x0a\x75\x6e\x7b\xef\x4f\xd0\x7b\x23\x04\x10\x88\xdf\xf7\x07\xa0\x70\xf6\xef\xff\xf6\xe4\x89\xc2\xe3\xb2\x24\x79\xd7\xd1\x22\xea\xde\xdc\x7e\xf9\x08\xff\xfd\x6f\xfe\x38\x76\xbf\x56\x5f\x29\xe6\x3e\xc9\x2f\x13\x7b\x93\x34\xbd\x3f\x80\x60\xc5\xab\x43\xea\xbd\xda\xf6\xe6\xd0\xe3\x96\xaf\x0e\x79\x64\x77\x87\x5e\xc3\xbe\xef\x8f\xb7\x77\x79\x6c\x73\x73\x60\x05\x47\x1a\x53\x66\x24\x40\x26\x10\x84\xd4\x42\xc0\x4d\xbb\x25\x2b\xdd\xe6\x22\xf1\x56\xe6\xdd\x25\xee\x8a\xf0\xa8\xd2\xc2\x75\x27\x74\x4a\x57\xed\x6f\x8f\xc5\x5a\x65\xb6\x92\x9c\xa5\xd3\x21\x11\x9a\x0c\x8e\x59\x61\xb5\x43\xcd\xbc\x2a\x15\x8c\xc4\x0d\x96\xf9\x7b\x4a\xc4\x7c\xc9\x3a\x47\x38\x91\x75\x0b\xa5\x4c\x11\x6d\x9f\x94\x7e\x1e\x71\x31\xc5\xca\x3e\xf0\x1f\x7f\x5f\x97\x10\x8a\x3f\x91\x55\xfb\xdf\x33\xbb\x19\x61\x09\x6c\x30\x4d\x13\x45\x67\x09\x41\x33\x3c\x26\xd2\x52\x24\x4d\x94\x3c\x81\xc7\xb4\x4d\x44\xc4\x69\x76\xbd\xc2\x17\x9c\x5a\x91\x4a\x73\xe8\x47\x99\xce\xcf\xc8\x17\x05\x23\x3d\x32\x50\x2c\x80\x44\xfe\x75\xb9\x07\x52\x6e\xc6\x33\x92\x0b\x35\x18\xce\xcf\x9e\xc8\xc2\x77\x6b\x39\x07\x33\x84\x95\x12\x74\x98\x2a\xa2\xd7\xad\xc7\x70\x12\x0f\x04\xbe\xd1\x3e\x9a\x88\x86\xd7\x5b\x70\x4c\x05\x89\x60\x6d
\xeb\x1c\x98\xec\x2d\xbd\x6e\x2d\xef\xe7\x66\xf5\x5a\xfc\x6b\x65\xab\x82\x02\xd9\x96\x3f\xb2\x47\x86\x4e\xd1\x65\xf7\xfe\xa2\x7b\x7d\xd9\xbb\xfe\x74\x8e\xbe\x9f\xa3\x98\x8c\x70\x9a\xa8\x13\x34\xa2\x24\x89\x25\xc2\x82\xc0\x90\x24\xd6\x0a\x95\x9e\x0c\x61\x31\x65\x63\xc4\x45\x4c\xc4\xfe\xc8\x58\xfa\x95\xb0\x74\x5a\xba\x29\xe1\xef\xf9\xec\x4b\x6f\x68\xfd\x29\xfb\xa9\xf0\xcb\x7f\x2f\x10\x18\x56\xac\xbf\xed\x8d\xf6\xaa\x1a\xdb\x11\xdb\xfd\xc7\xa5\xba\x1d\xc0\xec\x0f\x16\x72\xb0\x90\xab\x29\x13\x2c\xe4\xad\x28\xbc\x7f\x93\x68\xc7\xda\xc0\xe1\xaf\x91\xe3\x30\xf7\x8f\xeb\xca\x38\x84\xb5\x1f\x6c\xe3\x60\x1b\x07\xdb\x38\xd8\xc6\x45\x52\x05\xdb\x38\xd8\xc6\xad\xb3\x8d\x1b\x6c\x63\x50\xd4\x7c\x45\x2d\x9a\xd0\x24\x16\x84\x99\xb0\x8c\xc2\xf2\x69\x40\xbe\x90\x28\xd5\x14\x18\x68\xc3\x81\xc7\xa4\xf8\x97\xc2\x7f\x40\x1c\xa7\xf8\x96\xdc\x60\x98\x5c\x33\x5c\xfb\xd5\xcc\x17\xb1\xf6\x9b\xe0\xb9\x68\xf6\x1e\xfc\x85\xc6\x95\x4f\xc3\x5f\x56\xac\xc1\x3d\xb3\x64\xb2\xee\x91\xda\x59\xb9\x07\xac\x02\x5c\xf9\x8c\x20\x4a\xcc\x07\x58\x29\x32\x9d\xa9\x35\xbd\x32\x18\x25\x5a\xcd\x5e\xa6\x56\x5f\xf3\x98\x74\xdd\xf7\x7e\x43\x46\xbb\x27\x31\x1a\xce\xed\xb1\x18\x11\x41\x58\x44\xea\x47\xe8\x63\xf9\x94\x8f\xb0\x4a\x37\x2f\x7c\x4f\x7e\xe4\x42\xbf\x7e\x14\xe1\xb8\xc2\xcc\x0f\xa1\xa3\x6f\x72\x72\xdf\x58\x08\x6f\x53\xa9\xf3\xe6\x7c\x61\x1b\xca\xd0\xe0\x39\xdb\x9e\x92\x4d\xfd\x6c\x5c\x20\x39\x97\x8a\x4c\x57\x7a\xdc\x8e\x87\x10\xf6\x92\x6c\xeb\x84\x4b\xf7\xf4\x57\x70\xea\x8b\x5a\x47\x38\xde\x6b\x90\x6c\x57\xfe\xf2\xb6\xaf\xd3\xa5\x2c\x2f\x5f\xea\xbd\xdb\x3e\x2f\x5c\x77\x14\xcb\x2c\xe8\xc3\xbb\x9e\xe4\x9e\x3c\x50\xb5\x7b\xe5\xa8\x3d\x80\x09\xac\xf0\x3d\x14\x23\x2e\xd9\xf9\xd3\xaf\xfa\x4e\x3b\xe3\xa1\x55\x13\x2a\x3d\xff\x25\x8a\xb8\x30\xea\x70\x6c\xcf\xbb\x71\x3f\x74\xfa\x9d\xfb\x6e\xff\x1c\x75\x50\x8c\x15\xd6\x07\x5c\x90\x99\x20\x92\x30\x05\xae\x1d\xfd\xbe\x9a\xa3\x29\x8f\x49\x62\x9c\x10\x1f\xb5\xf6\x8f\x2e\xb1\xc2\x17\x58\xe1\x84\x8f\xcf\x50\x07\xfe\x53\xbf\x4c\x25\xc2\x89\xe4\x08\x3b\xb6\x22\xb1\x1b\x02\xb3\xd8
\x89\x16\x8c\x22\x3e\x9d\xd1\x24\x8b\x36\x65\xfe\x36\xca\x62\xfa\x4c\xe3\x14\x27\x88\x0f\xb5\x54\x91\x67\x8f\xac\xfb\x4c\x98\x4a\x71\x92\xcc\x11\x4e\x12\x64\x3f\xeb\x1e\x40\x72\xc2\xd3\x24\xd6\xe3\xba\x59\x4a\x3a\xa5\x09\x16\x5a\xa7\x35\xb3\xbd\xb1\x63\xa1\xfe\x84\x64\x73\x85\x79\x69\x6a\x4e\xf1\x13\x91\x88\x2a\x34\xe3\x52\xd2\x61\x92\x9f\xf9\x87\x1e\x82\x79\x5f\x5c\xf5\xc0\xc5\x13\x29\xc4\x8d\x0c\x75\x1f\xb7\x2e\x3d\xf7\xc5\x29\x66\x8c\xc0\x87\xb9\x9a\x10\x61\x3f\x6f\x1f\x7e\x6d\x6f\xcd\xc3\xf5\xfd\x6d\xf7\xa2\xf7\xb1\xd7\xbd\x5c\x74\xd7\xf4\x3b\xf7\x3f\x2e\xfe\xf5\x97\x9b\xbb\x1f\x3f\x5e\xdd\xfc\xb2\xf8\xcb\x55\xe7\xe1\xfa\xe2\x87\xc1\xed\x55\xe7\x7a\xf1\x47\xcb\x56\x8d\x3d\x3f\xfe\xcc\x76\x76\xb6\x8e\xce\x29\x14\x9c\xfa\x6b\x6c\xfb\xae\x9d\xfa\x27\x6f\xd7\xab\x3f\xa2\x09\x38\x1d\x1a\x7b\xf4\x33\xaf\x91\x7d\x13\xcd\xb0\x94\x46\x0f\x34\x33\x38\x7b\x64\x9f\xb9\xd0\xe2\x7a\xc4\xb5\x44\xd4\xba\xa2\x12\x69\xa4\x28\x1b\x67\x2f\x9d\xa3\xc7\xf4\xdb\x6f\xbf\x8b\xae\x28\x7b\x82\x7f\x91\x36\x12\x27\x84\x3c\x42\xc8\xa3\x75\x21\x8f\x7f\xab\x78\x75\xf7\xe1\x81\xe0\xe3\x0f\x3e\xfe\xc3\xf9\xf8\x83\x8b\xdf\x9b\x43\xf0\x6f\x6f\x4b\x88\xe0\x00\x0b\xfe\xed\xed\x09\x11\xfc\xdb\x2d\x5d\x71\x38\xde\xc1\xbf\x1d\xfc\xdb\xc1\xbf\x1d\xfc\xdb\xc1\xbf\x1d\xfc\xdb\x5f\x8d\x7f\xbb\x85\x29\x4f\xc1\xc9\x1f\x9c\xfc\xc1\xc9\x1f\x9c\xfc\xc1\xc9\x1f\x9c\xfc\xc7\xe3\xe4\xd7\xda\xee\x07\xcf\xb7\xbf\x4f\xd0\x3f\xad\x5c\xb2\x59\xaa\x40\x95\xe4\xa9\xd2\xff\xd4\xdf\x07\x5e\x59\x02\x01\xd0\xcc\xa1\xfc\x89\xa8\xec\x41\xad\xda\x1e\x45\xae\xf8\x2f\x5c\x3c\x8d\x12\xfe\x92\xcd\xfc\x13\x51\x7a\xf2\x77\xf6\x2b\x01\x0c\x30\x80\x01\xa2\x00\x75\xb0\x6b\xa8\x83\x56\xb9\xa8\x0f\x2a\xdf\x8f\x5a\xa4\x07\x89\x1e\x84\x5f\x10\x7e\x75\xb4\x39\x4a\xe1\xd7\x6c\x69\x47\xe7\xbd\xd9\xbf\x4c\x2f\xfa\xbb\x8c\x60\xaf\xc9\xb9\xa9\x4d\xa9\xa9\xc9\x98\xf1\x12\x62\x0e\x72\x19\x14\xd3\x4e\x56\x5c\x08\x85\x87\x8f\xe5\x52\x28\x4c\xfa\xf0\x17\xc2\x57\x91\x19\x12\x12\x3f\x36\x24\xd4\x1b\xbe\x37\x5a\x95\xb6\x71\xf4\x21\x89\xfd\x5f\x6a\xce\xfb
\xb4\x88\x33\x11\xae\xbb\x63\x72\x6b\xb5\xe9\xbe\x7b\x93\xfe\xad\x70\xa3\x37\x59\x66\xb8\xd1\x6b\xc9\x14\x6e\xf4\x55\xf3\x39\xdc\x6d\xb7\x11\xce\xd2\x3a\x45\x17\x6b\x54\x59\x34\x2e\xab\x58\x51\x47\x51\x59\x38\x51\x55\x29\xb1\x58\x1a\x51\x59\x0b\xb1\x5d\xf1\xc3\xa6\x57\x75\xf3\x72\x86\x4f\x44\x15\x1e\x3e\x9a\xab\xba\x30\xeb\xc3\x5f\xd5\xaf\x9e\xb6\xf3\x5a\x32\xec\xeb\x2b\xdd\x08\xb5\x1a\x7b\x24\xdd\x5b\xbf\xf2\xdb\x5b\x8d\xf1\x15\x94\x5f\x84\x7a\x8b\xb5\x68\xf4\xb6\x0a\x2c\xde\x6a\x45\xc5\x71\x96\x50\x84\x9a\x89\x50\x33\xb1\x53\x07\x6f\xe9\xd7\xaf\xaa\x66\xe2\x98\x8b\x24\xf6\xef\x9e\x08\x2e\x87\xe3\x73\x39\x04\x8f\x83\xfd\x5f\xb0\xbe\xd7\x5e\x79\x50\xed\x83\xf5\xdd\x64\xe5\xc1\xfa\x0e\xd6\x77\x0b\x8f\x68\xb0\xbe\x83\xf5\x1d\xac\xef\x60\x7d\x07\xeb\x1b\x05\xeb\xdb\x1b\xe8\xb5\x4a\x6b\xdb\x10\xdc\x3c\x2a\x9f\x43\xbe\xf4\x81\x3b\xe0\x8b\xe5\xa8\x05\xb1\xdb\xa4\x42\x15\xfe\xe5\x8c\xfa\x75\x11\x13\x6b\xad\xf4\xcb\x7c\xb2\x5d\x10\x92\xbf\x59\x51\xb1\xc2\x60\x5f\x78\xef\x28\x72\x04\x16\x66\x1d\xca\x52\x37\x55\x79\x5e\x49\x69\xd8\x13\x05\x8e\xe4\xfa\x5a\x7f\xa3\xde\xb0\x2d\x19\x6c\xc8\xed\x4b\xe8\x8e\xc6\x76\x3c\x1e\x9b\xf1\xb5\x75\x8b\x86\x8a\xc5\xce\x31\x97\x2b\x34\x09\xef\xfe\x5d\x85\xa0\x5c\x7e\x9b\x12\xf9\xf7\xa3\xd4\x28\x0e\x01\xa2\xfc\x26\xb5\x8a\xa0\x52\x04\x95\xc2\x0e\x15\x54\x8a\x37\xac\x52\x3c\xb2\xfe\x44\xd3\x61\x3a\xa4\x2c\xf3\xbe\x39\x0e\xf9\x8b\x23\xd7\x5f\x90\xfe\x1e\x4a\x19\xfd\x3d\x25\xc9\x3c\x3f\xbe\xb2\x3c\x5a\x86\xc4\x85\x4e\x91\xa6\x33\x11\x0a\xe8\xcd\x99\x22\x5f\x94\x44\xa7\x28\xa1\x4f\x04\xbd\xd7\x82\x19\x75\x6e\x7b\xef\x4f\xd0\xfb\x2b\x68\x08\x80\x66\x09\x66\xf2\x7d\x6b\x1c\x2d\x01\x06\x71\x5f\x30\x88\x01\x05\x31\xa0\x20\x36\x25\x50\x40\x41\x0c\x28\x88\xc7\x8b\x82\xb8\x33\xfb\x70\x43\x1c\xad\x57\xb1\x14\x8f\xd3\xf7\x1c\x2c\xc5\x60\x29\x06\x4b\x31\x58\x8a\xc1\x52\x3c\x16\x4b\xb1\x1d\x14\x0e\x66\x62\x30\x13\x83\x99\xb8\x43\xe2\x04\x33\x31\x98\x89\xc1\x4c\x7c\x93\x59\x49\x47\x69\x18\x1e\xc0\x28\x0c\x26\x54\x30\xa1\xec\x50\xc1\x84\x7a\xc3\x26\x54\x6b\x16
\x74\x3c\xf9\x3b\xcd\xd6\x13\x52\x9e\x57\xeb\x13\x6f\x26\x13\xe9\xbb\xa3\x54\x23\x0e\xe1\x5f\x0e\x7a\x44\xd0\x23\xec\x50\x41\x8f\x78\xc3\x7a\x44\xeb\x5c\xb1\xad\xf1\xa5\x1c\xa3\x2e\x10\x9c\xcc\xcd\x09\x11\x9c\xcc\x8d\x49\x15\x9c\xcc\x4b\x88\x13\x9c\xcc\xc1\xc9\x1c\x9c\xcc\x6f\x32\xfd\x28\x58\x87\xc1\x3a\xf4\xff\x1e\xac\xc3\x60\x1d\x06\xeb\x30\x24\xea\x04\xd3\x30\x98\x86\xc1\x34\x0c\xa6\xe1\x2a\xe2\x04\xd3\x30\x98\x86\x5f\x97\x69\x48\x9e\x09\x53\x12\x7a\x21\xfa\x86\xd2\xbb\x19\x97\xf5\x06\x9e\x2f\x1d\x2a\x8c\x3b\x18\xb3\xd8\x93\x10\x50\xdb\x7e\x43\x13\x2c\x11\x8f\xa2\x54\x94\xce\x40\xd9\xbc\xbb\x10\x04\x2b\x02\x23\xe8\x17\x8f\xc1\xac\x5b\x5c\xee\xa1\x00\x88\x87\x3c\x5e\xe0\x76\x73\x10\xaa\x7e\x59\xae\x66\xed\x6c\xe9\xbf\xa7\xa4\x99\x55\xbb\x47\xa6\x86\x6a\x2b\xc3\x8c\x8b\xdd\xcf\x5e\x6c\x53\xfd\x1d\x73\xfd\x42\xaf\xfe\x8d\x38\x3f\x1b\x45\xbf\x7c\x14\x35\x55\xd5\xeb\x3e\xd4\x11\xa8\xde\xe4\x3d\xeb\xfb\x87\x36\x62\x5e\xfd\x9c\xd7\xed\x71\xcb\xce\x7a\x6d\xb3\xdf\xf6\x5e\x70\x47\x71\xc4\x5f\xef\x86\xab\xdd\xd7\x70\xc2\xbf\xba\x9b\x7c\x86\x05\x61\x6a\xd0\xb4\xc1\xe9\x8e\xcf\x7c\xa1\xd1\xc4\x46\x67\x1e\x46\x38\x9a\x33\xbf\xb8\xde\xc3\x9e\xf9\xc6\xbb\x1d\x24\xc1\x6e\x25\x41\xd5\xc6\xb7\x41\x12\xb4\xfb\x4c\x87\x23\x0d\xff\x0b\x4c\xbd\x1e\x53\x1f\x8f\x2d\x7a\x0c\x0c\xfe\xba\xa6\xe8\xab\x33\x79\x3b\xad\xb4\xac\xe7\x5b\x63\x16\xef\x0b\x3a\x1e\x13\x61\x3c\xcd\x91\x66\x45\x1b\xce\x5c\x52\xad\x98\x77\x39\x5b\xc9\xd6\xd9\xa3\xc7\xc0\xd2\xd9\x64\xcd\xdc\xbf\x1a\x5e\x5e\x58\x77\x4b\x98\xb8\x08\xe4\x24\x48\xc4\x9f\x89\x68\xcc\xd9\x77\x04\xd8\x19\x84\xf7\x4c\x90\x67\xca\x53\x99\xcc\x4f\x45\xca\x90\xbb\x09\x50\xf6\x2d\x93\x65\xf3\x42\x93\x04\x71\x96\xcc\x91\x54\x58\x28\xf7\x33\x1b\xa3\x91\xe0\x53\x38\x22\x09\x96\x0a\x3d\x31\xfe\xc2\xd0\x08\xd3\x24\x15\x04\xcd\x38\x65\xea\xec\x91\xf5\x18\xba\x33\x73\x84\xae\x28\x27\x28\x95\xfa\x60\x45\x98\x31\xae\x50\x34\xc1\x6c\x4c\x10\x66\x73\xdb\x5e\x30\x67\x13\xc4\x05\x4a\x67\x31\x56\x04\x3e\x51\xaa\x05\xcb\xe6\x08\x69\x07\x54\x22\x2a\x11\xf9\xa2\x04
\x99\x92\x64\xae\xbf\xa1\x0f\x82\xe2\xc8\xd2\xc7\x4c\xd5\x36\x2b\x23\x42\x70\x21\xa1\x9f\xca\x70\xfe\x07\x66\x8a\x32\x82\x20\x12\x22\x4d\x4a\xc1\x29\xba\xe2\x12\xe2\xb2\x3f\xfe\x43\xa2\x28\x49\xa5\x22\xe2\x04\x0d\xd3\xb1\x44\x94\xa1\x59\x82\xd5\x88\x8b\xa9\x9e\x21\x65\x52\xe1\x21\x4d\xa8\x9a\x9f\xa0\x29\x8e\x26\x66\x2c\xa0\x81\x3c\x79\x64\x31\x7f\x61\x52\x09\x82\xb3\xaf\xbb\x1f\xd1\x9f\xfc\xdf\x0c\x37\xc8\x6f\x4e\xa0\xa9\x1a\x9d\xce\x92\xb9\x3f\xfd\x7c\xfb\xcd\x9e\xe8\x41\x48\x8c\x86\x24\xc2\xa9\xb4\x99\x51\x4a\xcc\x11\xf9\x32\xc1\xa9\x84\xbd\xd3\xcb\xb3\x1d\x69\x22\x3e\x9d\x25\x44\x11\x44\x47\x48\x09\x6d\x79\xe0\x31\xa6\x9a\x74\xf7\x84\x34\x90\x68\x76\x03\xed\x11\xf8\x0d\xe2\x6b\x53\x2e\x08\x8a\x89\xc2\x34\x59\x9a\x2d\x67\xdf\xcd\xc6\x3a\x0a\xd3\xf3\x95\x64\x5e\x30\x27\xf7\x2a\xc8\x8b\x6c\xdc\x3e\x49\x9e\x40\xf2\xd2\x0e\x94\x14\x66\xb3\xaa\x22\x9c\x6c\xa9\xaf\xdc\xd9\x49\x85\xe3\x1b\x8e\x6f\x79\x26\x87\x3f\xbe\x86\x17\x5b\x7a\x7e\x0f\x56\xd8\xdc\xac\x9d\xf2\x15\x95\x2a\x7b\xf2\x38\xb0\x31\xb3\xe9\x06\x4c\xcc\xcd\x73\x4d\x43\x12\x77\x48\xe2\xae\xa6\xcc\x71\x26\x71\xb7\x26\x5d\x31\x24\x3c\xef\x29\xe1\x99\xca\x90\xf1\x1c\x32\x9e\x9b\x12\x28\x64\x3c\x87\x8c\xe7\xe3\xcd\x78\x5e\xd3\x76\xd8\xb0\xfe\xb5\x2e\x34\xb7\x8e\xfd\xf0\x89\xa8\x23\x35\xfa\x83\xe5\x10\x2c\x87\x60\x39\xec\xdc\x72\xe0\xc2\x45\x30\x5a\x00\x37\xb8\x2b\x29\xed\xde\x7e\x17\x93\x84\x28\x52\xef\x6b\x25\x62\xaa\x0d\x22\xa3\x81\x50\xa6\x55\xd5\xb1\x20\x52\x6e\x2b\x66\xb3\x81\x8f\x54\xd8\x66\xf3\x0f\x4e\xd6\x20\x7d\x6b\x96\x16\xa4\xef\x1b\x93\xbe\x47\x15\x36\xf0\x24\xd4\xa1\xe2\x06\xd9\xad\x32\x4b\xeb\x35\xf5\x07\x93\xdb\x90\x27\x5b\x18\x0e\xd7\xe6\x96\xe2\xd9\xe1\xb6\x7c\xbe\xe5\x2d\x63\xbe\x75\xa4\x57\x8c\x99\x7c\xb8\x5f\xc2\xfd\x52\xb3\xb4\x70\xbf\x84\xfb\xe5\xf5\xee\x17\x27\x9e\x5a\x15\x94\xe6\x62\x5c\x70\x19\x2d\xbb\x88\x0e\x95\xec\x7a\x5c\xb7\xce\x61\x53\x47\xde\xde\x95\xd3\x9e\x03\xda\xb6\xfc\xdd\x90\xb2\x1b\x52\x76\x8f\x2a\x65\x37\xc8\xed\x23\x90\x72\xad\x4b\x6e\x3d\x8e\x7c\xd6\xc0\xdb\x47\xc1\xdb\x6d
\xcb\xfc\x6c\x75\xb2\xe7\x51\xf1\xf4\x81\x72\x3d\x83\xfb\x23\xb8\x3f\xaa\x29\x13\xd2\x22\x03\xbc\xed\xe2\xb2\x42\xb6\x67\xc8\xf6\x0c\xd9\x9e\xbb\x24\x4e\xc8\xf6\x0c\xd9\x9e\x5f\x6d\xb6\x67\xcb\x13\x3c\x8f\xca\x62\x08\xd6\x42\xb0\x16\x42\xb0\x74\xcd\xa5\x1d\x9d\x8e\xbe\x2b\xc9\xec\xde\x6e\x51\x86\xe7\x51\x49\xdb\xd7\x48\xf0\x0c\xe2\x37\x88\xdf\x6a\xca\x1c\xa5\xf8\x6d\x8f\x23\x3d\xe4\x42\x2e\xe4\x42\x1e\x95\x30\x3e\x78\x2a\x64\x90\xc4\x41\x12\x57\x53\x26\x48\xe2\xe3\xcf\x1a\x34\x51\xd5\xc1\x2c\xc1\x6c\x40\x63\x2f\x75\xf0\xc3\xff\xe4\xce\x8a\x7d\x45\x36\xf5\xd1\x8a\x4d\x17\xd2\xac\xeb\xa7\xf8\x4d\xbf\x92\xe4\x81\x0e\xc4\x87\x7a\x1a\x2b\xfb\xb1\x9a\xd8\xc8\x6d\x82\x59\x2f\x3e\x0e\xb0\x9b\xca\xe5\x1f\x22\x18\xfa\xf6\x52\x0d\xb7\xb9\xa4\xb0\x82\xa0\x1b\xa6\xcc\xb8\x5d\xf3\x6e\xb2\x05\xa7\x72\x3b\x16\xba\xcd\x95\xe5\x2d\xcc\xbb\xb5\xdc\x65\xe4\x11\xa5\x7d\xcb\x0e\xb1\xb8\xd0\x6a\x32\x44\x9b\x1a\x2e\x38\x44\x9b\xda\x1b\x6d\x6a\xb0\x8d\x7b\x09\x21\x1f\xf8\x78\x1e\x54\x67\x3d\x6a\x4d\x35\x28\xaa\x28\xa8\x75\x41\xad\xab\x5f\x75\x50\xeb\x82\x5a\x17\xd4\xba\xa0\xd6\x05\xb5\xee\xf5\xd5\xba\x06\xcb\xfc\x6a\xb3\x0c\x56\xa9\xaa\xcd\x5b\x0f\x99\x1a\x1f\x28\x05\x4c\x67\x09\xc7\xf1\xb2\x4c\xaf\x5c\x99\xfc\x0d\xe5\x8a\xdb\x12\x0d\xd4\x8c\x9e\xbf\x76\x0c\x0a\x68\x3e\xdb\xaf\xac\xfe\x69\x71\xe1\x6d\x89\x16\x14\xe1\x2b\x5b\xca\xdb\x47\x11\x06\x78\x2d\xe6\x7e\x93\x30\x37\xe1\xc4\x36\x3c\xb1\x87\xab\x5f\xac\x3e\xc5\x6b\x78\x49\xe4\xdf\x8f\xeb\x18\x87\xde\x15\x01\xa3\xaa\x62\x69\x21\xdb\x24\x14\x69\x86\x6a\xc6\x37\xe7\x6a\x0b\xd5\x8c\xa1\x9a\x31\x38\x22\x97\x2f\x3b\x38\x22\xdf\x44\x35\xe3\xfa\xc6\xc4\x86\xc5\x8d\x87\x31\x2b\x8e\xcc\x3b\x10\xcc\x8a\x60\x56\x54\x2c\x2d\x98\x15\x5f\xa1\x59\xd1\x0e\x0a\x07\x9b\x22\xd8\x14\xc1\xa6\x08\x36\x45\xb0\x29\x76\x4e\xc6\x60\x53\x34\xb0\x29\xe0\x5f\x16\x62\x78\x6d\x03\x63\x4d\xc3\x62\x05\x8e\xca\xd1\xc6\x1c\x83\x45\x11\x2c\x8a\x60\x51\x1c\xdc\xa2\x68\xcd\x82\xac\xf8\x5c\xb1\xa6\x7b\xb7\x21\x25\x40\xf7\xf6\xad\xc7\xcd\x68\x00\x23\xad\x50\x26\x8a\x06\x5a\xc6
\x75\xfa\x55\xdf\x46\x31\xd9\xe4\xa0\x96\xe7\xc9\xd6\x28\xe2\xc2\x08\xe5\xd8\x72\xb9\xd1\x27\x3a\xfd\xce\x7d\xb7\x7f\x8e\x3a\x28\xc6\x0a\x6b\xb6\x16\x64\x26\x88\x24\x4c\x81\xae\x46\x20\x8f\x1e\x60\xf5\x13\xa3\x55\x7c\xd4\xf7\x0f\xba\xc4\x0a\x5f\x60\x85\x13\x3e\x3e\x43\x1d\xf8\x4f\xfd\x32\x95\x08\x27\x92\x23\xec\x48\x4f\x62\x37\x04\x66\xb1\x3b\x50\x18\xd0\xe2\x69\x92\x19\xa7\x99\x79\x41\x59\x4c\x9f\x69\x9c\xe2\x24\x2b\x4f\x78\x64\xdd\x67\xc2\x54\x8a\x93\x64\x8e\x70\x92\x20\xfb\x59\xf7\x80\x03\xa0\x1f\x92\x6c\x96\x92\x4e\x69\x82\x85\x16\xc7\x66\xb6\x37\x76\x2c\xa4\x0d\x63\x37\x57\x98\x97\xa6\xe6\x14\x3f\x11\x89\xa8\x42\x33\x2e\x25\x1d\x26\xf9\x01\x78\xe8\x21\x98\xf7\xc5\x55\x0f\x74\xb6\x48\x21\x6e\x24\x87\xfb\xb8\x35\x60\xdc\x17\xa7\x98\x31\x02\x1f\xe6\x6a\x42\x84\xfd\xbc\x7d\xf8\xb5\xd5\xaf\x87\xeb\xfb\xdb\xee\x45\xef\x63\xaf\x7b\xb9\xa8\x7f\xf5\x3b\xf7\x3f\x2e\xfe\xf5\x97\x9b\xbb\x1f\x3f\x5e\xdd\xfc\xb2\xf8\xcb\x55\xe7\xe1\xfa\xe2\x87\xc1\xed\x55\xe7\x7a\xf1\x47\xcb\x56\x8d\x55\x39\x7f\x66\xfb\xd0\xe5\xdc\xdb\x0d\x30\x3d\xec\xe1\x52\x58\xa5\xd2\xf4\x94\x11\x64\x4c\xa5\x02\xf1\xdf\x44\x0b\x5b\x0d\xe5\x71\xb4\xda\x57\x68\x6c\x16\x74\xb1\xa0\x8b\x05\x5d\xec\xd8\x74\xb1\xc3\xb9\x04\x8e\x28\x4d\xf1\xbb\xe3\xba\x7b\x42\xdb\x85\x20\x9c\xdb\x2f\x9c\x5b\x17\x7a\x6b\x8d\xeb\xfc\x18\x21\x5d\x43\x50\xb1\x39\x21\x42\x50\xb1\x39\xad\x42\x50\x71\x09\x71\x42\x50\x31\x04\x15\xbf\xe2\xa0\xe2\x51\xe6\x26\x06\x53\xc2\x3d\x17\x4c\x89\x60\x4a\xbc\x51\x53\xa2\x35\x14\x0e\x76\x44\xb0\x23\x82\x1d\x11\xec\x88\xe5\xc4\x09\x76\x44\xb0\x23\x82\x1d\x71\x6c\xf9\x88\xc7\x65\x49\x04\x2b\x22\x58\x11\xed\xb6\x22\x5a\xb3\xa0\xe3\x89\x16\x37\x5b\x4f\xc8\xdc\x0b\x99\x7b\x21\x73\xaf\x36\x73\xef\x8d\x5a\xf2\xbb\xd2\xdf\xdc\xdb\x6d\x4b\x48\x3c\x2e\xf5\x2b\x74\x17\xcb\x7e\x0d\xca\x58\x50\xc6\xbe\x52\x65\xac\x45\x20\x8a\xad\x68\x92\x36\xc5\x2a\x9a\xe0\x61\x42\x06\x99\x2f\x4b\x36\x35\xef\xaf\xa8\x54\x12\x45\xa9\x54\x7c\x5a\x7f\xb9\x7c\x76\x5f\xe8\x64\x1f\xb8\xe0\x6c\x44\xc7\xa9\xb9\x5b\x7e\x03\xd6\xf7\x4e\x74
\xae\xe0\xce\x67\x64\x55\x5c\xb1\x62\xf4\xa3\xb8\x96\xaa\xa7\x7e\xa8\xdb\x69\x1d\x7b\x24\xf7\x5d\x5a\x63\x42\xab\x90\x83\xbb\xee\xfd\xcd\xc3\xdd\x45\xf7\x1c\x75\x40\xc5\x82\x70\x82\x61\x05\xfa\x87\x5e\x14\x52\x58\x3e\xe5\x7b\x29\x0c\x9b\x4b\xd0\xb3\x21\x7e\xa1\x55\x7e\x74\x8a\x2e\xae\x1e\xee\xfb\xdd\xbb\x9a\x01\x2d\xa3\x40\xab\x54\x32\x9d\x25\x58\x91\x18\x3d\xa5\x43\x22\x18\xd1\xda\x4e\x94\xa4\x5a\xb9\xc9\xa3\x1a\x66\xd0\xee\x7f\x75\x2f\x1e\xfa\xbd\x9b\xeb\xc1\x4f\x0f\xdd\x87\xee\x39\x72\x1c\xa7\x87\xd5\xf3\xd2\xb3\x88\xe7\x0c\x4f\xb5\x61\xa5\xff\x90\x37\x67\xfd\x3d\x25\x29\x41\x58\x4a\x3a\x66\x53\xc2\x54\x79\x44\x37\xe1\xab\xce\xf7\xdd\xab\xe2\xc8\x13\x82\x7e\xfc\x47\x3e\xa9\x04\x0f\x49\x62\xc3\x2c\x10\x39\xd0\x8c\x9e\x7f\xc8\xc6\x5f\x52\x43\xd5\x9f\x1e\x3a\x57\xbd\xfe\xaf\x83\x9b\x8f\x83\xfb\xee\xdd\xcf\xbd\x8b\xee\xc0\x2a\xcb\x17\x1d\xfd\xdd\xc2\x97\xac\x4e\x8d\x7e\x4f\x71\xa2\x8d\x2e\x3e\x82\xb8\x05\x8d\x08\x7a\x99\x10\x86\x52\x06\x1c\x67\x2c\x39\x6d\xde\x65\x1f\xd5\xa7\xcc\xac\xe8\xf6\xea\xe1\x53\xef\x7a\x70\xf3\x73\xf7\xee\xae\x77\xd9\x3d\x47\xf7\x24\x01\x5b\xc7\x11\x1d\x76\x71\x96\xa4\x63\xca\x10\x9d\xce\x12\xa2\xa9\x61\x6c\xb9\x21\x99\xe0\x67\xca\x85\x3d\xba\x63\xfa\x4c\x98\xa1\x23\x9c\x59\x18\xdf\xd9\x14\x03\x8f\x74\x37\xd7\x1f\x7b\x9f\xce\x51\x27\x8e\xb3\x35\x48\x18\xa3\xc0\x39\x2f\x5c\x3c\x8d\x12\xfe\x72\x5a\x9c\xb6\x16\x0e\xf0\x79\xc3\x44\xfc\x99\x08\x41\x63\x52\xe2\xa3\xce\xfd\x7d\xef\xd3\xf5\xe7\xee\x75\x1f\x28\xa6\x04\x4f\x24\x9a\xf0\x17\xf0\xd0\xc3\x0a\xc1\x71\xff\x8c\x69\x02\x1f\x73\x9b\xc5\x19\x7a\x99\x50\x88\xea\x50\xe9\x13\xcc\x98\x9d\x22\x65\xaf\xee\x74\x2e\x1c\xbc\x45\x6b\xac\x7c\x92\x16\x9f\x28\x1d\x8b\x65\x0f\x14\xb8\x7c\xf1\xc1\x55\xdc\xba\xf8\x46\x89\xdd\xea\x6d\xd0\x05\x7e\xa9\x5f\x69\xbe\xd7\x8d\x4d\xd0\x22\x0d\xd7\x54\x1e\xd6\xb5\x40\x4d\x0c\xcc\x37\x42\x21\xa4\xe6\xd4\x7c\x27\x13\x8f\xcb\x1a\x6d\xac\x46\xe4\x0d\x57\x8f\x5c\xa1\x38\x8e\xc2\xbb\xd7\xd5\x28\x0e\x7b\x34\x0e\x6d\x35\x04\x7d\x29\xe8\x4b\x41\x5f\x0a\xfa\x52\xd0\x97\xb2\xff\xed
\x59\x9f\x20\x4a\xd0\x48\x7e\xc8\xf8\x6a\xbf\xa0\xac\x44\x6a\x86\x55\x74\x4a\x90\xfd\xb2\x3d\xa9\xb5\x4a\x48\xd6\xea\x7e\xa9\xc7\xfc\x13\x51\xd9\x83\x9f\xcd\xc0\x47\xa1\x4c\xfc\x62\x25\x4a\x36\xf9\x4f\x44\xd9\xf9\x87\x82\xfe\x50\xd0\x5f\xb3\xb4\x10\x15\xd8\x3e\x2a\xc0\x05\x92\x73\xa9\xc8\xf4\x48\xe2\x03\x31\x99\x2d\x7e\xb0\xb4\x30\x78\xc6\xe4\x77\x2d\xa4\x23\x9b\xc8\xb9\xcd\x1d\x48\xc8\x33\x49\x40\x91\x55\x02\x3f\x13\x21\xad\x7a\x36\x14\x04\x3f\x69\x9d\x36\xe6\x2f\xbe\x72\x16\x13\x85\x69\xb2\x0f\xfb\xb9\x49\xba\xf2\x77\x7f\x7b\xd5\xfb\xf0\x78\xaf\xc0\x70\x03\x86\x10\x72\xb8\x2c\xbe\xc2\xcb\xe2\x18\x93\x78\xc2\x1d\xd8\x96\x3b\x50\x93\x3b\x1e\xb8\x74\xbe\x92\x3d\x58\xf0\xd0\xed\xd2\x3e\xbc\x83\xfa\x28\xb9\xec\x22\xd4\x52\x2a\xee\xc2\xf6\xfd\x66\x93\x00\x57\xdc\x88\xde\x1b\x47\x61\x0f\x7a\xf3\x0d\x76\xdf\x9e\xfd\xc7\x77\x7e\xb4\xc2\x5d\x7e\x53\xa2\x70\x8c\x15\xd6\xeb\x1b\x13\x75\x86\x6e\x18\xfc\xd6\xc7\xf2\xe9\x04\x39\x65\x45\x5f\x08\x79\x7e\x89\x5f\x77\xb4\x27\x32\x34\x74\xc4\xbd\x76\x46\x6a\xb0\xd2\x83\xe2\xb5\x53\xc5\x2b\x94\x63\xb7\x24\xdb\x79\x77\xb7\xb2\x19\xf1\x88\x2f\xe6\x80\xb7\xda\xae\x6b\x3a\x85\xfd\x08\x77\xb0\xf9\x5f\xb8\x83\xc3\x1d\x1c\xee\xe0\x16\x50\xf8\xd5\x13\xe0\x2b\xae\xac\x57\xcd\x80\xaf\x72\x2b\x54\xfb\x14\x72\x87\xc2\x9a\x40\xbd\xb9\x0f\x61\x25\xbc\x56\x85\xd6\xb2\x12\x5b\x2b\x7f\x87\x1e\x49\xb2\x9a\xb7\xca\x43\xe0\x6b\x05\x4d\x65\xa9\xa6\x62\x30\x6b\x82\x4b\x61\x8f\xea\xcc\x36\xba\x0c\x56\x70\xc9\x60\xca\xcc\x9d\x94\x57\x7b\xcb\x03\xec\xc7\x9a\x0b\xdd\x46\xb3\xf1\x16\xe6\x29\x37\x4e\x67\xf1\x88\xd2\x92\x43\xb8\x17\x18\xae\xd7\xf1\xe3\xbf\x3e\xf4\x16\x79\xbb\xc8\x5b\x01\x5c\x6a\xad\x43\x12\xc0\xa5\x0e\x01\x2e\xd5\x60\x1b\xf7\x82\x18\x77\xe0\xe3\x79\x48\xd3\xe1\x78\x83\x90\x47\x66\x33\xb4\xa9\x3c\x36\x84\xeb\x5e\x5b\xb7\x0e\xae\xc2\xe0\x2a\xac\xa5\x4c\x70\x15\x7e\x5d\xe9\x5a\xbb\xba\xef\xdd\xdb\xaf\x1d\x85\x3c\xb2\x9b\xf9\xb0\x41\xc8\x10\xaf\x2b\xfc\x35\x5c\xc2\xe1\x12\x0e\x97\xf0\x1b\xbb\x84\x43\xbc\x6e\x4d\xa3\xfb\x28\xa2\x74\x47\x76\xad\x1f\x22
\x48\x17\x42\x5a\xc7\x75\xe3\x87\x90\x56\x08\x69\x85\x90\x56\x08\x69\x85\x90\xd6\xf2\x65\x87\x90\x56\x08\x69\x1d\x9a\x5b\xbf\x5a\xb7\x5d\xb5\xc5\xc0\x63\x32\xa8\x40\x92\xc9\xfe\x34\xf0\x61\x65\x0a\x7f\x2d\x84\xef\x0a\xbf\xf8\xb1\xbc\xc2\x0f\x79\xbb\x1c\xf8\x2e\x8d\xd7\xae\xc3\x5f\xe6\x4e\xe4\x31\x69\x5c\x77\x5f\x78\xb8\xed\x79\x81\x6e\xa1\xc6\xf4\xf0\x67\x7e\x80\x52\x86\x32\x27\xbc\xb1\x6c\xc1\x1a\xae\x7e\x8b\x9e\xba\x8a\x33\x1a\xdc\x76\x2b\x09\xf5\x86\x01\x07\xac\x10\x3e\xc0\x7c\xde\xf4\x1d\xf6\xc1\xa2\x3f\x0e\x1c\xd8\xe0\x0e\x2f\xb5\x4b\x33\xb4\x16\xfa\xce\x47\xe3\x22\x48\xab\xae\xb8\x8a\x57\xdb\x7e\xd1\xc1\x9a\x97\x2c\x39\x5c\x77\xe1\xba\x0b\xd7\x5d\xb8\xee\xc2\x75\x57\xbe\xee\xdc\xd5\x33\xa8\xb8\xf4\xaa\x7f\xcb\xaf\xbe\xea\xdf\xb3\x0b\xb0\xfa\xe7\xf5\x51\xd4\x1a\x05\x8f\x1a\xdb\x70\x10\x36\xf2\x9f\x3e\x92\xea\x2e\x7f\xca\x87\x08\x1d\xd5\x32\xc6\x1b\xbb\xd8\x96\x32\xf9\x9b\xbb\xde\x96\x1d\xd9\x70\xc9\x35\x24\xd7\x5b\xbd\xea\xf6\x12\xa8\xda\x6f\xe0\xe1\x80\x41\xab\xd7\xf2\x3b\xef\xc5\xbd\xfe\x99\x0b\x82\x28\x1b\x71\xc4\x21\x7d\x47\x2a\x91\x46\x8a\xb2\x71\xf6\xd2\x39\x7a\x4c\xbf\xfd\xf6\xbb\xe8\x8a\xb2\x27\xf8\x17\x69\xa3\x53\x3e\xc4\xcc\x42\xcc\xec\x18\x63\x66\x26\xd7\x6e\x30\xc3\x82\x30\x55\x61\x5b\x94\xaf\x13\x78\xdc\xef\x49\xed\xb4\x0e\x18\x00\x69\xd5\x1e\xd9\x0b\x39\xbb\xaa\xde\x58\x5d\x58\xc9\x7a\x79\xc3\x61\xa5\x96\xdb\x23\xed\x89\x2a\xbd\x55\x2d\x3d\x38\xa1\x82\x13\xaa\xbc\xce\xc3\x39\xa1\x36\xa0\x7b\xc8\x8d\x38\xfc\x55\x75\x3c\xd1\xa3\x96\xdf\x67\x6d\x0b\x1e\x85\x5b\x2d\xdc\x6a\xe1\x56\x6b\x01\xdd\xc3\xad\xb6\xf4\x56\xfb\x4a\xc2\x43\xc7\x70\x7b\xb5\x24\x3a\xf4\x56\x6f\xae\x10\x35\xd9\x01\xb9\xde\xea\x2d\xf6\x5a\x81\xd2\xc3\xfb\x9e\x43\x7c\x28\xc4\x87\x42\x7c\x28\xc4\x87\x42\x7c\xc8\xff\x7b\x88\x0f\x2d\xa3\xfb\xc1\xcc\x13\xab\x02\x0d\xb2\xa3\x2b\x3f\xfc\x4f\xfe\xef\xcc\x2c\xf1\x4d\x8b\x65\x30\x44\x17\x82\xc0\xa9\xe0\xc2\x82\xd8\x48\xdb\x1f\xbe\xde\xc8\xf8\x8c\x55\x34\xc1\xc3\x84\x74\xb2\xcf\xba\x76\xf9\x60\x60\xfc\x86\xb0\x2a\x28\xbd\xd0\x96\x6e\x89\x2d\x62
\xa0\x21\x6e\xcd\xd3\xf9\xa0\xc7\x60\x90\x2c\x4c\xfa\xb0\x20\x46\x8b\x1b\xdf\xec\x00\xb9\x9d\xa1\x31\x88\x3b\xaf\x19\xbf\xd6\xbb\xf9\x28\xbf\x18\x24\x7a\xa1\x49\xa2\x35\x19\xab\xb5\xb5\x44\x1b\x7d\x75\x68\x93\xda\x9d\x7f\x55\x80\x93\x2a\xe9\x50\x25\x12\x9a\xf8\xcd\xb7\x97\x03\xa6\x09\xb0\x63\x36\xcc\x62\x6b\xf7\xad\x70\xad\xbf\x0d\x49\xf0\x89\xa8\x43\x89\x81\x4d\xcf\xfe\xd2\x73\x2f\xc8\x88\x08\xc2\x22\xd2\x42\x58\x8d\x75\xf0\x5e\x7e\x31\x8b\xb4\x60\x2f\x53\xc7\xb4\xfe\x52\x15\xb7\x76\x5a\x41\xd5\xb5\x8a\x5e\xbf\x73\xff\xe3\xe0\xae\x7b\x7f\xf3\x70\x77\xd1\x3d\x47\x1d\x10\x83\xf0\x8e\x39\x20\xf4\x0f\x18\x4e\x61\xf9\x94\xbb\x3b\x84\x11\x03\x12\x98\x1e\xec\x49\x4d\x45\x74\x8a\x2e\xae\x1e\xee\xfb\xdd\xbb\x9a\x01\xed\xf1\xd1\x7a\xa2\x22\xd3\x59\x82\xb5\xfe\xf8\x94\x0e\x89\x60\x04\xae\xe6\x24\x95\x8a\x88\xdc\xca\x34\x83\x76\xff\xab\x7b\xf1\xd0\xef\xdd\x5c\x0f\x7e\x7a\xe8\x3e\x74\xcf\x91\x3b\x87\x7a\x58\x3d\x2f\x38\x7a\x26\xf2\x63\xfe\x90\xf7\x80\xfd\x3d\x25\x29\x41\x58\x4a\x3a\x66\x53\xc2\x54\x79\x44\x37\xe1\xab\xce\xf7\xdd\xab\xe2\xc8\x13\x82\x7e\xfc\x47\x3e\xa9\x04\x0f\x49\x62\xcd\x5e\xc0\xa7\xd0\xc7\x3f\xff\x90\xb5\x87\x53\x43\xd5\x9f\x1e\x3a\x57\xbd\xfe\xaf\x83\x9b\x8f\x83\xfb\xee\xdd\xcf\xbd\x8b\xee\xc0\x22\xd8\x5c\x74\xf4\x77\x0b\x5f\xb2\xaa\x26\xfa\x3d\xc5\x09\x55\x73\xbd\x8f\xd2\xc8\x45\xd3\xdf\x36\x65\xa6\x39\x2e\xe8\xda\xd8\xef\x70\x2b\x67\x24\x32\x2b\xba\xbd\x7a\xf8\xd4\xbb\x1e\xdc\xfc\xdc\xbd\xbb\xeb\x5d\x76\xcf\xd1\x3d\x49\x48\xa4\x64\x46\x74\xd8\xc5\x59\x92\x8e\x29\x43\x74\x3a\x4b\x88\xa6\x86\x01\x13\x1b\x92\x09\x7e\xa6\x5c\x58\x81\x36\xa6\xcf\x84\x19\x3a\x6a\xb6\x32\xe3\x3b\x38\x9d\x81\x47\xba\x9b\xeb\x8f\xbd\x4f\xe7\xa8\x13\xc7\xd9\x1a\x4c\x67\xf4\x02\xe7\x38\xc7\xca\x69\x71\xda\x74\xa4\x0d\x16\x2d\x60\x60\xfb\xf8\x33\x11\x82\xc6\xa4\xc4\x47\x9d\xfb\xfb\xde\xa7\xeb\xcf\xdd\xeb\x3e\x50\x4c\x09\x9e\x48\x34\xe1\x2f\x60\xd0\xc1\x0a\xc1\xce\x7b\xc6\x34\x81\x8f\xb9\xcd\xe2\xcc\x3f\xfd\xde\x97\x27\x3c\x4d\x62\xbd\x4d\xaf\x6e\xa3\x14\x0e\xde\xa2\x99\x52\x3e\x49\x8b
\x4f\x94\x8e\xc5\xb2\x07\x0a\x5c\xbe\xf8\xe0\x2a\x6e\x5d\x7c\xa3\xc4\x6e\x8b\x0f\xd4\xf2\x4b\xfd\x4a\xf3\xbd\x6e\x6c\x99\x15\x69\xb8\xa6\x94\xdd\x81\x6f\x6f\xb9\xc3\xb6\xa5\x76\x97\x7b\xfb\x5d\x4c\x12\xa2\x48\xad\xa2\x74\x09\x3f\xbf\x96\xa2\x64\xbe\xfe\x36\x74\x25\xb3\x96\xa0\x2e\x7d\x35\xc6\x92\xdb\xf0\x56\x18\x4b\xe6\xac\xf9\x36\x13\x94\x0c\x7a\xc6\xb5\xa9\x13\x7c\x9b\x6e\x96\xa3\x28\x0b\x6c\x8f\x9f\x65\xef\x97\xe2\xa1\x85\x43\xf0\x21\x2d\xce\x24\xf8\x90\xb6\x13\x8b\x85\x3f\x56\x40\xe9\x1e\x5c\x54\x6e\xa2\x60\x15\xe4\xe5\x25\x3c\x7e\x9c\x52\xb3\x3c\xf7\x63\x96\x9d\xde\x68\xed\x10\x22\x5f\xaf\xf8\x5c\x38\xe2\xcd\x16\x6e\x53\x7f\x8e\x77\xdd\x6d\xb9\x36\xea\x8e\x75\x9b\x2f\x0f\x73\x63\x1c\x7f\x44\xe2\x98\xc4\xff\xab\x84\x24\xde\x9c\x9a\xfc\xd5\x39\x0d\x42\x8c\x25\xc4\x58\x42\x8c\x25\xc4\x58\x42\x8c\x05\x6d\x1a\x63\xd9\x95\xa6\x75\xd4\x01\x89\xe3\x54\x95\x0e\x1b\x91\x08\xda\xd2\xb1\x6b\x4b\x6d\x31\x0a\x8f\x2b\xc4\x52\x34\x07\xd7\xee\xba\xd5\x16\xbb\xf0\x2d\x39\x07\x8f\xcb\x46\x6c\x9d\x3b\xf0\xab\x13\x7c\x9b\xb9\xfe\x8e\x76\xb9\xc1\x2a\x0e\x56\x71\xb0\x8a\x83\x55\x1c\xac\x62\x14\xac\xe2\xb5\xad\xe2\xb7\xa4\x28\x1e\x9d\x85\x1c\x74\xc5\xd7\x5e\xf0\x57\xa6\x2b\xb6\xc5\x27\x50\x77\x72\x5b\xea\x19\xf8\x3a\x13\x8a\x8e\xf8\x26\x08\x35\xaf\xc8\xdb\xba\x90\x70\xf3\xb5\xc8\xd1\x96\x27\xdc\xbc\x3d\xbf\xea\x11\xcb\xc8\x50\x0d\x1c\xd4\xca\x1d\x2d\x37\xb8\x20\x83\x0b\x32\xb8\x20\x83\x0b\x32\xb8\x20\x51\xbb\x8b\x9f\x57\x3a\x9c\x42\xfd\xf3\xbe\x1c\xab\x47\xac\x29\x86\x5a\xe8\xa0\x2c\xee\x6e\xb9\x6d\xb5\x9d\xdb\xe4\x83\x94\xeb\x77\x94\x58\x89\xc4\x6d\x97\xfd\xdb\x12\x01\x76\x45\xa5\x33\x74\x8f\x49\x5e\xc9\x7d\x8b\xa4\x6d\xe0\x77\xdd\x8e\xbe\x59\xf4\xdd\x05\x3a\x00\x01\x22\x2c\x8d\x81\x97\x26\x8a\xce\xb4\x2a\x8f\xc7\x44\x5a\x3c\x62\xad\x78\x9f\x38\xd9\x25\x9e\x89\x38\xcd\x00\xa7\xe1\x13\x0e\x85\x1b\xcc\x16\xc5\xd1\x08\xd0\xe6\x81\xae\xe4\x8b\x82\xa1\x1e\x19\xe0\x70\x03\x8d\xce\xda\x88\x7f\x1b\xc0\x81\x97\x10\x27\x80\x03\xaf\x25\x4d\x02\x38\x70\x4b\xc0\x81\xd7\x35\xc1\xcc\xa9\xf4\xad\x30
\x38\xe4\x4e\x6b\x75\x6e\xa9\x23\x35\xc6\x66\x5c\xd6\x6b\x26\x77\x64\x4c\x25\x88\xa4\x25\xdd\xae\x9c\x4e\x02\x8d\x15\x80\xd5\x3f\xea\x07\x50\x4c\x66\x09\x9f\x83\xff\x6b\x89\xba\xe2\x3e\x71\xbb\x60\x31\xb4\x5d\x63\x71\x33\x3f\x94\x4d\xd5\x16\x9d\x3b\x5f\x77\x2b\xb4\xec\x3c\xe5\xff\xf5\xf5\xed\x63\x4a\xbc\xda\xbb\xc2\x7d\x58\x39\x7b\x4c\xdd\xde\x83\x39\x11\xcc\x89\x26\x5c\x13\xcc\x89\x55\x04\x0a\xe6\x44\x30\x27\xf6\x69\x4e\x1c\x58\x83\xf9\xf0\x3f\xa5\x26\xea\xcb\x12\x10\x1f\x6c\xd6\x21\x04\x67\xa9\x84\x23\xbf\x52\x91\x79\x64\xd5\x3f\xb8\x20\xe5\x90\x38\x19\x33\x4c\x95\xd7\x76\x4d\xea\x4b\x6b\x46\x84\x9a\x7b\x4f\x92\xe9\x4c\xcd\xff\xf3\x91\x51\x95\xa5\x78\xd1\x31\xe3\xc2\x70\x8c\x7e\x79\x82\x59\x9c\xe8\x4b\x5d\x66\xe3\x44\x98\x31\xae\x40\x94\xc3\x02\x62\xf4\x4c\xb1\x11\xfc\x9d\xdb\x5e\xe3\x44\xc7\x63\x52\xb5\x0e\x9b\xc9\xb8\xff\xde\x69\x87\xef\x2c\xba\x62\x41\x9f\x12\x3e\xc4\x49\x32\x47\x69\x31\xa2\xa4\x07\x68\xc9\x1a\xda\x62\xbd\xb5\xc3\x5c\x73\xb0\x02\x65\xb3\xad\x65\xde\x88\x63\x12\x32\x87\x76\x47\x78\x5b\xf8\xc6\xc4\x4d\x5b\x8e\x6a\xdb\x1c\x2d\x41\x39\x69\xaa\x9c\x1c\x91\xd8\x38\xac\x6e\x12\x6e\xf2\xe3\xbf\xc9\x15\x96\x4f\x7e\x27\x73\xb8\xd0\x5d\x33\xfa\x42\x17\xdf\x72\x4b\xdf\x7f\x35\x79\x2e\x2f\x2f\x58\xfd\x6c\xd6\x0d\x7d\xf5\xa3\xd0\x19\xbd\xe6\x41\xdb\x46\xdf\xfc\x0c\xcb\x2b\xcf\xc3\xfd\xd1\xff\xa0\xfb\x5b\x3e\xb2\xfb\xcb\x33\x11\x92\x72\xfb\x98\x20\x4a\xcc\x07\x58\x29\x2d\x90\x36\xf0\x51\xd7\x4a\xcd\x3e\x96\x4f\xcd\xba\xb2\x7f\x22\xaa\xf0\x70\xdb\xd5\x1a\xb7\x50\x58\x67\x61\xe6\xfb\x97\x4f\x0d\xd8\xf8\x8d\xa9\x3b\x8d\x8f\xe4\x8a\x75\x1f\x5f\x63\xfa\xa6\x02\x66\x8d\x85\x7f\x2d\x4d\xea\x9b\x09\xdc\x55\xf9\x90\xc7\xd8\xb0\x7e\xd9\x0d\xd2\x9a\x19\x96\x2e\xb1\xb7\x78\x72\x8b\x57\x72\x38\xa2\xcb\x68\xd4\xf4\x2c\x1e\xcd\x09\x2c\x69\x5a\x2b\xd6\x76\xef\x36\xc8\x3e\xee\x38\xa1\x7d\xeb\x2a\x28\x8b\xbb\x9e\xd5\x7e\x02\xc0\xde\x6e\xac\x53\x42\xd8\x73\xcd\xd6\x4d\x38\x2b\x3b\x43\xae\xa8\x30\x3b\x9a\x6a\x02\x0d\xa9\xa8\xf4\xfb\xb3\x47\x5c\x18\x6d\x33\xb6\x67\xd6\x04\xb4\x3a
\xfd\xce\x7d\xb7\x7f\x8e\x3a\x28\xc6\x0a\xeb\x43\x2a\xc8\x4c\x10\x49\x98\x32\xae\x08\xa6\xa8\x9a\xa3\x29\x8f\x49\x62\xfc\x00\xc6\x39\x78\x89\x15\xbe\xc0\x0a\x27\x7c\x7c\x86\x3a\xf0\x9f\xfa\x65\x2a\x11\x4e\x24\x47\xd8\x31\x0e\x89\xdd\x10\x98\xc5\x4e\x3c\x60\x14\xf1\xe9\x8c\x26\xa6\xae\xcd\x8f\x6f\x53\x16\xd3\x67\x1a\xa7\x38\x41\x7c\x08\x3e\x94\xb3\x47\xd6\x7d\x26\x4c\xa5\x60\xe3\xe2\x24\x41\xf6\xb3\xee\x01\xcf\x81\xe1\x66\x29\xe9\x94\x26\x58\x68\xed\xd1\xcc\xf6\xc6\x8e\x85\xfa\x13\x92\xcd\x15\xe6\xa5\xa9\x39\xc5\x4f\x44\x22\xaa\xd0\x8c\x4b\x49\x87\x49\x7e\x8c\x1f\x7a\x08\xe6\x7d\x71\xd5\x83\xa0\x61\xa4\x10\x37\x72\xd0\x7d\xdc\x46\xd0\xdd\x17\xa7\x98\x31\x02\x1f\xe6\x6a\x42\x84\xfd\xbc\x7d\xf8\xb5\xe3\x7f\x0f\xd7\xb6\x5c\xac\x7b\xb9\x18\x00\xec\x77\xee\x7f\x5c\xfc\xab\xab\x0f\x5b\xfc\xe5\xaa\xf3\x70\x7d\xf1\xc3\xe0\xf6\xaa\x53\x51\x77\x66\xd9\xaa\x71\x2c\xd1\x9f\xd9\xe6\x87\x69\xff\x86\x46\x4b\x53\x13\x9b\x3b\x1d\x1a\x79\x1c\x1a\xbb\x1b\x9a\xfa\x1a\x9a\x39\x1a\xea\xbd\x0c\x7b\x48\x53\x6b\xee\x0a\xb8\xa2\xb2\xe8\x0b\x38\x8e\x9c\xb5\xc2\x94\xf5\x1a\xf6\xed\x08\xf8\xea\xbc\x00\x5f\xa9\x0b\x20\xd8\xff\x7b\xa1\xdb\x5b\x35\xfe\x5b\x6e\xf9\x6f\x93\x94\x9a\xe1\x5f\x84\xac\xd4\xc5\xac\x54\x12\x92\x52\x43\x52\x6a\x53\x02\x85\xa4\xd4\x90\x94\x7a\xb4\x49\xa9\x65\x43\x2b\x44\x6c\xdb\x10\xb1\x6d\xb9\x8d\xd6\xe6\x80\xed\x5b\xb5\x5c\x42\xf0\x32\x04\x2f\x43\xf0\xf2\x48\x4f\x6e\x08\x5e\x36\xa7\x51\x08\x5e\x86\xe0\x65\x08\x5e\x86\xe0\x65\x08\x5e\x86\xe0\xe5\x6b\xba\x46\xda\x90\x1b\x7a\xcc\x21\xdb\x10\x89\x5d\x11\x89\x6d\xb9\x91\xdf\xca\x40\xec\x5b\xb5\x11\x82\x69\x1f\xe2\x92\x5b\x2d\xbb\x55\x46\xfd\x5b\xbb\x37\x43\x28\xb6\x39\x21\x42\x28\xb6\x31\xa9\x42\x28\x76\x09\x71\x42\x28\x36\x84\x62\xbf\xc2\x50\x2c\x8d\xb7\x6e\xb9\xd5\xc4\x6e\xd1\xba\x62\xdc\x05\xf7\x50\xe6\xdc\x12\xbf\x81\xf6\x88\xe5\x53\xe6\x01\x6a\x60\xcf\xf4\xe2\xa3\x30\x64\x2a\x17\x7c\x08\x83\x66\x1b\x8b\x05\x2b\x2d\xc1\x15\x40\x15\xe8\x5f\x72\xa7\x62\x0b\x7b\x04\x6c\x63\xa3\x78\x0b\xf3\xcc\x14\x67\x7d\x78\x44\x69\xdf\xb2\x83
\xe2\x17\x14\xbf\xa0\xdb\x34\x5c\x70\xd0\x6d\xda\xab\xdb\xbc\x96\xc1\xd2\xbe\xe3\x79\x74\xfe\x89\xbd\xab\xa5\xb2\x31\x68\x9b\x69\x93\x0d\xa1\xbb\x74\x96\x70\x1c\xaf\x4a\x90\xfb\x0d\xe5\xba\xda\x12\x75\xd3\x8c\xab\x5f\x68\xb9\xb6\xb9\x90\x1b\x67\x66\xfe\x35\xa0\xc6\xd7\x2e\xfd\x55\xf1\xcc\x80\x7f\x33\xd4\xa2\xb5\x10\x08\xf7\xcf\xcc\x6d\xaf\xc6\x7b\x65\x6e\x7e\x93\xa5\x77\xe1\x88\xae\x3e\xa2\xf0\x8f\x42\x82\xf7\xbe\x3c\x21\xe5\x63\xdb\xc8\xe9\x21\xff\xde\xf2\x73\x9b\xed\xef\x21\x5c\x1c\x6f\xf2\x94\xbe\xe1\x60\x73\x08\x28\x2f\xcf\xfa\xd9\x51\x02\xea\x23\xeb\x4f\x34\x1d\xa6\x43\xca\xb2\x7c\x3b\xc7\x21\x7f\x71\xe4\xfa\x0b\x60\x5d\x5a\xfc\xcb\x64\x9e\xbb\xc2\x64\x79\xb4\xcc\x50\x42\xa7\xda\x4a\x8d\x88\x50\x40\x6f\xce\x14\xf9\xa2\x24\x3a\x45\x09\x7d\x22\xe8\xbd\x3e\xf2\xa8\x73\xdb\x7b\x7f\x82\xde\x5f\xe1\x94\x45\x13\x34\x4b\x30\x93\xef\x5b\x63\x60\x05\x9f\x59\xe8\xa6\x12\xa2\xa5\xbb\x24\x4e\xf0\x28\x06\x8f\x62\xeb\x3c\x8a\x6d\xb1\x19\x4c\x51\x29\x9e\x92\xb6\x58\x0f\x6d\xb7\xfa\x83\xf5\x10\xac\x87\x60\x3d\x04\xeb\xa1\x60\x3d\xb4\x83\xc2\xc1\x74\x08\xa6\x43\x30\x1d\x82\xe9\x10\x4c\x87\x9d\x93\x31\x98\x0e\xcb\x4c\x07\xf8\x97\xc3\x8d\x59\xd7\x8e\x68\x6c\x3f\x34\x00\x89\x39\x1a\xe3\x21\x18\x0e\xc1\x70\x08\x86\xc3\xc1\x0d\x87\xd6\x2c\xe8\xed\xe1\x5d\x04\xc4\x88\x80\x18\x11\x10\x23\x6a\x10\x23\x0e\xa5\xb2\x19\x7d\xed\xc8\x4a\x64\x8e\x42\x69\x7b\xb5\x1a\x99\xb7\xa7\xc6\x85\xaa\x9f\x50\xf5\x13\xdc\x90\xa1\xea\x27\x38\xda\x82\xa3\xad\xd5\x8e\xb6\xd7\xf2\x9e\x1f\xf8\x78\x1e\x40\x39\x6d\x79\xc6\xf2\x77\xc7\xa0\x81\x1e\x30\xe7\x20\x78\xd9\x82\x97\xad\x9a\x32\xc7\x19\x9e\x6f\xcd\xad\x1f\x00\x9e\x82\xc6\x1f\x12\x0f\x42\xe2\xc1\x4a\xe2\x04\x7b\x28\xd8\x43\xad\xb3\x87\x5e\xd1\x50\x68\x5d\x9a\x72\xb0\x18\x82\xc5\x10\x2c\x86\x37\x6b\x31\xb4\x86\xc2\xc1\x5c\x08\xe6\x42\x30\x17\x82\xb9\xb0\x9c\x38\xc1\x5c\x08\xe6\x42\x30\x17\x5a\x9d\x9a\x7c\x2c\x06\x43\x30\x16\x82\xb1\xd0\x6e\x63\xa1\x35\x0b\x0a\x49\xbc\x21\x89\x37\x24\xf1\x7e\x35\x49\xbc\x6f\xd4\x60\xdf\xab\x9a\xe6\x44\xe4\x32\xc5\x6b\x51\x5f
\xfa\x79\x41\xb0\xb6\x56\x65\xca\x67\xbb\x29\xee\xe3\xae\x48\xfd\xc2\xc5\xd3\x28\xe1\x2f\x83\xcc\xaa\xb3\x49\xe1\xf9\x7f\xdb\x7a\x3e\xef\x0f\xb9\xf2\xec\xfd\x31\x53\xa2\xbd\xbf\xb9\xd1\x8b\x00\xa1\xe9\x2a\x7c\x50\x89\xb8\x40\xe9\x2c\x86\x7f\x46\xa9\x54\x7c\x5a\xaf\x55\x7f\xc6\x2a\x9a\xe0\x61\x42\x3a\xd9\x77\x2f\x38\x1b\xd1\x71\x6a\xf8\xe3\x37\x10\x85\xd8\x69\x36\x27\x4e\x33\xd2\x42\xd1\xcd\x6f\x99\x26\xfe\x00\xf3\xf8\xc5\x3e\x99\x7f\xe4\x28\x12\xd0\x17\xa7\x6d\x96\x73\x28\xb8\xd1\x22\x17\x6d\x2b\xe2\xbc\xd1\xda\xa1\xfe\x2c\x9e\x89\x55\xaa\x2a\x78\xa1\x33\xcb\x84\xc6\xc0\x9c\x2f\x13\x0a\x9e\x35\xf0\xc4\x81\xf7\x29\x1f\x18\xbd\xd0\x24\x01\x8d\xc3\xd0\xa2\x7d\x2b\x6f\x64\xbd\xd8\x85\xdb\xb3\xf7\x26\xd6\xed\x84\xc7\x8a\x95\xbb\x23\x68\xc2\x10\x47\xba\xec\xd7\x44\xd8\x5d\x21\xc8\x5e\x15\x67\xb7\xf6\xfa\xac\xa9\xa9\xfa\xf0\x3f\x95\x57\x62\x93\xde\xa9\xaf\x7d\x0f\x7e\x22\xea\xcd\x5c\x82\x9f\x88\x3a\xd4\x0d\xf8\x16\xaf\xbd\x4d\xef\xba\xa5\x82\x4f\x90\x11\x11\x84\x45\xe4\x58\x6b\xb2\x16\xae\xb8\xa3\x5d\xee\x46\x37\xdb\xd1\xae\x76\x1d\x07\xd6\x2f\x66\x91\xd6\x5d\x35\x75\x22\xd7\x5f\xaa\xe2\x36\xbc\x5c\x08\x81\x59\x67\x55\xbf\x73\xff\xe3\xe0\xae\x7b\x7f\xf3\x70\x77\xd1\x3d\x47\x1d\x38\xe8\xf0\x8e\x11\xef\xf4\x0f\x18\x0e\xea\x61\x33\x67\x98\x30\x77\x9c\x04\x51\x0d\x61\x70\x4d\x45\x74\x8a\x2e\xae\x1e\xee\xfb\xdd\xbb\x9a\x01\xad\xf0\xa7\x6c\x8c\x14\x99\xce\x12\xac\x48\x8c\x9e\xd2\x21\x11\x8c\x80\x61\x95\xa4\x52\x11\x91\x07\xc7\xcd\xa0\xdd\xff\xea\x5e\x3c\xf4\x7b\x37\xd7\x83\x9f\x1e\xba\x0f\xdd\x73\xe4\x6e\x11\x3d\xac\x9e\x97\x9e\x45\x3c\x67\x78\x4a\x23\xf3\x87\xac\x15\x2d\xfa\x3d\x25\x29\x41\x58\x4a\x3a\x66\x53\xc2\x54\x79\x44\x37\xe1\xab\xce\xf7\xdd\xab\xe2\xc8\x13\x82\x7e\xfc\x47\x3e\xa9\x04\x0f\x49\x62\xa3\xf5\x10\x80\xd6\x97\x57\xfe\x21\x1b\xc6\x4f\x0d\x55\x7f\x7a\xe8\x5c\xf5\xfa\xbf\x0e\x6e\x3e\x0e\xee\xbb\x77\x3f\xf7\x2e\xba\x03\xeb\x8c\xb9\xe8\xe8\xef\x16\xbe\x64\x7d\x36\xe8\xf7\x14\x27\x54\xcd\xf5\x3e\x4a\x73\xe9\xa3\x97\x09\x61\x28\x65\x70\x81\x18\x4f\x21\x66\xde\x47\xe5
\x8c\x44\x66\x45\xb7\x57\x0f\x9f\x7a\xd7\x83\x9b\x9f\xbb\x77\x77\xbd\xcb\xee\x39\xba\x27\x09\xf8\xd2\x1c\xd1\x61\x17\x67\x49\x3a\xd6\x92\x60\x3a\x4b\x88\xa6\x86\xf1\x15\x0e\xc9\x04\x3f\x53\x2e\xec\x75\x3c\xa6\xcf\x84\x19\x3a\x6a\xb6\x32\xe3\x3b\x9f\xd5\xc0\x23\xdd\xcd\xf5\xc7\xde\xa7\x73\xd4\x89\xe3\x6c\x0d\x12\xc6\x28\x70\x8e\x3b\xba\xa7\xc5\x69\xd3\x11\x8d\xe0\xf3\x86\x89\xf8\x33\x11\x82\xc6\xa4\xc4\x47\x9d\xfb\xfb\xde\xa7\xeb\xcf\xdd\xeb\x3e\x50\x4c\x09\x9e\x48\x34\xe1\x2f\x10\xe8\x85\x15\x42\xfc\xf7\x19\xd3\x04\x3e\xe6\x36\x8b\x33\xff\xf4\x7b\x5f\x36\x6e\x4d\x91\xb2\x57\x8f\x5d\x16\x0e\xde\xa2\xb7\xaf\x7c\x92\x16\x9f\x28\x1d\x8b\x65\x0f\x14\xb8\x7c\xf1\xc1\x55\xdc\xba\xf8\x46\x89\xdd\xea\x7d\x9c\x0b\xfc\x52\xbf\xd2\x7c\xaf\x1b\xbb\x38\x8b\x34\xdc\x87\x8e\xed\xde\x7e\x17\x93\x84\x28\x52\xab\x13\x5f\xc2\xcf\xaf\xaf\x13\x9b\x79\xbc\x19\xb5\xd8\x2c\x27\x68\xc6\x41\x33\x6e\xbc\xe0\xa0\x19\x57\x2d\xf8\x8d\x68\xc6\x2d\xf4\xfa\x38\x11\xd5\x3a\xaf\x4f\x88\x8f\x94\x76\xea\x38\xaf\xc0\x57\x0b\x8f\x84\xf8\xc1\x7a\x57\xc8\xf1\xaf\x3b\xc4\x0f\x42\xfc\xa0\xf2\x26\x79\xf3\x51\x83\xe3\xbc\x1a\x0e\x18\x34\x08\x66\xc4\x92\xf5\x06\x33\xe2\xc8\x56\x1b\x1c\xec\xc1\xc1\x1e\x1c\xec\xc1\xc1\x1e\x1c\xec\x68\x53\x07\x7b\x03\x29\x7b\x08\x77\x6a\x4b\x93\x88\xdf\x4a\xd8\xe0\x38\xf5\xe2\xc3\x46\x0d\x82\x6a\xbc\x64\xbd\x41\x35\x3e\xb2\xd5\xb6\xd0\x2f\xd2\x2e\x0f\x3b\x8d\xab\x1c\x22\x07\x84\xa6\x77\x33\x69\x0a\x4f\xef\x08\xda\x8b\x8f\x42\x9c\xbf\x1a\x42\x7d\xc0\x73\x0f\x78\xee\x01\xae\x25\xe0\xb9\xa3\x00\x48\x12\x00\x49\xda\x0c\x48\xd2\x60\x1b\xdf\x02\x9e\xfb\x61\x3c\x0c\x6f\xa8\x48\xd9\x29\x86\xb2\x90\xbb\xc1\xe5\xaa\xe4\x0d\xf0\x12\xa4\xb3\x84\xe3\x78\x19\x58\x8c\xd3\x23\x7d\xc0\x98\x25\xaa\xa7\x19\xfb\x97\x45\xe3\xa9\xb5\x9a\xa7\x9b\xab\x99\xf9\xa1\xdc\x07\xad\x31\xb8\xdc\xb2\x5b\x61\x66\x15\x7b\xb7\xb6\x90\xa1\x8f\x2a\xa1\xf6\xb0\x1c\xfd\x26\x9b\xb6\x86\x63\xba\xfa\x98\x1e\xae\x3f\x4a\xd5\xd1\x6d\xec\x08\x91\x7f\x3f\xa6\xb3\x7b\x20\xe4\xe3\xb7\x77\x62\x03\x42\x5b\x40\x68\xab\xa5\xcc\x71\xc2
\x39\xb7\xc6\xf0\x0a\xbe\xb4\x00\x7d\x1c\xa0\x8f\x77\x49\x9c\xe0\x69\x0c\x9e\xc6\xd6\x79\x1a\xdb\x64\x43\xec\xb1\x75\xca\x76\xd6\xc4\x51\x79\x02\x82\x35\x11\xac\x89\x8a\xa5\x05\x6b\xe2\x2b\xb4\x26\xda\x41\xe1\x60\x4a\x04\x53\x22\x98\x12\xc1\x94\x08\xa6\xc4\xce\xc9\x18\x4c\x89\xd7\x69\xab\x52\x65\x4f\x34\x2c\x49\x3d\x2a\x63\x22\x18\x12\xc1\x90\x08\x86\x44\x68\x1c\xb3\x7c\x4d\xa1\x71\x4c\x68\x1c\x13\x1a\xc7\xbc\x81\xc6\x31\x87\x54\xe1\x6a\xd0\xca\x8f\xa3\xcc\xe6\x28\x94\xb8\x57\xab\xb3\x79\x7b\x2a\x5d\xa8\x1c\x0a\x95\x43\xc1\x45\x19\x2a\x87\x82\x13\x2e\x38\xe1\x5a\xed\x84\x7b\x2d\xcf\xfa\x81\x8f\xe7\x81\x14\xd5\x23\xc9\x76\xfe\xee\x18\xb4\xd1\x03\xe7\x27\x04\x0f\x5c\xf0\xc0\x55\x53\xe6\x38\x43\xf9\xad\xd1\x02\x8e\xb1\x73\x6c\xb0\x00\x9a\x13\x22\x24\x29\x34\xa7\x55\x48\x52\x58\x42\x9c\x60\x1f\x05\xfb\xa8\x75\xf6\xd1\x2b\x1b\x0e\xad\x4d\x71\x0e\x16\x84\x79\x2e\x58\x10\xc1\x82\x78\xa3\x16\x44\x6b\x28\x1c\xcc\x87\x60\x3e\x04\xf3\x21\x98\x0f\xcb\x89\x13\xcc\x87\x60\x3e\x04\xf3\xe1\x68\xd2\x9a\x8f\xc9\x80\x08\xc6\x43\x30\x1e\xda\x6d\x3c\xb4\x66\x41\x21\x01\x38\x24\x00\x87\x04\xe0\xaf\x26\x01\xf8\x8d\x1a\xf0\xbb\x55\xdb\xfe\xcd\x12\xea\x9d\xa7\x60\x64\x9a\xc8\xbb\xef\x13\x3e\xec\xcf\x67\x44\xff\xdf\x4b\x3a\x25\x4c\x02\x25\xa8\x9a\xfb\x6a\x5a\x0d\x43\x2d\xb2\xd2\xbb\xfb\xde\xf5\xa7\x2b\xbf\x3d\xd0\xbb\xcf\x0f\x57\xfd\xde\x6d\xe7\x2e\xdb\xee\x6c\x55\xfe\x16\xdb\xf7\x0a\x9a\xa6\x3d\xc9\x77\x44\x9b\xd4\x20\x0c\xee\x15\x56\xa9\xdc\x6c\x66\x77\xdd\xfb\xee\xdd\xcf\xd0\xde\x68\x70\xd9\xbb\xef\x7c\x7f\x55\xe0\xf3\xc2\xef\x9d\x8b\x9f\x1e\x7a\x77\xf5\xbf\x77\xff\xab\x77\xdf\xbf\xaf\xfb\xf5\xae\x7b\xd5\xed\xdc\xd7\xbf\xfd\xb1\xd3\xbb\x7a\xb8\xeb\x2e\xa5\xc7\xd2\xd9\x2e\xb7\xad\x24\x10\x09\x5a\x7c\xa0\xc8\x0a\x43\x91\xd3\x10\x65\x5a\xb1\x93\xf2\x55\xdf\x3a\x47\x0f\xd6\x55\x41\xed\xe0\xe6\xde\xf0\x06\x32\x36\x56\x4c\x25\x1e\x26\x24\x5e\x18\xc9\xd1\xb0\x6e\x24\x5c\x98\xd4\x0b\x96\x9e\x26\xad\x45\x79\x64\x8e\x0f\x82\xa6\x6b\x8a\xb0\xb8\xe2\x1b\x66\x1f\x6a\xbf\xc0\xb4\x48\xa6\xcf\xa4\xf0\xa5
\x28\x15\x82\x30\x95\xcc\x11\xf9\x42\xa5\x92\x0b\x83\xba\xed\xab\x1b\xd6\x0a\x84\x6c\xc0\x09\x96\x68\x48\x08\x2b\xce\x5f\x90\x84\x60\x59\x31\x67\xbb\xfb\xcd\xc8\x92\xed\x95\x75\x32\x99\x3b\x76\x84\x69\x92\x0a\x52\x3a\x2d\x7c\x3a\xc3\x82\x4a\xce\xba\x5f\xf4\x15\xad\x0f\xf2\x0d\xbc\xce\xc5\x66\x27\xa6\xfb\x93\xcf\xc1\xd7\xc5\xff\xfc\xd4\x2f\xfe\x57\xe1\xcc\x5f\xf5\x8b\xff\xb5\x9c\xd7\xbd\x81\xcb\x9c\x7d\x8a\x3e\xf5\xcf\xd1\x27\x80\x18\x15\xa8\x3f\xc1\x86\x63\xaf\xfa\xe7\xe8\x8a\x48\x09\x7f\xc9\x5f\x56\x54\x25\xb0\xb6\xef\x29\xc3\x62\x8e\xdc\xf2\x4d\xe7\x3e\x1c\x4d\x10\xc9\x48\x53\x26\x1e\xfb\x67\xca\xc0\x23\x91\x53\xef\x8a\x8f\x69\x84\x93\xed\x88\xd8\xb9\x2e\xc8\x81\x9b\xbb\xa5\xa4\xf0\x9f\x5e\xa4\x45\xe7\xfa\x12\xba\xe2\xb9\xa9\x56\xac\xfc\x9a\x48\xcd\x24\x11\x67\xb1\x8d\xa9\x69\xa5\x66\xee\xd9\x2a\xff\xe4\xd0\x59\x30\x95\x94\x8d\xf5\x88\xe8\x03\xba\xb9\x7b\x64\x37\x22\x36\xfe\x5d\xa2\x95\x7c\xc3\x73\x54\x22\xc6\x15\xa2\xd3\x19\x17\x0a\x33\xa5\xed\x1b\xd0\x6e\x2c\x45\x8c\x04\xb8\xe0\xd3\x69\xaa\xb0\x3e\x68\x0b\x44\x65\xc6\xcb\x73\x4f\x54\x2f\x86\x40\x58\x05\x0d\x8d\xfa\x93\xaf\x65\x26\xf4\xf8\x5a\xf5\x2a\xba\x06\x68\xbc\x60\xa1\xbb\x21\xb0\x10\xb8\x78\x01\xbf\xa3\x8a\x4c\xcb\xcf\x37\xbc\x76\xff\x55\xe9\xf7\xb8\x30\x55\x11\x44\x74\x44\x34\xa1\x8a\x44\x4a\x1f\xc1\x8d\x78\xe2\xe1\xfa\xc7\xeb\x9b\x5f\x7c\xc5\xe8\x5d\xe7\xf3\xe5\x7f\x14\x60\x60\x3b\x77\x9f\x17\xfe\x30\xf8\xf9\x3f\x16\xfe\xf2\xff\x5f\xca\x4f\xe5\x2f\x2d\xb8\x2f\xbc\xb5\x9c\x82\xa5\x00\xae\x6e\xb7\x54\x44\xa7\x78\x4c\x90\x4c\x67\x9a\x03\xe4\x59\x71\x7f\xb5\xa6\x7c\xc5\x71\x4c\xd9\xd8\x34\x7f\xbb\xa2\x8a\x08\x9c\x7c\xc6\xb3\x8f\xce\x2d\xbf\x01\x75\xfe\xcf\x7d\xa1\x01\xe1\xbb\x5f\x3b\x9f\xfd\x16\x86\xef\x6e\xef\x6e\xfa\x37\x4b\x57\x5d\x18\x61\xf1\x18\xe9\x9f\xcf\xe1\xff\x47\x1f\x90\x1e\x3d\x53\xe8\xa7\x44\x61\x6d\xe8\xa0\x3f\x99\x7e\x59\x59\x25\x0c\x65\x09\x9c\x9a\x99\xa0\x53\x0a\x57\x8a\x71\x4c\x7e\x63\x6c\x86\xcc\x28\xca\xce\x8d\x79\x01\x9c\x00\xee\x52\x66\x31\x16\x31\xfa\xa7\x2c\xf7\xc3\x04\x7f\xb8\xf9\x03
\x89\xd1\x29\x9a\x28\x35\x93\xe7\x1f\x3e\xbc\xbc\xbc\x9c\xe9\xa7\xb5\x02\xfb\x41\xff\xe3\x94\xb0\xb3\x89\x9a\x26\xa6\xff\xa7\xa6\xc2\x39\xba\x15\x5c\x5f\x21\xe0\x77\x20\x82\xe2\x84\xfe\x41\x62\x34\x34\xf2\x8f\x8f\xd0\x6f\x11\x17\xe4\x2c\xdf\x18\xeb\x2b\xb3\xf7\x88\xf5\xa7\x7d\xd0\x0f\x55\x08\x93\xf2\x7e\xa2\x98\x44\x34\xb6\x6a\x06\x61\x11\x07\x87\xaa\x09\xc1\xe8\xf1\x5c\x93\x31\x6d\xa8\xcd\x52\x95\x93\xd3\xb3\xc1\x70\x4c\xbc\xf6\x9d\x56\xbf\xce\x18\x4e\xdb\x73\x3d\x63\x8d\xa7\x92\x08\xb8\x5b\x31\xdc\xaa\xee\xd1\x99\x5e\x70\xc4\x13\x34\x4c\x47\x23\x22\xfc\xf4\x81\x13\x6d\xa4\x51\x89\x04\x89\xf8\x74\x0a\x1a\x83\x7e\x2b\x95\x86\xab\x81\x62\x76\xb6\x67\x8f\x0c\xf6\x5f\x5b\x6f\xc0\x01\x31\x07\x51\xc7\x08\x89\x11\x66\x73\xf3\x99\x61\x3a\xf2\xc7\x37\x7d\x75\x71\x8c\xa8\x7a\x64\x9d\x24\x41\x82\x4c\xb9\x22\x5e\xfb\x34\x08\x75\x16\x09\x0e\x22\x52\x90\x59\x82\x23\x12\x1b\x7e\x48\x78\x84\x13\x34\xa2\x09\x91\x73\xa9\xc8\xd4\x1f\xe0\x4f\xe0\x82\xd2\x34\xa3\x12\xc5\xfc\x85\x25\x1c\xdb\x75\x94\x5f\xfb\xa6\x78\x1a\xbb\xae\xe7\x69\x57\x08\x2e\xe0\xff\xfb\x91\xb2\x78\x67\x12\xea\xe1\xbe\x7b\xe7\xff\xf7\xfd\xaf\xf7\xfd\xee\xe7\xf5\xa4\x4f\xc6\x59\x30\x3d\x70\x4d\x9c\xa3\x7b\x43\x04\x2e\xb4\x46\x24\x6a\x16\xf5\xd9\xb2\x52\xfe\x07\x1e\x6f\x28\x7d\x3f\x77\xae\x1f\x3a\x05\x89\x72\x7f\xf1\x43\xf7\xf2\xa1\x64\x0f\xd8\xf5\x15\x74\x78\x63\xd5\xfa\x7f\xbb\xf8\xa1\x77\x75\x39\xa8\xb0\x83\xdf\xdd\x75\x2f\x6e\x7e\xee\xde\xe5\x26\x6b\x25\x89\x4a\x93\x29\x0b\xab\xbe\x11\x4a\x13\x1e\xa3\xe1\xbc\xba\xc3\xad\xd6\x9c\x13\x88\x9c\xe7\x3d\x9e\xcd\xa8\xe7\x20\x9b\x5c\xb3\xe1\xfc\x8d\x29\x8f\xc9\x89\x7d\x06\x5a\x03\x1b\x9f\x91\xd1\x98\xab\x07\xd6\x5f\xc7\xcc\xf3\xbf\x98\xae\xbd\x19\xe1\xce\x51\x07\x49\xfd\x60\xaa\x0f\xb5\xa0\xe3\x31\xf8\x43\x4b\x53\x35\xa3\xd9\x57\x81\xbc\xf0\x9e\xd9\xff\x99\xe0\x70\xce\xf5\x67\xad\x23\x3d\x73\xb6\x98\x17\xa1\x8d\x74\x71\x44\x81\xc1\x8f\x52\x31\x35\xb7\x59\x9a\x08\xb5\xf4\x32\xe7\xd1\xb8\xc1\xf4\xe1\x02\xb1\x25\x8d\x1b\x77\x26\xc8\x33\xe5\xa9\xf7\xaa\xed\x54\x5c\xd8\xf1\xca
\xe1\x73\x02\x00\xd9\x8c\xaf\xa7\x34\x4c\xc6\x1e\x95\x23\x68\x11\xf6\x0c\x23\x8c\x04\x9f\x56\x8c\x51\x3c\x26\xbd\x9b\x7b\x25\xb0\x22\xe3\xf9\xa5\x15\x19\x9b\x1f\x8f\xcb\x9b\x5f\xae\xaf\x6e\x3a\x97\x83\x6e\xe7\x53\xf1\xc4\x67\xbf\xdc\xf7\xef\xba\x9d\xcf\xc5\x9f\x06\xd7\x37\xfd\x81\x7b\x62\x29\xcb\xd7\x7c\x60\xf1\x9e\x2e\x3e\x78\x8e\xb4\xc8\x05\xd1\xf8\x42\x93\x44\x5f\x26\x9e\x7c\x1c\x92\x11\x17\x46\xce\x4f\x5d\xa2\x89\x55\x61\x1c\x6d\xad\x2d\x56\x5a\xc5\x39\x38\xfc\xaa\x86\x34\xce\x7c\x25\x08\x9e\xc2\x3d\x81\x19\xea\xb2\xf8\xf4\x66\x74\x7a\x6f\xfe\x38\xc5\xe2\x89\x88\xec\xd5\x17\x41\x95\x22\xac\x60\xd2\x61\x37\xe5\xcc\x48\xcc\x3f\x70\x86\xee\xb4\xdc\xd7\xcf\x67\x97\x9a\x66\xf6\x98\x28\x4c\x13\x69\x27\x5b\xa0\xeb\x39\xba\xc2\x62\x9c\xbb\x17\xff\xc4\x47\x23\x33\xd8\x37\x66\x1a\xfa\x0e\x2b\xac\xa2\x42\xf6\x6a\xd6\x70\xf7\x22\x7c\xcf\x3e\x9c\xe9\xc3\x8b\x5c\xf5\x30\xdb\x8e\xa7\x1e\x6e\x81\xe2\xc6\x62\x2f\xd8\x86\xf6\x97\x0a\x5e\x83\x85\x9b\x9f\x97\x5f\x32\xd5\x63\x2f\xb2\x53\xf1\xc1\x0a\x76\x32\x1d\x5a\xf4\xce\x8f\xb4\xb5\x59\xc1\x4b\xe4\x0b\xb5\x0e\x03\x7f\xde\x25\x16\xca\x87\x01\xaf\x31\x9e\xcd\x08\x16\xb2\x6a\xb7\x8b\x6a\x60\xcd\xde\x9b\x2f\xf9\xdf\xb0\x9b\xec\xbe\x73\x82\x38\x03\x87\x43\xa6\x44\x94\x38\xb2\x01\x0f\x98\xb1\x16\x38\xe0\x16\xda\xc7\xdf\xd8\x56\xed\x9f\xa9\xd4\x46\xa3\xf9\xe3\xf7\xb6\x87\xfc\x66\x0c\xf1\xb1\xd3\xbb\x2a\x29\x17\x83\xcb\xee\xc7\xce\xc3\xd5\x72\x37\x61\xe1\xbd\xf2\x16\xa3\x53\xa4\x7f\x2f\xa6\x03\xd0\x91\xb9\x33\x5c\x27\x7c\x63\xd2\x12\x06\x4e\x2b\xdb\xa5\xda\xb8\xe1\x63\x32\x4b\xf8\x7c\x4a\x18\xb8\x78\x0a\x37\xa1\xa6\xe7\x08\x53\x7b\xb5\x78\x93\x05\x2f\x8e\x75\xbb\xc1\x35\x76\xea\xda\xef\x93\x38\xbb\x79\x8b\xdd\xf7\x4b\xa2\xfb\xd6\x04\x05\xed\xff\xb9\x57\x58\x6d\x78\xc6\x3a\x17\xfd\xde\xcf\xdd\xa2\x7d\x78\xf1\x43\xef\xe7\x2a\xad\x66\xf0\xa9\x7b\xdd\xbd\xeb\xf4\x57\x28\x27\xa5\x21\xab\x94\x13\xa9\x27\x5c\x0e\x0a\x53\x99\x25\x3a\x45\xa6\x87\x3f\xa2\x4a\xa2\x67\x2a\xe9\x90\x26\x54\xcd\x91\x0d\xb0\x3e\xf4\x40\xb2\x3e\xe3\x84\xc6\x54\xcd\x9d\xfa
\x62\xbe\x5b\xdc\x47\x2d\x49\xed\xf8\xc6\xed\xe0\x87\x5d\xc1\xcb\x67\x36\xc7\x2d\xfa\x1c\x81\x6d\xfb\x0c\x46\x9b\xf7\x1a\xd3\x8a\x34\x1b\x13\x61\xa6\x03\x41\x25\x7f\x2e\xde\xef\x7a\x56\xbe\xb2\x92\x53\x2d\x53\x5a\xc7\x84\x11\x2d\x22\xbd\x8f\x18\x45\x4a\x10\xf6\x5e\xeb\x5c\xb3\x84\x46\x54\x25\x73\x14\x81\x0f\x0b\xdc\x99\x53\xcc\xf0\xd8\x2a\x07\x60\xe6\x94\x58\xe2\xa7\x14\x1c\xf0\x37\x23\xeb\xda\xef\x53\xb2\xe1\x31\x7b\xb8\xbe\xec\x7e\xec\x5d\x17\x59\xe0\x87\xde\xa7\x82\x0a\xfb\xb9\x7b\xd9\x7b\x28\xdc\xe6\x5a\x93\x5d\xae\xd7\x97\x87\xad\x38\x8a\xd9\x43\xe7\xe8\xd2\xbc\x7a\xae\x89\xfb\xbb\x59\x9c\x66\x19\x69\x96\x97\x1b\xbf\x25\x3a\xdc\xb9\x4c\x43\xf7\x8f\x2e\x53\xa2\x32\x2e\xd1\xd4\x85\x64\xa3\x42\x05\x1f\x52\x75\x06\xc6\xc2\xb7\xaf\xcb\xb1\xf2\xf2\x92\xdd\x83\x90\x22\x7b\x96\x7b\x96\xfc\xd4\x0c\x70\x1a\xd4\x39\xb1\x2a\xa2\x75\xb9\xc0\xfe\x19\x22\xef\xd3\x54\x2a\x13\x21\x05\xe6\x44\x4f\xff\x90\x9a\xa0\x10\x41\x3d\x43\xf7\x84\x3c\x32\xe7\x3d\x18\x53\x35\x49\x87\x67\x11\x9f\x7e\x78\x4a\x87\x44\x30\xa2\x88\xfc\x80\x67\x74\x8a\xb5\x26\x4d\xc4\xfc\xc3\x30\xe1\xc3\x0f\x53\x2c\x15\x11\x1f\x66\x4f\x63\x48\xec\x71\x91\xae\x0f\xd9\xb0\x63\xfe\xef\x57\xdf\x7d\x7b\x7a\xf5\x8f\x6f\xdf\x2d\x7a\xc8\xea\xf6\xbf\xcb\x22\x3c\x93\x69\x62\x13\x01\x85\x4f\x1b\x77\xe4\x53\xb2\x6a\xbf\xaf\x8b\xdb\xb5\x9d\xfd\x7a\x71\xfb\x50\xf0\x58\x17\xff\xf3\x73\xf7\xf3\xcd\xdd\xaf\x05\x49\xd9\xbf\xb9\xeb\x7c\x2a\x08\xd4\xee\xed\x0f\xdd\xcf\xdd\xbb\xce\xd5\xc0\xfd\xb8\x8d\xef\xed\x47\xc6\x5f\x58\x91\x34\xd2\x49\xc0\x85\x2f\x9d\xa3\x8f\x5c\xa0\x1f\xb3\x9d\x3c\x1d\x62\x09\x57\x8c\xbb\xb3\xe4\x09\x9a\xf1\x18\x04\x2f\x22\xb3\x09\x99\x12\x81\x13\xeb\x33\x90\x8a\x0b\x3c\x36\x37\xbd\x8c\x04\x56\xd1\x04\xc9\x19\x8e\xc8\x09\x8a\x80\x1b\xc6\x27\xb0\x29\x60\x6a\xf1\x71\xd9\xcf\x77\x97\x32\x45\xa7\xc4\x99\xe0\xf6\x3f\xfb\x66\x33\x36\xd8\x9c\x9b\xfe\x0f\x45\x65\xef\xe3\xd5\xaf\xfd\xee\xe0\xfe\xf2\xc7\xa5\xf4\x34\xaf\x15\x66\x76\x0f\x79\x55\x17\x3c\x49\xa7\xcc\xff\xf7\xe6\x73\xeb\x5d\xf7\xbb\x9f\xca\xb3\xbb\xe9\xf4\x8b
\x9c\x71\x57\xcc\xdb\x7b\xf7\xfd\xcd\xcd\x55\xb7\x10\xe9\x7e\x77\xd9\xe9\x77\xfb\xbd\xcf\x05\xfe\xb9\x7c\xb8\x83\x18\xd0\xd2\x65\xba\x19\x54\x2c\x54\x2f\xcb\x5f\xe6\xae\x45\x61\x23\x49\xd4\xb1\xe9\xff\xe6\x2c\x9f\x7a\x78\x39\x26\xcb\x0d\xbc\x3a\xa7\x99\x4b\x35\x32\x33\xad\x14\x87\xaa\xb8\x4d\xa8\x5e\x1c\x2f\xdd\xe8\x65\x52\xb9\x9f\x4d\x01\xe6\x75\x66\x8c\x6d\x9c\x24\xfc\xc5\x64\x28\x4f\xa9\xbe\x95\x25\x81\x44\x65\xfd\x88\xcc\x23\x84\x67\x15\x12\xaf\xb8\x2d\x24\x12\x44\x7d\xe6\x29\x53\x9b\xb3\x5c\xe7\xba\x20\x77\xba\xd7\x3f\x0f\x7e\xee\x14\x39\xb0\x77\xb5\x5c\xd4\xf8\x43\x54\x5c\xc5\x9d\xeb\x5f\xb3\x4b\x18\xf2\xd8\x4f\x32\x0b\xd5\xe8\xae\x51\x42\xb5\xda\x1b\x61\x6d\xbd\x26\xa0\xd1\x20\x42\xc1\xe5\x30\xd5\x8b\x83\xbc\xd9\x99\x89\x27\x19\xf9\x64\x26\x79\xee\xfe\x51\x1a\x4f\x02\x5d\xc0\x9b\xea\xca\x04\x60\x1c\x6b\x55\x33\x44\xd8\x33\x15\x9c\x81\xb2\xfd\x8c\x05\xd5\xda\xb8\x19\x59\xaf\xf5\x1c\xfe\xff\xf5\xc6\x04\xc7\x68\x49\x70\xdd\x73\xa1\x2e\xb3\xfc\xe4\xcd\xbc\x21\x55\x79\xba\x8b\x19\xba\xd5\x8e\x8e\xc5\x77\x2b\x36\x67\xcb\x3c\xe6\xe2\x82\x7f\x4f\x2e\x29\x4e\xb4\x00\xd8\x9d\xbe\xd8\xb9\xbe\xef\x15\xf5\xc7\xa2\x99\xe1\xc9\xe5\x8d\xf5\x45\x70\x54\x9a\x99\x3b\x63\xe2\xfe\xa7\x2b\x63\x5d\x68\x26\xb1\xe7\xd6\x33\x2c\x40\x01\x72\xbd\x55\x67\x58\xc8\xd2\x1b\x12\x01\x92\x59\x9e\x47\xa6\xef\x2c\xc8\xd2\x7a\xe6\x34\x7e\x64\xe4\xcb\x8c\x30\x09\xc9\x01\xe6\x3e\xcb\x63\xed\xf2\x0c\xf5\x46\x20\x12\xf4\xe3\x0c\xa5\xcc\x06\xc0\xf4\x85\x6b\x26\x79\xa2\x55\x59\x3b\x85\xcc\x42\x04\xc7\x0b\x23\x2e\x07\x2c\x9f\xfc\x23\xfb\x25\x0b\xa2\xc1\x4f\x23\xae\x05\x90\xde\x45\x3b\xde\x39\xc2\x4c\xd2\x13\xa4\x0d\x96\xf2\x9e\x42\x45\x84\x36\x28\x6d\x66\x9a\x96\x34\xf6\x9f\x87\xbf\x06\x16\xd2\x9f\xfd\xcb\xa0\xfa\x2e\x28\x5d\x05\x35\xaa\x71\x62\x22\x26\x83\xe6\x77\x42\xc4\x05\xb1\x71\x96\xb5\xaf\x81\x55\x82\xbd\x8f\xe5\xd3\x42\xec\xa1\xc7\xa4\xc2\x2c\x22\x17\x09\x96\x1b\x26\x21\x39\x1f\xc7\x49\x51\xe3\xb8\xbb\x7b\xb8\xed\xf7\xbe\x5f\x21\xe5\xcb\x2f\x2f\xa6\x01\x45\x49\xea\xc2\x73\x43\xc1\x71\x8c\xb4\xf8
\x1c\x73\x13\x0a\xb4\x8a\xbf\x39\x41\x66\x4f\xa8\xf4\xf2\x44\xb1\x7c\x2a\x38\xa9\x6d\x95\x85\xf5\x73\xf8\xa1\x04\x6a\x09\x81\x22\x4d\x09\xe4\xb9\x3c\xdc\x56\x43\x64\xd1\x64\xd1\x59\xef\xd6\x2c\xc1\x6a\xc4\xc5\xd4\x48\xf9\xc2\xa2\xcd\xe0\xcb\x07\xa5\x4c\x11\x21\xd2\x99\x02\x93\x5d\xcf\xb5\xac\xa5\xea\x2d\xbb\xe2\xe3\xcf\x44\x4a\x3c\x26\xdb\x04\xa0\xab\x8c\x87\xfb\x9f\xfd\xff\x84\x00\x73\x13\xdd\xbf\x30\x43\x97\xd0\xef\xf8\xe9\x86\x7d\x34\x89\x3c\xb7\x3c\xa1\xd1\x86\x09\x77\x1f\x3b\xbd\xab\x41\xef\xb3\x36\xe2\x3b\xfd\xee\x55\x41\x95\x80\xdf\x3a\x1f\xfb\xdd\xbb\x41\xf7\xbf\xba\x17\x0f\xfd\xce\xf7\x57\xdd\xc1\xf5\xcd\x65\xf7\x7e\x70\x71\xf3\xf9\xf6\xaa\xbb\x22\x33\xa7\x76\xf0\x45\xef\x6a\xf9\xd1\xf3\x85\xbf\xc0\x0e\x6b\x59\xe6\xfb\xcb\xa0\x18\x0e\xd3\x04\x82\xe0\xdc\x04\xc3\x31\x62\x3c\x26\xf0\x67\xe9\xbc\x33\xae\xda\xe4\x0c\xf5\xd4\xfb\x24\x41\x38\x55\x7c\x8a\x21\x6a\x93\xcc\x1f\x19\x1e\x6a\xd1\x8a\x93\xc4\x4b\xef\x12\x29\x63\x5a\xc4\xea\xc1\xa4\xc9\x2f\x4e\x88\x16\xe7\x33\xaf\x86\xd1\xc6\x0d\x46\x94\x41\x02\xf1\x14\x8b\x27\x13\x66\xca\x3f\x99\x1f\x0a\x89\xb0\x7c\x64\x7a\x5e\xc4\x3a\x86\x9a\x50\xf8\xbc\xd1\x53\xb5\xd4\x99\xe2\x27\xa2\xa9\x32\x4d\xa3\x09\x9a\x09\x3e\x16\x44\x4a\xeb\x5b\x8e\x30\x33\x09\x08\xf6\x71\x7d\x0d\x3d\x32\xc6\x35\x29\x9c\x0b\x3b\x26\x33\xc2\x62\xc2\x22\x6a\xaa\x15\x21\x76\x9f\xb9\x36\xc7\x02\xcf\x26\x48\x72\x08\x7a\x03\xd9\xc1\x7f\x65\x5e\x72\x37\x99\x59\xb1\xf9\xd9\xf7\x40\x8b\x54\xcb\x89\x1b\xd0\x13\x0d\x95\xe1\x65\x77\x19\xba\xb0\x8b\xf1\x03\x4e\x67\x09\x81\x4f\x5a\x92\xc3\x66\x68\x5a\x17\xf6\x43\x6f\x53\xd5\x26\xe8\x0b\xdb\xcd\x19\x4b\x3b\xa3\xb3\x0a\xcf\xb6\x3d\x52\xe8\x07\xcc\xe2\x44\x8f\xe2\x62\x18\xc5\xb3\x08\x15\x36\x1d\xcd\x35\xee\x34\x6e\x73\x8b\x46\x38\x95\xdb\x5c\xa3\xa5\x12\x53\xe3\x15\x3c\xcd\x93\x42\x80\xbd\x6d\x7d\x29\x50\x77\xa6\x45\x24\x4e\xb8\xa5\x92\x79\x3c\xb5\x49\xcb\x30\x9b\x9a\x6b\x76\x26\x28\x8b\xe8\x0c\x27\x1b\xd9\x7e\xa5\x1a\x03\x9b\xba\xff\x27\x3a\xd2\xec\xf3\xcd\x42\xd8\x56\x11\x31\x85\x72\x72\x3b\xcd\x6c\x0b\xd7\xf0
\x24\xd9\x62\x0d\x22\xf3\x6c\x12\x2c\x78\x6a\xe2\x71\x40\x17\x12\x57\x1c\xd5\xb3\xaa\xed\xd6\x27\x03\x17\x13\xa0\x37\xd8\x6c\x93\xf9\x53\x47\xbf\xd2\x28\xf6\xeb\x26\x19\x0f\x27\xb7\xd5\x63\x56\xed\x80\xf7\xe3\xbf\x96\xf1\xce\x67\x3c\xd3\x3c\x13\xa5\x52\x41\xa4\x38\x5b\xa3\x35\x92\x4a\xa9\xec\x5e\xec\x3c\x4b\x6a\x6f\xbe\x1b\x39\x09\x6d\x02\xd4\xe2\x47\x0a\x39\x04\x1e\x22\x80\xe5\xf1\x51\xaa\x75\x59\x84\x21\x0b\x01\xfd\x89\x9c\x8d\xcf\xd0\xcd\xcf\xdd\xbb\xbb\xde\x65\xf7\x04\x75\x6e\x6f\xbb\xd7\x97\x27\x88\xa8\xe8\x1b\x97\xb3\x68\x13\x96\x1e\x99\xe2\x56\x5b\x99\xa3\x09\x7f\x01\xd9\x48\xc4\x98\x14\xd6\xec\xb2\x9b\x20\x55\x79\x4c\xa5\xb2\xe9\xb3\x5a\xae\xe4\xd3\xd2\xfa\x7e\x25\x87\xa4\x6a\xb2\x0d\x6b\x60\x29\xd3\xa9\xb6\x65\x07\x14\x4f\x07\x82\x27\xdb\x08\x85\x4b\x58\x0a\x98\xcb\x19\x98\x02\xc5\x53\xa4\x87\xb5\xa9\x20\x59\xc8\x31\x53\xe9\xb4\x62\xa4\xe5\xb2\xbe\x37\xbd\x7b\xcb\x45\x1f\x6c\x3e\x1a\x75\x29\x10\x00\xb6\x50\x23\x2a\x72\xb7\xf1\xc0\x7a\xea\x07\x38\x8a\xb4\xc9\xbd\xe3\x45\xe5\x1f\xca\x42\x02\xf6\x43\x7b\x5b\xe6\x2a\x3e\x77\xd3\x9c\x69\x09\x06\xc9\xc0\xfa\xca\x95\x3c\xa2\xf9\xf8\x15\xdf\x1d\xce\x17\xbe\x0a\x2c\x7b\xf6\xc8\x1e\x64\xe6\x52\x31\x97\xb0\x24\xb0\x93\x12\xbd\x4c\x08\x1c\x8d\x39\x9a\xe0\x67\x52\xf8\xa4\xab\x21\xd1\x03\xcf\x79\x2a\xaa\x04\xdd\x23\xbb\x24\x33\x41\xb4\xa6\x5f\x0e\xa0\x64\x3c\x7d\x57\xe4\xc4\xc0\xd7\x81\xaf\x8f\x9e\xaf\x2f\x92\x54\x2a\x22\x3a\x52\xd2\x31\x38\x12\xb7\x52\xe0\xcc\x60\x83\x19\xe7\xc9\xa0\x81\x4f\xa4\x39\xc5\x0b\x91\xb0\x42\xc2\x87\x34\x48\x07\x3c\x05\xfd\xa8\x70\x6d\x72\x7d\xd7\x79\x95\xc3\x76\x7a\x4b\xc8\xe0\x42\x66\x1d\x07\x28\xb1\x95\x8a\x83\xab\x46\x59\x36\x12\xda\xbb\x9a\x73\x61\xf4\x9b\x2c\x5c\x96\x4f\xb1\x74\x98\x9c\x2a\x42\x99\x23\x5b\xfe\x12\xf0\xb3\x26\xb0\xd1\x3b\x7e\x4f\xb9\xc2\xf2\x9b\xb3\x47\xa6\x95\xa8\x27\x32\x37\xee\x56\xad\xa6\xfc\x59\xeb\xe2\xa7\x92\x30\x09\xe9\xde\x7f\x36\xe1\x39\xcd\xe2\xce\x5d\x6d\x4c\x53\x32\x9d\x25\x58\x41\xd2\x75\xf6\x15\x48\xd1\xb5\x83\x5a\x2d\x29\x4f\x80\x06\x3d\xdf\xac\xc5\xfe\x66
\xa6\x3f\x26\x0a\x2a\xc7\x15\x55\x60\x33\xc5\xa9\x26\xcf\xe2\xd4\x57\xba\xae\x0c\x57\x08\x0e\x71\x92\x38\xdd\x4e\xf0\xcb\xc5\x31\x56\x4a\xc6\xcc\x5a\xb8\xb7\x39\xef\x1f\x9c\xdf\x28\x12\x9c\x95\xb2\x61\xb4\x31\x67\x76\x7a\x68\xc4\x81\x8b\x5f\x13\x76\xf6\x42\x9f\xe8\x8c\xc4\x14\x43\x06\xbc\xfe\xaf\x0f\x7a\x5d\xff\x7e\x71\x77\x73\x3d\xc8\x2b\x79\xfe\xf3\x91\x75\x12\xc9\xb3\x2a\x05\xc4\x38\xcb\xd2\xed\x67\x82\x38\x95\xd0\xae\x05\xbc\xae\xb9\x1b\xf1\x91\xd5\xcd\x20\xe6\x91\x3c\xc3\x2f\xf2\x0c\x4f\xf1\x1f\x9c\x41\x28\xbd\x03\xff\xbc\x48\x78\x1a\xff\x82\x55\x34\xf9\x00\xe7\x5a\x7d\x20\xcf\x84\x29\x13\xa6\xd2\xe4\x8a\xa1\x26\x59\x42\xb6\xfe\xbf\xeb\x39\xe7\x45\x45\x52\x5b\xb2\x11\x99\x29\xf4\xff\x0a\x32\xe4\x5c\x55\x5f\x52\x7c\x34\x92\x64\xad\x0b\x29\x37\xd2\xee\x6f\xd0\x3f\xfe\xe3\xdb\xbf\x6a\x16\xda\x84\xc6\xbd\xfb\x9b\x81\x7e\xff\xdf\x2f\xed\xfb\x72\x0d\x71\x67\x4a\x69\xa5\x0d\x35\x1b\x6a\x98\xc4\xf9\x94\xc1\xed\x27\x20\x78\x01\xe2\x0d\xd8\x21\xdf\xc7\x2a\xe9\x76\x59\x18\x7d\x3b\x93\x6d\x23\x62\x82\x89\xed\xad\x11\x9d\x22\xc6\xd1\xd4\xe4\x9a\x62\x86\xfe\xfe\xe3\xf7\xd5\x1b\x98\x0a\xba\xd1\x07\xa9\x45\xa1\xf0\x3e\x29\xe9\x1f\x44\x22\xcd\x35\x9a\x8b\xf9\x54\x7f\x5a\x10\x39\xe1\x49\x8c\x5e\x08\x98\x49\x36\x0f\x34\xb3\xca\x05\x79\x64\xfe\x10\x90\x72\x88\x70\xa2\xf8\x98\xc0\x5d\xed\x0c\x35\x45\x84\x56\x55\x4c\x95\x86\xe2\x82\x9c\x18\x60\xb6\xfb\xef\x5c\x6e\x35\x2c\x13\x7e\x72\x45\x2d\xd6\x25\x17\x0f\xab\x57\x3e\x2a\xbb\x5e\x51\xbd\x0f\xbf\xbc\xc9\xd6\x6d\x5b\x4d\x1a\x5b\x84\x62\x7d\x58\xe5\x9d\xa9\x9e\x0c\x8d\x38\x1b\x24\x94\x3d\x6d\xb4\x19\xae\x30\x1c\xe9\x11\x2c\xcd\xf4\x88\x99\x9f\xdb\x78\x40\xd6\x38\x1f\x1f\xd3\x24\x31\xa5\x2d\xfe\xf6\x80\xde\x65\xe8\x06\xca\xc0\xcc\xd4\x80\x92\xd8\xc6\xbd\xac\x25\x2c\x08\x83\x84\xb7\x47\x36\x9c\xdb\x98\xad\x3c\x41\x32\x8d\x26\xae\x32\x2f\xe2\x4c\x6a\x35\x9a\x0b\x14\xf1\xe9\x54\x5b\xbd\xb0\x65\x8a\xf3\x44\xda\x6c\x77\x76\xaa\x70\xa4\x1e\x59\xfe\xbd\x15\x27\xcf\x34\x65\xda\xae\x74\xaf\x79\x48\x27\x6f\xfe\xb4\x54\xe1\xa6\xb1\x0f\x45\x01\x4e
\x30\x13\x89\xf2\x40\x2d\xf8\xe2\x59\x32\x1b\x56\x63\x19\xc8\x09\x17\x6a\x10\x57\xca\x9c\x95\x4c\x53\x16\x84\x8c\x9c\x26\x90\x34\xcc\x9f\xb5\xf2\x4f\x5e\x32\xe7\xeb\xb2\x29\x68\xae\x5e\x36\x83\x66\xc7\x68\xe9\xcc\xd6\x65\xc1\x1a\x5a\x19\x60\x92\xa8\x98\x13\xbe\x6a\x8e\xf7\xf0\xd6\x85\x7e\x69\x29\xf1\xca\xe7\xce\x29\x41\x3c\xce\x31\xf4\xcc\xbd\x6e\x2b\x42\x96\xd1\xd4\x42\x27\xec\xaf\x72\x74\xd9\x52\x1e\x8a\x9e\x5c\x3d\x17\x70\xd9\x4b\x02\xba\x26\x16\x43\xaa\x04\x16\x05\x00\x94\xcc\x1e\x94\x04\x0b\xc8\xcf\x7a\x64\x06\x0e\xcf\x58\x0a\x31\x8a\xa9\x84\x02\x11\xb8\x4b\xbd\x60\x18\x6a\x66\x04\x96\x8e\x76\x5e\xe7\x68\xf2\xcf\x21\xb1\x2c\x67\x0d\x27\xec\xf4\x87\x32\xd8\x2f\x6d\x9f\xf1\x28\xcd\x15\xb9\x08\x34\x5c\x0b\x15\x84\x28\x93\x74\x3c\x51\x88\x32\xeb\x77\xc4\xc9\x98\x0b\xaa\x26\x53\x79\x82\x86\xa9\xd4\x56\xa8\x49\x56\x33\xf9\x28\x44\x45\x8d\xa4\xd0\xb6\x45\xc4\x71\x69\xc0\x45\x13\x65\x03\xd6\x68\x76\x28\xbb\xa5\xbb\x62\x05\xe3\x74\x32\xf8\xc4\xf2\x18\x94\xc8\x0c\x75\x13\x99\x3c\x40\xee\x00\xab\x7e\x4f\x89\x54\x75\xe7\x00\xc0\x2e\x77\x16\xa5\x38\x44\x27\x2d\x64\x8a\x41\x05\x71\xb9\xdb\xa0\x79\x15\x01\x37\x0d\x28\x55\x16\x74\x9a\xce\x54\x65\xe2\xd6\x62\xa8\xe8\xce\x83\x32\x6a\x46\x6c\x28\xc6\x02\x6e\x06\x00\xba\x47\x76\x4f\x48\x3d\x3e\xdd\xc2\xde\xff\x06\x47\x09\x96\x60\x0b\x3d\x96\xb3\xfc\x36\x41\xec\xcb\xee\xfd\xc5\x5d\xef\xd6\x40\x4e\xdc\xdc\x7d\xee\xf4\x07\x15\x71\xed\x8a\xa7\x3e\x77\xee\x7e\xbc\x5c\xfd\xd8\x0f\xfd\x62\x55\x76\xc5\x23\x77\xf7\xcb\x8b\x39\x1a\x4c\xb1\xa2\x28\xac\xf2\x3b\xe7\x68\x36\x57\x13\xce\xb2\x14\x85\xb8\x20\x9b\x4e\x91\xa9\x08\x56\x90\x42\x24\xa4\xaa\x08\x1c\xf6\x21\x2f\x67\xb5\x86\x59\xdc\x2c\x83\x2e\xb7\x53\xd5\x68\x8d\x13\xf9\x29\xe1\x43\x88\x5b\x5b\xdd\xc7\x02\xd3\x2d\xc9\x40\xdf\x32\xdf\xe7\x92\xca\x59\x82\xe7\x0b\x5f\x58\x75\xe5\x5c\xe3\x29\x81\x8c\xe3\x1c\x16\xcf\x15\x8b\xe8\x9d\x81\x02\xa6\xec\x5e\xa7\x23\xa8\x64\x52\x14\x2b\x82\x86\x44\xbd\x40\xdd\x9c\xfb\x6b\xe6\x4b\x75\x09\x23\xf2\xec\x91\x81\x3b\xe7\x51\x13\x39\x4e\x21\xdb\xef\xf1\xdd
\x09\x7a\x7c\x17\x93\x67\x92\xf0\x99\xde\x79\xfd\x87\xba\x4b\x66\xce\xf0\x94\x46\xd7\x3c\x26\x2e\x45\xe3\xce\x22\x39\x6e\xe5\x53\x84\xe4\x33\x12\x0f\xdc\x95\xd9\x5c\x07\xbe\xb0\xaf\xba\xe9\x5c\x24\x5c\x66\x80\x2f\x68\xb9\xeb\xa7\x3b\xc5\x34\xb9\xe6\x2a\xf3\x33\x6e\xb3\x06\x41\x22\x3a\x03\x3b\x63\x40\xf4\xb8\x87\x53\xa3\x0a\xe7\xd2\x09\x67\x98\x03\xc2\x71\x2c\x88\x94\x20\x98\xdd\xf4\xf2\x84\x26\xe6\x2d\xbd\xd0\xba\x73\x1d\x05\x29\x73\xe6\x9b\x2f\xfa\x63\x16\x9d\xb8\x55\xfc\xd4\x65\xcf\x7b\x0c\x30\x6f\xab\x96\x68\xfe\xfa\x91\xcc\xa1\xa0\xe4\x16\x53\xb1\x61\xa0\xb9\x2a\x83\x77\x2f\x21\xe7\x6e\xc5\x87\x5a\x14\x7c\xae\xa6\xc3\x76\x61\xe8\x2c\xf3\xf0\x50\x36\xb7\x93\x33\xd9\x87\x1b\x1a\xe1\x0f\x75\x26\x77\x6d\x42\x06\x2a\x9b\x91\x33\x12\xad\x61\x3f\x66\x13\xbc\xd7\xef\xad\xb4\xbb\x32\xe5\xd3\x65\x13\xe6\xbb\x60\x4b\xfd\xcb\xe8\x02\x64\xe5\x8c\x23\x2b\x8b\x37\x98\xb4\x13\xe3\xcb\xe6\xdd\xf5\xd9\x57\x6b\xb9\x6b\xa5\x67\x54\x10\xbe\x84\xd8\x69\x6e\x4d\x65\xe3\x7d\xf6\xe9\x13\x44\x21\x77\x14\xcc\xcb\x24\xc7\x41\x60\x31\xca\x83\x3a\x8f\x2c\xcf\xc0\x91\xe8\x85\x24\x90\xb4\xa7\x6f\x39\x08\x58\xd8\xe9\xda\x91\x48\x6c\xf2\x9f\x4f\x10\x4f\x95\x1e\xcc\x54\x18\x39\x97\xb4\x2d\x5f\xca\x83\x38\x26\x92\x68\x53\xf9\x33\xf4\x6f\xc3\xeb\x46\x33\xa0\x0c\x7d\x22\x0a\x46\x81\xa6\x11\xfe\x02\xc1\xea\x29\x27\x84\x56\xd3\x7e\x8b\x13\x65\x57\xb2\xc6\xce\xe7\x30\x30\xdf\x27\x7c\xb8\xdc\xe5\x01\x83\xa3\x87\xbb\x9e\xf3\xaf\xe6\xd9\x60\x1e\xc4\x74\x21\x3e\xda\xbd\xbd\xeb\x5e\x74\xfa\xdd\xcb\x33\xf4\x20\x89\x26\x4f\xb6\x5c\xa8\x16\xcf\x0c\x2c\x33\x73\x8b\x2b\xc3\xa4\x22\xb8\xce\xad\x43\x84\x28\xd4\x74\xaf\x10\x1c\x45\xd0\x99\xe5\x8c\x0d\x90\x2f\xd4\xba\x1d\x01\x26\xa9\xbc\x4e\x9b\x67\xb8\xea\x04\x42\xd6\xd7\xe0\x78\x72\xee\xcc\x7c\xa7\x8b\x79\x86\xab\xd8\xa7\x98\x9f\xb8\xef\xc5\xc0\xd1\x52\x13\x42\x05\x6a\xb4\x2c\xc3\x54\x83\xe6\x6b\xf2\x12\xf6\x3f\xe3\xd9\xf2\x62\x5a\xfc\x52\x60\x5a\xa3\xd8\x7b\x99\x08\xfb\x3e\x07\x4e\xac\x0d\x8c\x28\xdc\x7e\x81\x79\x78\xce\xc8\xd6\x4c\x6e\x9a\xfa\x15\xe9\x5c\x7e\xfe
\xc2\x4a\x93\xb0\x59\xb9\x12\xc1\xd9\x81\xbf\x50\x86\x0a\x57\xe2\x09\x1a\xd1\x2f\x76\xd0\x3c\x5b\xdf\x3d\xea\xa5\x6f\xd4\x64\x87\x4e\xf0\xe2\x99\x5a\x43\x6d\xb8\x85\xf7\x97\x2a\x91\x5c\x6a\x95\x28\xd2\xea\x92\x20\x11\x17\xfa\xa6\x80\xcf\xe6\x31\x95\x55\x2a\x83\xc2\x42\x13\x65\x31\xc6\xb4\xec\xf4\xe7\x8d\x62\x62\xac\xc8\xa9\x56\xbd\x56\x94\x73\xdb\x8a\x1f\xa8\x0d\xc2\xca\x03\x37\xcb\x6f\x9e\x21\x19\x63\xe6\x12\xcd\x6b\xa6\xeb\xae\xbc\x2d\x44\x95\x36\x81\x30\x14\xbb\x81\x7e\x05\x85\x4c\x85\x79\xc8\x19\xd0\x73\xe9\x3c\x6c\x2e\x4f\x1b\xc8\xf6\x82\xb3\xd4\xa2\x9a\xc9\xa6\xb3\xb8\x4d\x93\x4d\xb0\x54\xc8\xce\xa9\xce\xb1\xe2\x99\x88\xfb\x75\x29\x17\x6c\xfb\xa6\xc6\x9b\x66\xa1\xa2\x15\x4b\x20\xce\x23\x1d\x0a\x8c\xc1\xbc\xd1\x36\x8d\x53\x84\x2f\x60\x87\xb2\xb3\x7d\x67\xb4\x2c\x77\x4b\xf8\xc2\x04\x4a\x0e\x16\x87\x3e\x43\x1d\xb6\x80\xfe\xe5\xb2\xcc\x0a\xf4\x32\x77\x12\x4e\x5e\xf0\x5c\xa2\x99\x30\x40\x39\xa6\x0e\xc1\x2d\x1e\x2c\xb0\xe2\x4b\x59\x62\x87\x72\x85\x20\x08\x3c\x4b\xab\x53\x00\x9d\xde\x3b\xd8\x43\x60\xb2\x94\x23\x9f\x29\xe4\xf9\x70\xb9\xaf\xa2\x81\xa8\x53\x64\x10\x4d\x30\x1b\x93\x81\x73\x19\x6f\x62\x2d\xe9\x71\x2e\x60\x98\x4b\x3b\x4a\xf5\xe5\x74\x6b\x0c\x26\xdb\xa4\xc7\x3c\x9a\xb9\x43\xf5\x21\x90\x0a\x8f\x09\x32\x33\x6a\xe4\x64\x2f\xe4\xbf\x59\xe8\x64\xb0\x13\xec\xa8\xdd\x62\x4d\x40\x9d\xf2\x0e\x89\x5c\x57\x78\x48\x92\xd7\xc9\x03\x81\x4f\xdb\x50\x03\xc4\x1e\x4d\x6d\x03\x41\x2f\x10\x9d\x28\x89\x0c\x1b\x8b\x10\x69\x55\xa5\xc3\xb2\x75\xc2\x91\xb3\x27\x6d\x9b\x85\xba\x86\x2e\x9b\x2c\xb5\xae\xcd\x8b\x7f\xed\x79\xed\x50\xaa\x1c\x6c\xfe\xf5\x57\xf6\x90\x6f\x36\x11\xaf\x2b\x4b\xcd\x3c\xb6\x6e\xcb\xb2\x72\x29\x1b\x43\x26\x34\xec\xc0\xd8\x1b\x21\xc6\x19\x41\x54\xe6\x0f\xab\x62\x71\x57\x06\x38\xa4\x55\x7c\xe3\x7c\xc9\x5a\xa9\x65\x1d\xb2\xf6\xed\x69\xc9\xa1\x20\x32\xdf\x80\xab\x56\x67\x44\x1b\xaa\x58\xcc\x01\xb0\xd4\xc8\xe1\xa2\x4e\xb7\x72\x9e\x3b\x57\xb8\xfb\x0e\x8f\xd6\xcb\x3b\x56\x1c\x81\x1a\x59\x9a\x1c\x32\xa8\xae\xf6\x21\xfb\x92\x05\xdd\x79\x64\x99\x67\x03\x18\x91\x4a\x34\xc5
\x33\x88\x50\x32\xae\xf2\xb7\x0c\x88\x94\xca\xb6\xf0\xc4\x29\xe2\xd2\x34\x3a\x5b\x4d\x01\x2e\xc6\xdb\x24\x9e\x34\x6f\x66\xd1\xdc\xb1\xe4\x2e\xff\x7c\x57\x8b\x50\xa1\x0e\xe6\x78\x4c\x9f\x09\x73\x27\xea\xc4\x9d\x48\x4d\x12\xb7\xe4\x64\x7e\x8a\x21\x65\x9b\xc4\x7e\x14\x69\xb9\x3c\xdc\x3e\x18\xb3\x2b\x6f\x68\x73\x92\xf5\x2b\x53\x92\x0c\xe0\x5c\xa1\x53\x80\x4b\xb2\xf7\xcf\x88\xc5\x40\x36\x55\xf5\x58\xa2\x3f\x33\xae\xfe\xec\xa1\x44\x3b\xd7\x09\xbc\xea\x1c\x60\x27\x0b\x5d\x7d\x40\x64\x58\xb6\x45\xd8\x43\x2b\x5b\x49\xf9\x6d\xf3\x2c\xf2\x22\x82\xbd\xea\xc2\xdd\xc5\x8a\xc2\xba\xb6\x68\x21\xfb\x01\x95\x2f\xa5\xb2\xbb\xd5\x74\x5e\xcc\x4f\x7a\xc1\xcd\x2a\x57\xa5\x3b\x64\x7b\xd1\x28\xcd\x61\x01\x5d\x61\x1b\x6e\x9b\x36\xce\x42\x5b\x01\x28\x5d\xed\x15\xd9\xa4\x66\xb6\xce\x2a\x10\xc5\x34\x40\xdb\x52\xa4\x06\x31\xf9\xec\x91\x7d\xe4\xc2\x2a\x00\xd2\xf6\x6c\x18\xe2\xe8\xe9\x94\xb0\x18\xe1\x54\x4d\x0c\x72\xb1\x8d\x6a\xcc\x2d\x37\x68\x3d\x07\xd8\x26\x83\x25\xa1\x32\xc2\x22\x76\xdd\x43\x9e\xb9\x9b\xc5\x23\xf3\x06\x81\xae\x10\xd0\x0b\x0c\x9a\x34\xd7\x19\xba\x44\x6a\xeb\xae\x8e\x16\x55\x7d\x7a\x17\xba\xf4\x2e\x3f\x67\x85\xbe\xc3\xd0\xcf\x02\x92\xc5\xf8\x68\x91\x3a\x3d\xe7\xeb\x74\xd6\xa5\xe6\xe7\xc5\x18\xc8\x89\xb5\x67\x8c\x43\xcc\xae\x40\xeb\x59\xdf\x3a\x59\x5b\x40\x60\x1e\xa5\x02\x52\x9f\xab\xc6\xfc\x53\x34\xa1\x49\x1e\x39\xf9\xe6\x24\x9b\xa6\x1e\x32\x21\xcf\x24\x31\xf8\xff\x91\x80\x2a\x07\xe3\xb3\xfc\x16\xfd\x6f\xd3\xbb\x16\xfd\xf5\x91\x7d\x02\x31\x9c\x24\x73\x40\x27\xcd\x46\xc6\xaa\x34\xcc\x53\xe5\x04\x94\x2d\xab\x42\xc5\x89\x98\xbd\x9e\xe0\x67\xf2\xc8\xdc\x30\xff\x1b\x3d\xa1\xbf\xa0\xbf\xd6\x19\x97\xae\x58\x61\xcf\x5e\x96\x8f\x5e\x29\x80\x77\xcb\x59\x41\x69\xe5\x8d\x73\xc2\x14\x5c\xa0\x15\x28\x25\x19\xc8\x38\x65\xcf\x3c\x5a\xa8\x88\xf1\x4f\x2d\x16\x84\xa9\x01\xe3\x31\x19\x90\x8a\x80\xea\x12\x21\xa1\x95\x80\x6b\x1e\x93\x95\xe1\xd0\x4c\x98\xfe\x02\x8e\x23\x99\x0e\xb3\xed\x00\xb0\x84\xac\x32\x3e\xf3\x7d\x14\x39\xad\x7a\xe6\x19\x92\xef\x26\xf3\xde\x34\x94\x9b\xab\x8d\x38\x87\x12\xae\x0e\x27
\x26\x58\x39\x6d\xb2\x7c\x1c\xcb\x61\x08\xfd\xb0\x5e\xb9\xbd\xac\x3c\x8c\x62\xe8\x23\x23\xe8\x98\x6a\xeb\xa1\x79\xb8\x18\x24\xe1\x26\xb1\x14\x03\xd8\xda\x28\x98\x92\x93\xc2\x81\xd6\x9c\x66\xfc\x97\x87\x40\x87\x3c\x2d\x9b\x0f\x96\x00\x54\xfa\xc9\x06\xd6\x52\x98\x6b\x39\x3c\x36\xd5\x94\x64\x42\x0d\x7e\x41\xe7\xe2\x0a\xe9\xd3\xc1\xa7\x06\xe4\x0b\x88\x96\xaa\x09\x17\xf4\x8f\x65\xbc\x8d\x85\xa2\x23\x1c\xa9\xc1\x4e\x7a\xe2\xd4\x33\x53\xc7\x7e\xa7\x57\xdf\x77\x6f\x01\xaf\x01\x3f\x13\x2f\x9d\x12\x92\x25\xed\x28\x32\x0b\xe4\x96\xe5\x2d\x17\x88\xf1\x97\x1c\xe4\xcb\xbd\x0f\xb8\xd6\x5e\x19\x0a\xd6\x26\xd7\x0c\xf2\xa1\x25\x05\xfe\x04\xc8\xad\xf7\xca\x94\x98\x02\x5c\xbb\x01\xcb\xd2\xec\x39\xc1\x2c\x4e\xdc\x15\x82\xb8\xc9\xe8\x99\xbf\xe0\xf9\x5a\x31\x75\x3f\x4b\x34\xaf\x39\x34\xdb\x5f\x34\x82\x40\x06\x18\x4d\x4d\x15\x4c\xcd\x2a\x43\x18\x0d\x53\x80\x09\xd6\x34\x19\xa5\x89\xe9\x2d\x12\x71\x11\x9f\x3d\x32\x9b\x1e\xee\x7d\x4d\xab\x80\xce\x6a\xc2\x2a\x1b\x90\x5a\x34\x55\xdb\xbd\xc4\xb8\xe5\x96\xea\xf5\x3f\xa5\x24\xdd\x51\x91\xe8\xab\xa6\xd5\xf7\xf1\x58\xe6\x79\xf2\x86\x36\xfa\xca\xcb\xe9\xfb\xbb\x5e\xa9\xf4\xca\xaa\x9d\xbb\x38\x43\x29\x33\x7e\x16\xd3\x92\x77\x2d\x37\xdd\x9d\xe9\xce\xb0\x03\x3f\xdd\x21\x92\x74\x16\x55\xcf\x0a\xa9\x6e\xd9\xef\x39\x2b\x32\x46\x87\x71\x7e\xb9\x36\x17\x25\xa5\x6e\x8f\x7e\xb0\x0d\xee\x8e\x45\x5b\x65\x69\xe2\x7e\xee\x15\xcb\x6e\x8b\x8a\x7a\x7f\xc5\xa1\x32\xe8\x45\x50\x00\x31\x9c\xe7\x0f\x67\xdd\xa7\xdd\x2d\xec\xcb\x18\xad\xfc\x19\x6b\x01\x92\x75\x1c\x09\xe7\xd5\x57\xe7\x1a\x7e\x1d\x3b\x50\xf1\xd3\x8b\xc9\x18\x75\x27\xc2\x88\xa4\xb6\x1e\x89\x45\x04\xa3\x95\x87\x21\x6b\x56\xf3\x3a\x5e\xe1\x4c\x63\x3c\xdc\xc9\xc8\xd8\x71\x10\xe1\x68\x52\xbb\xa8\x21\xe7\x09\xc1\xac\xce\x28\xa8\xfc\xb9\x7c\x44\x0c\xfe\x2e\x88\xee\x24\x01\x10\x6a\x47\x02\xdb\xb8\x34\xb7\x8a\x58\x0c\xcd\x03\x8c\x0c\x37\x09\x9f\x6e\xa2\x8a\x30\xe7\x50\xa3\x6c\x9c\x90\x32\xad\x6c\x97\x87\x13\xfb\x91\x24\x4a\x13\xaf\x73\xe9\x8c\x08\x3d\x6b\x4d\xe2\x67\xc2\xb4\x29\x66\xe7\xe1\x22\x54\x2f\xae\x66\x3f\xeb
\x57\x76\x92\x7d\xda\x05\x49\xa1\x30\x36\x7e\x64\x70\x70\x79\xf1\xb0\x6a\x5e\x95\xda\x7a\xf3\xdd\x7d\x1b\x9f\x4e\x4f\x89\x58\xfb\x78\xde\x17\x7d\xff\x6b\x9f\x49\xf3\xed\x01\x24\x8e\x6c\x1d\x2f\xf5\x62\x6a\x39\x9a\x88\xd9\x58\x87\x18\x77\xa0\xc8\x00\xa4\xe2\x14\x73\x89\xbd\x4c\x9c\x3a\xc4\xb2\xbd\xde\x25\x79\x87\x16\x77\x1b\x34\x9c\xca\xd2\xfc\x83\x86\xd9\x04\xe0\xf4\x5d\x76\x6e\xaf\xac\x56\x5f\x8c\xc3\x67\x25\x68\x79\xee\xaa\xed\x41\xac\x04\x06\x00\x0d\x80\x3d\xf8\xc5\x38\x2e\xa8\x34\xca\xbd\xeb\xc4\x32\x9d\xa9\xb9\x6d\xdc\x07\xf7\x62\x41\xdf\x07\x50\xc2\xaa\x98\x7f\xf9\x8e\x8c\x0b\x51\xff\xaa\x8f\xc1\x87\xac\xb7\xa6\x72\x48\x47\x68\x1f\xe4\xa6\x04\x2a\x52\x97\xe2\x63\x7a\x20\x0f\x70\x52\xeb\x22\xdc\x81\xd0\x04\xe3\x28\x07\x12\xb1\xf8\xc4\x4a\xa4\x44\xcb\x2e\x9c\x24\xa5\x75\x61\xa8\xd8\x57\x59\x1f\xc4\x61\xde\xac\xb9\x79\x06\x42\x82\x87\x64\xad\x9c\x83\x2b\xf3\xc2\x52\x2e\x82\x47\x20\x5d\x7f\x36\x4b\xe6\xcd\xca\x04\x7c\xeb\xb7\x12\xc7\x6f\xd5\xc4\x7c\xf4\xbf\xa5\x77\x53\x11\x41\x6f\xb3\x29\x4a\x12\xa5\x82\xaa\xf9\xc0\xfa\x52\x9b\x0b\xad\x7b\xfb\xe6\x85\x7d\xb1\x89\xa3\xe2\x1c\xb9\xef\x39\xdf\x2d\xdc\x53\x82\x9a\x26\x4f\x76\x09\x4d\xb6\x1b\xa7\x6a\x52\x89\xef\xb5\x8c\xb0\x0e\x60\xac\xd9\x54\xf5\x27\x36\x9d\x9e\x6d\x1e\x33\xe0\x23\x07\xdd\xd5\x9c\xb0\xe5\xae\x3a\x6b\x38\xa1\x1d\x42\xf8\x4c\x50\x2e\x6c\xf3\x9a\x26\x99\x8a\x53\xfc\x65\x30\xc3\x02\x27\x09\x49\xa8\x9c\x6e\xee\x32\xff\xee\x6f\x4b\x67\x7b\x61\x9a\x2c\x99\xc9\x4e\xf1\x17\x3a\x4d\xa7\x88\xa5\xd3\xa1\xd5\x72\xb1\x7c\xf2\xf1\x59\x1d\x9a\x84\x81\x19\x73\x13\x2c\x60\x5a\x08\x0f\x71\xf7\x91\x79\xd8\xeb\xd6\x55\x81\xa3\x09\x25\xcf\x80\x0c\x2b\x18\x91\xf2\x0c\x5d\x73\x45\xce\xd1\x67\x3c\xeb\x83\xa2\x66\xba\x9e\x8e\x4d\xd0\x01\x4b\xa4\xb5\xd6\x94\x51\x75\xf2\xc8\x2c\x60\xbb\xa3\xca\x87\x88\x33\x03\xda\x1b\x01\x61\xb3\x21\xc0\x8b\xee\xd0\x6b\x95\xab\xbd\xa5\xb2\x86\xd8\x02\xbf\x0c\xbc\x94\xe4\x81\x29\xf9\x58\x83\x8f\xef\xf0\x8b\x49\xc2\xbf\xc4\x0a\x9b\x86\xc6\xcb\x34\x77\x9b\xe5\x66\x9b\x5c\x19\xac\x6a\x97\x0d\xc4\x2d
\x60\x4a\xd6\x9e\xcf\xa4\x1c\xff\x89\x9e\x91\x33\xf4\x7d\xc2\x87\xf2\x24\x77\x55\x99\x1f\x25\x51\xf2\xc4\xc4\xfd\xe0\xbf\x4d\xb5\xe2\x37\x8e\xfa\xb9\xdc\x87\xce\x94\x23\xfa\xc5\xe0\xb4\xc8\xef\xce\x3f\x7c\x98\xce\x4f\x87\x69\xf4\x44\x94\xfe\x17\xe8\x14\x95\x14\x72\x20\x67\xb8\x0a\x32\x6d\x15\x75\x16\xe1\xd6\x1a\x71\xa4\xad\x95\x92\x04\xa0\xfd\xf5\x95\x9e\xf5\xfe\x75\xe8\x5c\x9c\x55\x37\x36\xb5\x4b\x16\x69\xdd\xf1\x2a\x60\x82\x1f\xc6\x5a\x31\xbd\x8d\x7d\x28\xf2\x51\x82\xc7\x25\x93\x65\x0d\x23\xe5\x66\x4a\x2d\x17\xe9\xb5\x43\x12\x8d\x3e\x65\xc5\xd4\xc1\xf7\x2e\xca\x0b\xd1\x5a\x1b\xc5\x3a\x7b\x64\x1d\x89\x5e\x88\x69\x59\x0c\x65\xb3\x10\xf4\x49\xa9\x9c\x64\x45\xb3\xe0\x86\x86\x41\x0d\x62\xb3\x01\xf6\xb0\x86\xa3\xb3\xac\x5c\x58\xcc\x5a\xa0\x38\x91\xe4\x44\x0f\x0c\x2e\x55\x97\x1d\x8a\x5e\x04\x9e\xcd\x88\x78\x64\x16\x7d\x17\x30\xe6\x39\xb7\x99\x3f\x75\x25\x02\xc1\xa2\x3c\xac\x45\xe9\xd1\x9e\x14\xab\x50\x57\x9d\x6f\x28\x5a\x5d\x46\xe1\xaa\x3a\x4c\x47\x3e\xad\x8b\x36\x4d\xdf\x7f\x7d\xb7\x71\xc3\x39\xaf\xb2\xce\x3b\xa5\xda\x0b\xe8\x88\x3e\x05\x03\x52\xe6\x8d\x5f\x9d\xaf\x2f\x33\xdf\x0b\x6a\x0e\x80\xb7\xc3\xcb\x31\x27\xd2\x73\xe2\xa3\xcc\x17\x97\xd0\x11\xd1\xda\xc7\x23\xd3\x6c\xec\x07\x1c\x0c\x06\xbc\x83\x84\xd7\x1f\x8d\x04\x97\xd2\x96\x53\x98\x71\x96\x17\xc5\x6d\xd1\x6e\xd2\x00\xd9\xf7\x6e\xae\x07\x8b\x8d\x27\xbd\xdf\x5c\x0b\x4a\xfb\x63\x25\x0e\x44\xed\x50\x2b\x1b\x4e\xe6\xb4\x58\xa3\xe5\xe4\x87\x8b\xab\x5e\xd6\x67\xad\xf4\xe9\xc5\x9e\x93\x3e\xf8\x7f\x7d\xd7\xc9\xc5\x15\x7b\xfd\x27\x4b\x43\x2c\xe9\x40\xb9\x7a\xb3\x8a\x49\xdc\xdb\x20\x3b\x96\xb6\x7e\xa5\x7c\x28\xf2\xcc\xaa\x5a\x83\x1d\x6d\x53\xcd\xb5\x12\x81\xc2\xb8\xef\xc4\x05\x50\xbc\xf4\x53\x52\xe1\xe9\xcc\xaf\xa3\x75\xd0\xb6\x76\x99\xe6\xa8\xd5\x5d\x82\x07\x85\xdc\x8f\xb0\x49\x12\x2a\x4f\x6e\x61\x2b\xd6\x8b\x78\xf5\x2d\x92\xff\x2e\x72\xd3\x0f\x57\x98\x9e\xcc\xf3\x64\x48\x69\x75\x37\xd7\x25\xbe\xc6\xef\x3f\x24\x59\xd7\x82\xda\x0d\xdd\xb6\xf2\x34\x43\x37\x13\x04\x4b\x1b\xfe\x86\x02\xcd\x52\xf1\xd6\x1a\xee\xe1\x6c\xce\xa6\xc4
\xfb\x34\xeb\x13\xe2\x5d\x35\xb6\xf5\x5d\xe4\x0e\x22\x15\x82\x3c\x13\x01\xbc\x63\x53\xa9\x58\xf1\xa8\xe2\x44\x10\x1c\xcf\x3d\x8a\x64\x79\x1c\xe6\xcb\xe0\x1e\x93\x74\xaa\x0d\x78\x30\x4d\x18\x3f\xe5\x33\x67\xb3\x14\x9e\x82\x26\x2f\x74\xa4\x6f\x2c\x2f\x0b\x44\xbf\xc1\x4e\xc9\x17\x2a\x95\xd6\x2b\x2a\x52\x60\xdd\x20\xa0\xf1\x40\xeb\xb7\x09\xb1\x37\xdc\xe3\xbb\xce\xf7\x37\x77\xfd\xee\xe5\xe3\xbb\xbc\xe4\xc2\xd5\x14\x66\xa0\x65\xae\x07\x05\x67\x8f\x2c\xcb\x53\xce\x30\xba\x61\x2f\x11\x8e\xe3\x3c\x3f\xda\x1a\x91\x46\x67\x5b\x2a\x91\xbd\x53\xb1\x32\x43\x79\xc9\x30\x0f\x50\x58\xd6\xd6\x93\xb5\x24\x74\x56\x38\x39\xa6\x3c\x6e\x49\x1d\xd3\x8e\x2e\x1b\x1f\x5e\x58\x19\x5b\x9b\x28\x87\x7f\xc9\xc8\x8b\xb3\x95\xe0\x76\xfe\x80\xcd\x25\xbc\x9e\xb4\x73\x1b\xb2\xc1\xa6\x7e\xa4\x5f\x48\x7c\x57\xa3\x55\xed\xa4\x4c\xa9\x51\x82\x65\xe5\x2e\xa4\x8c\xae\x63\xf1\x67\x4b\x79\xd0\xef\x35\x17\x4b\x37\x39\x6a\x60\x8e\x00\x0c\xf0\xbf\x0a\x61\x14\x11\xa1\x30\x65\x68\x04\x07\x9b\x45\x73\x04\x28\x2c\x04\x62\xd8\x7f\x43\x53\xca\x00\x0e\x62\x19\x69\x1f\x8a\xeb\x58\x43\x69\xfd\xdc\xbb\x7e\xe8\x17\x54\xd5\x1f\x6e\x1e\x0a\xad\x36\x2f\x3b\xbf\x2e\xd5\x55\x4b\x23\x2c\x4b\x16\xf2\x96\x98\x97\x96\x5a\x20\xe4\x8c\x32\x95\x0b\x4d\xe6\x8a\x3c\xdc\x5d\x6d\xa5\xdf\x55\x07\xcb\x6a\x61\xec\x7d\xed\xaa\x1a\xe6\xa2\xc9\xab\x31\x89\x56\x01\xed\x36\xe7\x23\x93\x05\xa5\xe9\x60\xbd\x89\x16\x84\x0f\x4b\x34\xc3\xc2\xc6\xa1\x62\x93\x00\x55\x6c\x5e\x67\x2c\xaf\x65\xb0\x20\x9f\x88\xfa\x59\x5f\x7d\x9c\xed\x06\xe9\x0b\x54\x59\x88\x8f\x92\xc1\xb3\x19\x78\x8d\x93\x66\xa7\xb2\xa4\x7e\xc9\x29\xcb\xf0\x05\x64\xbf\xe0\x83\x69\x9c\x21\xe0\x9a\x8e\x1e\x0e\x28\xe2\xd2\x34\xb5\x49\xca\x99\xe6\x48\x83\xf8\xeb\x60\x82\xbd\xe1\xf8\xc8\xbc\xdc\x10\x34\xd1\x2b\x16\xd0\x63\xe5\xa4\x44\x9d\xdb\x5e\x05\xad\xaf\xca\x21\xa4\xb7\xd5\x71\x29\xc9\xa2\x59\xbb\x46\xbe\xf2\x6a\x4e\x5b\x01\x75\x65\x57\xba\x1d\xb6\x95\x09\xfa\xdf\x16\x33\x09\xda\x00\x28\x5d\x65\x32\x14\x6a\xc9\x57\x60\x47\xaf\x57\x5e\x99\x93\x61\x4d\x24\x2b\x7f\x42\xb6\xba\xc6\x47\x6f\x5a\x4c
\xdd\x3e\xf1\xd1\x9c\xb8\xe9\xe9\x6c\x73\x0b\x76\x86\x70\x95\xaf\xa6\x09\xc4\xd5\xcf\x86\xa3\x33\x04\x14\xc0\x74\x71\x3d\x43\x5d\xca\xb5\x05\x24\xf0\x97\xeb\x73\xdb\x7a\xa8\x58\xf9\xfc\x9c\xfb\xdb\xc2\xa5\xe3\x19\xb6\x7e\x07\x30\xa2\x5c\x33\x8f\xaa\xde\x8f\x67\x8f\xcc\x4b\x58\x91\xc6\xec\xd1\x67\xc4\xf5\xcf\x81\xa6\xcc\x0c\xb0\xd7\xa1\xf6\x29\x53\x7e\x0a\x3b\x50\xc6\x3d\x50\x93\x62\x07\x9c\x85\xef\xd8\xd3\x29\x27\xd8\x55\x97\x3a\x0f\x8a\xcd\x03\xf4\xfd\x4b\x30\x9e\xd7\xf3\xc2\x7e\x18\xdc\xd1\xe0\xb4\xc0\x5e\x47\x45\x0f\x91\x20\xe6\x44\xb2\xf7\x2a\xab\xdf\xa5\xc9\xdc\xa5\x54\x97\xc2\x03\x5a\xab\xc3\xd4\x8e\xbc\xfc\x80\xef\x00\x72\x6b\x5d\xc3\xc1\x3b\x56\x2b\xdd\x54\x2e\xc6\x0b\x9c\xe0\xe7\x22\xc1\x47\xeb\xbc\xea\x5f\x66\x24\xda\x04\x17\xe8\x16\x0b\x3c\x25\x8a\x88\x65\xe9\x48\xc5\x7e\xe7\xa0\xe2\xb8\x1d\xb4\xdf\x35\xbb\x68\x9a\xc1\x94\xbb\x06\x65\xd6\xed\xd5\x2a\x9c\x9f\x6c\x15\x6b\x41\x9a\xe9\x65\xfc\x6c\x3d\xff\x6b\xae\xc2\x7e\x27\x5f\x86\xcd\xb6\xf2\x60\x9d\xb6\x5d\xd3\x61\xf0\x6d\xfa\x0b\x48\x31\x85\x74\xa1\x96\x00\xdb\xac\x9e\x65\x1d\xa2\xcd\x2a\x59\xba\x13\xd9\xed\x2a\x1c\x5c\x65\x72\xe9\x50\x15\x6a\x27\x80\x4b\xc0\xa4\x32\xe0\x2e\xd5\xa8\x34\xa0\xb4\x54\x65\x48\x7a\x61\x3f\x8b\x59\x98\x3b\x74\xad\x66\x55\xee\x7f\x56\x22\xd7\x0a\x19\xb7\x2b\xc4\x8e\xa0\xd1\xec\x5a\xa3\x59\xc5\xca\x85\xec\x5a\xcd\x9d\x44\x94\xc0\x83\x6c\x5f\x72\x8b\xfa\x50\x5c\x20\x94\x74\xd9\x2b\xd2\x36\x37\x86\xab\x9f\xb2\xec\xbf\x8a\x12\xdc\x31\xb5\xcf\xaa\x55\xb5\xaa\x67\x5e\x08\x0a\x22\x50\x89\xaf\x0d\xd8\xbc\x1a\x98\xad\x49\x83\x34\x5e\xfe\xde\xb5\x09\x60\x41\xcd\xf8\x9c\xa7\xe8\x85\xca\x09\x52\xfc\x91\x41\x9e\x60\x16\x0d\x50\x1c\x99\x07\x4f\xe0\x29\xc0\xb6\x90\xe9\x70\x4a\x15\xc2\xde\x0a\x0b\x2e\xc9\x13\x7b\x9e\xf5\x0b\xb0\xe2\x4a\xf8\x82\x2a\xdc\xa5\x15\x87\x66\x03\xff\x5a\x3e\xc8\xb6\x08\x05\x5e\x4e\xf3\x7e\x31\x0a\x3c\x8b\xc7\xb7\x30\x2b\xcf\x5c\x00\x29\x40\xd5\xde\x06\x8b\x04\x0b\x70\xbd\x54\xaa\xd2\xdd\x62\x1d\x3d\x2b\x00\x0a\xf2\x8d\x68\x84\x50\x90\x3f\xbe\x0b\x88\x82\xba\x4e\x7a\xcb
\x4a\x56\xdd\x2b\x35\xfe\x6f\x57\x0a\xad\xb8\x4b\x9c\xf7\x35\xa5\xdb\x5a\x4d\xa9\x6d\x50\x75\x79\x41\xc0\xe6\xe9\xe5\x75\xd9\xcb\x70\xc6\x23\xce\x62\xba\x46\xbe\x30\x74\x4b\x1b\xa6\xa3\x0e\x9b\xaf\x46\x3e\x9a\xfa\x89\xfa\xd6\x5f\xe2\x69\x22\xd5\x98\x9b\x2b\x4d\xd6\x7c\x7c\x9f\xd3\xbd\x92\xd0\x22\x18\x11\x29\xdf\x4e\x8c\x2b\xa8\xfb\x89\x54\x32\xaf\xa8\x45\x7d\x64\xd5\x5a\xd2\x72\xb9\xbd\x6d\x19\xc9\x4e\x61\xf7\x3c\x19\xe1\x56\x61\xbd\x6e\xbf\x64\x89\x78\xc6\xa0\x27\x16\x64\xa3\xa4\x06\xe7\x61\xc8\xba\x04\x2a\xad\x1c\x6d\x52\x6b\x5e\x21\x39\xaa\xa7\xbe\x50\xe4\xb1\xf2\xec\x5a\xc5\x60\x87\xe6\xe7\xc2\x0d\xd2\xb8\x26\x26\xd3\xe3\xed\x8d\x61\x93\xba\xe3\xcc\xd7\x50\x0a\x27\x6f\xd2\xac\x19\xe0\x6c\x77\x06\xc2\x5b\x46\xa6\xd0\x83\x9f\x40\x08\xda\xce\x1d\x9b\x74\x9c\x0c\x1a\xbe\xb4\x27\x85\x15\x9b\x94\xca\xbd\xac\x7a\xdd\x0e\xdb\x5e\x4c\x54\xd8\x9c\x64\xea\x7b\x37\xa0\xb5\xb6\x4d\xe5\x2c\xdd\x16\x99\x02\x9a\xb2\x98\x08\x46\xb0\x9a\x1c\xae\x12\xe4\x62\x5b\x17\xba\x37\xbf\xfd\x56\x85\xd8\x99\xe2\x62\x71\xc8\x36\xd3\x4d\xd5\x64\xcd\x22\x8b\x35\x2a\x16\xf2\x66\xdb\x0b\xe6\x6d\x85\x6b\xd3\xc3\x1f\x5a\x87\x4b\xb7\x2a\x16\xa9\x36\x39\xf7\x53\x36\x53\xe1\x9b\x5a\x28\x98\xd1\xa7\xdd\x6f\x51\xbe\x82\x24\x6f\xa2\x3e\x65\xff\x25\x13\xcb\x9a\xa1\xa7\x5e\x15\x05\x74\xa4\x57\x98\x32\x2b\xbd\x96\x15\x4e\x68\xbd\x77\x8a\xab\x6a\x25\x5a\x5f\x85\xf3\xe6\x8b\x70\x42\x49\x46\x28\xc9\xa8\xd8\xa3\x50\x92\x81\x50\xdb\x4a\x32\x56\x99\xa0\xcb\x9c\xb4\x59\xdc\x10\x9a\xd6\x16\x7a\x2b\x99\xfd\x5d\x61\x47\x6e\x5e\x76\xe0\xfc\x9c\x7e\xce\x96\xfd\x8b\xfd\x43\x65\xda\xd6\xc2\x6b\xe5\xd5\xfa\x3e\x57\x36\x2f\x87\x2e\xb0\x88\x13\x0b\x41\x68\x93\xaa\x8b\x3e\xb2\x65\xee\xdc\x47\xf6\x03\x7f\x21\xcf\x44\x9c\x20\xac\xd0\x94\x03\xae\x55\x9e\xc3\x03\x07\xa1\x80\xa5\x6f\x72\x35\x30\xba\xc6\x53\x12\x9b\xc6\xa1\x5e\xea\xa5\x75\x2a\xdb\x70\x70\x15\xd2\x2e\x80\xc6\x9a\x6d\x70\xb9\x1d\x8f\xcc\xa4\x43\x9a\x14\x3c\xd0\x15\xa8\x5b\x18\x30\xcc\x9f\xb3\x60\xf5\x9f\xcf\x50\x5f\xdf\x4f\x54\x16\xe7\xeb\x01\xef\xd5\xcd\xed\x91\x8d
\x05\x4f\x67\x99\x9f\x8f\x0f\x4d\x07\x69\x93\xa1\xb5\x18\xac\x86\xc9\xb8\x48\x75\x84\x63\x6d\x8b\x2f\x67\x9c\x57\xc9\x94\xdd\x08\x66\xc9\x67\x20\x7d\x0c\xb3\xf4\x3f\x9b\x8e\x6f\x62\xcc\x1e\xb8\xcc\xb2\x0e\x00\x7b\x0a\x80\x5f\x12\x09\x5e\xa1\x2c\x32\x50\xa8\x75\x2f\xe2\x29\x54\xce\x73\x99\xdf\x36\x8b\xad\xb8\xf8\x43\x35\x54\x43\xfe\x71\x9b\x97\x66\x0a\x69\xed\x3d\xb1\x37\x8f\x6e\xe3\x0c\xdf\x3a\x79\x71\x9b\x8a\x19\x07\x4d\x2c\x99\x3b\x68\x09\x0b\xf2\x37\xe3\xb3\xd4\xe4\xde\x51\x3f\x15\xab\x92\xb3\xa9\x54\x9f\xb1\x8a\x26\x5a\x72\xe7\xa8\x6c\x3b\xca\x49\xcc\xa5\xf2\x7e\xbd\xbc\x15\x2b\xb8\xf0\xbf\x5e\x13\xf6\x58\xc6\x3d\x5e\x8e\x61\x96\xc8\x99\x69\x12\x53\xfd\x3d\x13\x1a\xb4\x7d\xe1\x3d\xbf\xa8\x7b\xc5\xfe\xa2\x17\xba\x8a\x8b\x56\xcd\xbf\x19\x6f\x15\x5b\xbd\xed\x3c\xdb\x71\x0b\x98\x9b\x4b\x0b\x2a\x96\x3f\x68\x1b\x1d\xd7\xa4\x28\x08\xba\x59\xa5\x92\x6d\xcf\xf0\xac\xd5\x91\xcc\xe3\x3a\xc5\x33\x6d\x44\x28\xae\x6f\x49\x31\x36\x7a\xac\xc9\xe5\x45\x18\xa5\x82\xba\xb3\x5f\xaa\x5b\xaf\xe7\x0e\xf0\x50\x7e\xf0\x5b\x79\x45\xd8\xeb\x72\x68\x92\x12\x70\xa4\x52\x9c\x25\x4f\x02\x4f\x24\x94\x3d\xe9\x8f\x99\x1a\x7d\x17\xfc\x17\x4e\xbd\xab\xd8\xd3\x95\x8c\xbd\xc5\x2e\xe3\x2a\x0c\xc6\x46\x27\x8d\xb2\xb1\x07\xe0\x58\xed\x25\x6e\xd2\x74\xa3\xf2\xcd\x66\x8d\x43\x2a\x5f\xad\x6b\x73\xdc\xe4\xdd\x25\x00\x53\x8d\x52\xd6\xdb\xd8\x31\xc1\xab\x04\xb0\xa9\xc2\x56\x77\xf3\x81\x3d\xed\x87\x00\xf6\x98\x42\x2a\x03\x76\xba\xdc\x9f\xfc\xb6\x09\x7a\x6a\xdf\xfc\x67\xfe\x23\xd8\xef\xb6\x39\x4b\xc5\x83\x8f\x8c\x0b\xfb\xe8\x49\xf6\x9c\x7e\x2c\xc7\x27\xd6\x5a\xe2\xe2\x9b\x39\xfa\xa8\x28\xe2\x14\x02\x5a\x8b\xc5\x99\x33\xf0\xd4\x59\x5b\x0b\x3d\xf9\xa7\x74\x48\x04\x23\x7a\x4d\x0e\xd7\x21\x93\xc1\x53\xcc\xf0\x18\xc0\xb0\x4f\x20\xe9\x10\xb4\xec\xdc\x82\x32\x27\xd1\xf4\x07\x05\x21\xab\x65\xbc\x2d\x65\xce\xbb\x7e\xc3\x37\x8d\x06\x6e\xb1\x78\xf3\xcc\x95\xea\x43\x7b\x67\xbf\xbf\x99\xa1\xd1\xef\xdc\xff\x38\xb8\xeb\xde\xdf\x3c\xdc\x5d\x14\xac\x8d\x8b\xab\x87\xfb\x7e\xf7\xae\xf2\xb7\xbc\x0c\xf8\xa7\x87\xee\x43\xcd\x4f\x6e
\x80\xab\xce\xf7\xdd\x42\x0b\xfd\x9f\x1e\x3a\x57\xbd\xfe\xaf\x83\x9b\x8f\x83\xfb\xee\xdd\xcf\xbd\x8b\xee\xe0\xfe\xb6\x7b\xd1\xfb\xd8\xbb\xe8\xe8\x37\xfd\x67\x6f\xaf\x1e\x3e\xf5\xae\x07\x2e\xa3\xdb\xff\xe9\x97\x9b\xbb\x1f\x3f\x5e\xdd\xfc\x32\xf0\x3e\x79\x73\xfd\xb1\xf7\xa9\x6a\x15\x9d\xfb\xfb\xde\xa7\xeb\xcf\xdd\xeb\xe5\xad\xfa\xab\xa9\x51\xdb\x37\xdb\xbb\x7f\x3d\x5f\x97\xa7\xdd\x0d\xe7\xf6\x4c\xd0\x3f\x20\xe4\x72\x6b\x58\xf4\xf4\xc4\xfd\xcb\x34\xd6\x3f\xd5\x92\xdb\x85\xf3\x72\xa1\xf7\xc8\xb2\x98\x70\xa6\x0b\x28\x3c\x96\xae\xaa\xbb\x30\xdb\x73\xd4\x81\x43\x06\x76\x4e\xe1\xa3\x50\x34\x92\xcd\xd4\x65\x11\x00\x1f\x26\x74\x4a\x21\xa1\x00\x9d\xa2\xf2\x86\x17\x07\xb4\x6b\x82\x29\xd8\x70\x63\xbc\xec\x34\xc8\x72\xc1\x38\x70\xca\x39\x72\x17\x0b\x31\x5e\x10\x03\xeb\x6b\x1a\xd3\x97\xab\x5b\x00\xd9\x16\xe5\x28\x2e\xe5\x11\x0b\x0c\x56\x1c\x79\x42\xd0\x8f\xff\xc8\x27\x05\x81\x17\xeb\x30\x48\x17\x3a\x50\xda\x1f\x44\x6a\xa8\xba\x8a\x3d\x0b\x5f\x72\xc7\xdc\x7a\xc4\xe1\xdc\xda\xbe\xfd\x10\x25\x4b\x99\x87\xe4\x56\x08\x99\xe9\xe3\x6d\x56\x54\xe2\xf1\x73\x74\x0f\x28\x32\x32\xf7\x38\xe8\x5d\x9c\x25\xe9\x98\x32\x44\xa7\xb3\x04\x64\x8c\x71\x43\x0c\xc9\x04\x3f\x53\xee\x1a\xae\x98\xbe\x34\x40\x47\xab\x11\xa2\x53\x54\x7b\x50\xce\x51\x27\x8e\x65\x51\xc0\x15\x38\xc7\x49\xd1\xd3\xe2\xb4\x7d\xf0\x35\x2d\x58\xad\xd8\x2c\xf1\x51\x7e\xe4\x80\x62\xbb\xc7\xc9\x59\x14\x87\x45\x95\x61\x0b\xad\x45\x53\x70\xe0\x58\x79\xb0\x91\x0e\xd3\xc7\xf2\xc9\x89\xe6\x55\x7a\x8c\x43\x2c\xda\xee\x8b\x16\xba\xa8\xe9\x47\x33\xca\x0e\xe0\xa0\x6d\xf6\xcd\x5a\xc0\xed\x15\x9f\x74\x2b\x4e\x4a\xad\xee\x1a\x7f\xaf\xd0\x2a\xaf\xf2\x63\x3b\x0d\x52\x55\x2b\x91\x70\x24\x07\x19\xff\xaf\xb1\x8e\x5b\x78\xf5\x26\x7b\x73\xa9\xa6\x39\xf0\xe8\xb6\x6e\xe8\x6a\xa1\xfe\xd9\x86\xaf\x96\xf2\xe1\x8e\x90\xb3\x9a\x6b\x91\xd0\x2a\x84\x46\x10\xa5\xc4\x94\xd9\x06\x52\x24\x0b\xa3\xb9\x86\xed\xfa\x1c\x67\x2d\x15\xf1\x90\x3f\x17\x6c\xe2\x29\x91\x12\xd7\x60\xc1\x78\x9e\xbc\x6d\x04\x43\x76\x42\xed\x8b\x0d\xf9\xc9\x9d\xc9\xbe\x7e\x6b\x99\x8e\x7e\xe7\x1b\xf4
\x6e\xa1\x5a\x87\x8d\x5d\x12\x33\xba\x31\xa5\x8c\x5a\xbe\x9c\xe4\x39\x40\x5c\x78\xa9\x51\x75\x51\xab\x86\xde\xc0\x32\xc1\x2a\xfb\x82\xf9\x91\xc7\xf5\x53\x87\xbc\xd1\x37\x06\x1b\xb7\xe1\x20\x5c\xa4\xcf\x1a\x5c\x57\x08\xd3\xfa\x1d\xdb\x23\x3e\x9d\x1a\xbd\xa0\xe0\x02\x3e\x41\xd8\x54\x90\xe6\xda\x94\x4c\xa3\x89\x09\x8e\xe9\x2b\xe3\xe4\x91\xbd\x78\x1b\x52\xc8\xb1\xee\xf8\x23\x01\x50\xeb\x17\x7d\xdc\xe8\x73\x21\x73\x1d\x54\x46\x0a\x69\xd4\x1e\x23\x98\x38\x66\xde\xf0\x6c\x05\x83\x7b\xfb\xb5\x05\xab\x6f\xd0\x5b\xb3\x44\xdf\xba\x0e\x9b\xd9\xda\xbc\xc6\x96\x5b\x18\xf8\x4d\xa7\xe0\xf5\xd6\xac\x9a\xc1\x0e\x5a\x6b\x1e\x14\x39\x3d\xab\x84\x35\x85\xd3\xd3\xa1\x85\xff\xd0\xcb\x75\xd4\xfe\x8b\x5b\xd1\x5f\x8c\x21\x9c\xd6\xe0\xc5\x78\xa3\x65\xe0\xe9\xe8\x54\xeb\xac\x0e\xc7\xc0\xe6\x8f\x48\x74\x6a\x00\x19\xdf\x43\x12\x6b\xe7\xb6\xf7\xfe\x04\xbd\xf7\x0b\xf9\xde\x1f\x8d\xeb\x22\x3f\xfe\x96\x6a\xb6\xb9\x27\xd8\x72\x85\x5a\x92\xe2\xa1\x07\x4e\x29\xc9\x01\xcb\x31\x56\x0c\xa0\x3a\x29\xa0\xdf\x2c\xbc\x03\x11\x7d\x68\x17\x69\x82\xde\x59\x26\xbb\x8d\x9b\x19\x0d\x9b\xca\x8a\x9d\x8b\x1f\xd9\x70\x5e\x8e\x8c\x9d\x64\xa1\xb1\xc6\x32\x62\xeb\x16\x88\x7a\xbc\xc5\xba\xf3\x1d\x67\x58\x2f\xbf\x8d\x56\x54\xb2\x77\xb2\x3e\x3d\xb9\x0c\xad\x4b\xed\x08\xa5\x09\x55\xab\x2a\xb8\xf9\x1c\x31\x2b\x37\x65\x95\xf6\x75\x6c\xec\xd6\x20\x9f\xbf\x53\x45\x11\x5b\xca\x51\xa3\xda\x07\x2e\xdb\x2f\x97\xed\xa2\x94\xa5\x38\xb9\xf5\xaf\xef\x0b\xa3\x45\x7a\xc3\x38\x77\xaf\x36\x65\x32\x01\x5f\xe8\x93\xb9\xba\xbd\xf5\x9a\x81\x72\x8f\x26\xab\x23\xe5\xf7\x26\xdb\xc2\xc4\xaa\x17\xe7\x5a\x9e\x6a\x47\xd9\xee\x52\x9c\x9a\xb2\x55\x45\xa7\xe4\xc4\xb4\x33\xcb\x33\x44\xec\x79\x05\x76\x33\x89\x5d\x13\x42\x85\xfb\x88\x05\x8f\x5c\x0b\xe7\x60\x4d\x5b\xa0\x8e\x47\xb6\x48\xcf\xb9\xee\x7c\xee\x5e\x0e\xba\xd7\xfd\x5e\xff\xd7\x0a\x60\xd0\xe2\xcf\x0e\x1b\xd4\x7b\xe0\xfe\xd7\xfb\x7e\xf7\xf3\xe0\x53\xf7\xba\x7b\xd7\xe9\xaf\xc0\x0d\x5d\xf6\xb1\x3a\x4c\xca\x54\x56\x19\x8f\xeb\xe0\x52\x3a\x27\x73\xc5\xd7\x17\xd1\x43\xbd\x8f\x50\x52\x83\x20\x6a\x30\x1d\x58
\x4c\x04\x8a\xc9\x33\x49\xf8\x2c\x77\xea\x56\x12\xcc\x83\x16\xad\x18\x7f\x19\xbc\x28\x8c\x59\xa6\xf1\x39\x32\xbd\x11\xbd\xf6\xd0\xd9\x80\xa0\xf2\x61\x41\xd8\x7b\x85\xc8\x97\x59\x42\x23\xaa\xbc\x9a\x4f\x2e\x6c\x70\xc7\xc4\x5c\x21\xa5\x77\x05\x73\xed\x2c\x85\x67\xe7\x1e\x07\x3f\xfd\x60\xd1\xd7\x90\x9d\xa8\x0c\xea\x6e\x65\x67\xa8\x1d\xb8\x15\x6a\x22\xed\x0b\x48\x7c\x1b\xcc\x6e\x1f\xce\x89\xc5\xc2\x26\x5b\xb7\x59\x83\xd2\x57\x3d\xc9\xd5\xb7\xe1\xb2\xe4\xa2\xc2\xb9\x5e\x9e\x5d\xd4\x8c\x53\x5f\x39\x47\xa8\xd0\x88\x76\x07\x90\x2a\x36\xe1\x7f\xcd\x2c\x8f\x85\x46\x40\xcc\x24\xea\x62\x24\xc8\x94\x2b\x6d\x80\x99\x34\x8a\x13\xad\x54\x51\x9c\xd0\x3f\x00\x7c\x4c\x90\x33\x2f\xed\xc4\x41\xb6\xe5\xc1\x0b\x0b\x0c\x72\xf6\xc8\x2e\xbb\xb7\x77\xdd\x0b\x2d\x90\xce\xd0\x83\x04\x5c\xb1\xc2\xd2\x2f\x2d\x7b\x1b\x75\xcc\x4f\xff\xa0\x4c\x2a\x82\xeb\x32\xe8\x88\x10\x5c\x34\x97\x0f\xd9\xf7\xba\xf0\x5e\x35\x7b\xc3\x6f\x05\xcf\x98\x73\x3f\x5c\xd7\x76\x11\xf7\x0a\x2d\x76\x5e\xc8\x76\x87\x5f\x0a\x14\xf1\x71\x55\x40\x13\x29\x52\x7d\x8f\xd4\x06\x64\xd6\xe6\xeb\x2b\x7c\xf3\x16\xde\x5d\xb6\xce\x3e\xe4\x25\x4a\x95\xc3\xbc\x1a\x24\xd8\xac\x9d\x51\x69\x9d\xb5\xaa\xa2\x78\x0d\x0c\x96\x12\xeb\x0f\xc9\x18\x33\x24\x52\xc6\x4a\xb8\xbf\xbe\x9f\x6f\x31\xd3\x68\xdd\xa3\xaa\x69\x86\xa7\x3c\x65\xa6\x1f\xaf\x9e\x55\xc5\x64\xe4\x8c\x30\xb5\x62\x32\xaf\x85\xb0\x53\x9a\x6a\x7b\x41\x76\x2a\x26\x5a\x87\xb3\x53\x15\xcd\x82\x56\xe5\xeb\x5d\xcb\x2e\x93\xb1\x10\xd2\xd2\x87\x2a\xbb\x9f\xab\xad\x6c\x2c\x9f\xb6\xfe\x5c\x1f\xcb\xa7\xd5\x9f\x8a\x49\xf4\xb4\xee\x65\x53\x2e\x67\x4d\x6c\xa7\xf7\x05\x67\xdf\x5c\xff\x6a\x7b\xee\x40\x83\xff\xe8\x09\xfd\xd0\xff\x7c\x85\x46\x54\xeb\xbd\xfa\x5a\xb9\xc6\x5a\xc7\x7e\x10\x89\xf3\x4a\x5b\xcf\x6e\x2a\x92\xec\xee\x85\x8d\x77\xaa\x94\xa7\x25\xe8\x1b\x0d\x8f\x89\x73\x35\x0b\x0b\xa3\x58\xea\xb9\x23\x30\x8b\xf9\xd4\xac\xe3\x83\x4c\x47\x23\xfa\xe5\x4c\x61\xf1\x4d\x0d\x3d\x4c\x4e\xc7\xe0\x9f\x7c\x38\xd0\x33\xda\xf2\x22\xae\x1a\x0e\xd9\x06\xe4\x19\xd9\xec\xca\x2e\xcd\xb3\xff\x87\x0f\x01\x22\x00\x50\x0e
\x5c\x6c\xd0\xe6\x49\xd8\x47\x1c\x27\xe5\x1d\xb9\x0b\xe8\x35\x11\x17\x82\x58\x64\x01\xd3\x34\x76\x86\x85\xa2\xe0\xad\x75\xe8\x37\x85\xb6\x07\xf9\x16\xf9\x2d\xf2\x27\x38\x87\x18\x1f\x12\x02\xe1\xa5\x19\x4d\xd6\x33\x7a\x2f\x0a\x91\xd1\xd2\x09\xb4\xe9\xba\x16\x10\x15\x1c\x32\x2b\x55\xac\xee\x33\x61\x6a\x27\xf6\x09\x0c\x51\x81\x75\xd0\x2c\xc6\x61\x7a\xb7\xf6\x2e\xf3\xcb\xcd\xe5\x41\xfb\x39\x55\x4a\x60\xb8\xe7\x6d\x75\x99\x0d\xe8\xd7\xa5\x19\x3c\x37\x8e\x5c\xc3\xa3\x8b\x74\x59\x51\x4f\x60\xa9\x9d\xb7\xc6\xcf\x73\x81\x5d\xcf\x86\x0d\x11\x9a\x24\x31\x5e\x0c\x0f\x19\xc4\x1a\xa7\xe5\x3d\x37\xdf\xd4\xbc\x55\xfa\xe4\xca\x2d\xdf\x00\x8e\xa8\x30\xcc\x27\x02\x75\xb0\xbb\xc8\xde\x5f\x07\xf0\x00\x26\xf2\x20\x12\xc8\x3b\x5f\xea\xc5\x32\xfd\xe3\xb5\xe4\xcb\x34\x3b\xdc\x40\x47\x37\x93\xd1\x4a\x23\x99\x09\x12\xe9\xab\xec\x1c\xdd\x26\x44\x6b\x5e\xa9\xd6\xbe\xd2\x24\x71\xd0\x6d\xcb\xb5\xc3\xb5\xe0\x06\xf7\xbe\x2e\xcf\xf6\x58\xb2\x30\x07\x5d\xb8\x7c\x65\x1e\x0d\x76\x0f\x53\xe1\xd1\x17\x5c\xc8\xe0\x48\x2c\x5a\x91\x20\xe1\xe7\x26\x6b\x17\x5c\x49\xb8\x70\x91\xd1\x3f\xb4\xf8\x15\x44\x4e\x78\x6d\x65\xa8\xbf\xda\xfd\xac\xc1\x91\x72\x8f\x8b\x70\xf7\x61\x5d\x32\x7a\x03\xbd\xa6\x74\x07\x16\x54\x9c\x26\xb1\xd8\x3c\xf7\xc4\x02\xe9\xda\xbb\xd5\x4e\x0d\x6e\xc9\xdc\xd5\xe6\x83\xda\xe5\xa1\x8b\xdc\x98\x99\x9b\xd8\x6b\xf6\x7a\xee\x40\xce\xeb\x28\xa8\x92\x79\x3b\x41\xa4\xef\xda\xba\x2d\xd6\xeb\x1c\xa4\x62\x2d\x1c\x8f\x1c\x8e\x7e\x1d\xc9\x6d\x2b\x78\xf2\x69\x69\x22\x54\x8b\x4b\xdb\x25\x04\xd4\x68\x9b\xe8\x24\x0b\x10\x7f\x96\x6d\x0c\x19\x2b\x4d\xbc\x7a\xa1\xbc\x6d\x58\x0d\xb4\xe4\x5c\x95\xd9\x57\x74\xad\xc0\x81\x85\x05\x04\xd0\xb8\xf5\x41\xe3\x6c\xcb\x98\x8c\xf7\x00\xe2\x51\x09\x40\x4b\xc8\x03\x68\x65\xc5\xc1\x3a\xbd\x57\x95\x8b\x15\x76\xa7\x51\x6d\x58\xe1\x0d\x2d\x4b\x2e\xb7\x8c\xc0\xe9\xc5\xcc\x07\x50\x6d\xbb\x4d\x0e\x50\x61\xfd\x26\x7a\x00\x63\x92\x18\x19\xc8\x07\x03\x69\x6d\x69\x97\x45\x4e\x66\x58\x10\xa6\x1e\xd9\x9d\x9e\x85\x79\x23\xcf\xc4\x70\x59\x40\xae\xcd\x00\x34\x23\x1e\x21\x6c\xdf\x02\xa2\xd7\xa5
\xe1\xc9\x81\x79\x08\x4c\xd3\x3d\x22\x13\x7c\x6f\x9e\x31\x40\x11\x16\x28\x49\x2f\x95\x8e\x72\x33\x5e\x2b\x90\xd1\x84\x02\x4e\x43\x4c\xa4\xbd\x90\xa8\xb2\x40\x1c\x99\xfa\x9d\x12\x07\xac\x0d\xaf\x65\xf2\xab\x4a\x60\x3b\x47\x01\x73\x0e\x3a\xf9\xc8\xbc\x6f\x2c\xc1\x61\x35\xc6\xfa\x86\xa6\x04\xec\x33\x8d\xb3\xc0\x17\xfc\xa7\xd9\x21\x2e\xe8\x98\x32\xaf\x1b\x96\x5d\xde\x14\xcf\xc0\xbd\x6b\xce\x20\x1f\x65\x77\x5a\xdf\xd6\x38\x9c\xc1\x8c\xff\xef\x7f\xfd\xf7\x19\xad\x8b\x7e\xc8\x81\xa5\x40\x1b\x76\x72\xbd\x6d\xf1\x77\xde\x83\x5e\xa9\x81\xf4\xf0\x6c\x5a\x59\xa8\xdb\xc8\xff\x6a\x2f\x37\xcd\x34\x5c\x4d\x4c\xb8\xb7\xc8\xee\x10\x1b\x11\xe9\x92\xb3\x61\xae\x98\xd7\xa5\x25\x95\x50\x9b\xa0\x67\x62\x4e\x72\xe6\x20\xf0\x3b\xcd\x2f\xb8\x69\x1e\x59\xfe\x8a\x34\x20\x32\x06\xb7\xd7\xfc\x21\xa7\x4e\x43\xc2\x2c\x93\xfd\x79\xa6\x44\x1e\x0e\xf7\x72\xa1\x5d\x5f\x14\x93\xc3\xaa\xc7\x2f\xdd\xb4\x25\xc9\xed\x01\x58\x6e\x93\x33\x3a\xc1\x72\x7f\xa9\x39\x95\xfd\xbc\x8c\x37\xdd\x57\x1e\x56\x25\xe9\x98\x49\x9a\x12\x59\xbd\x21\xa9\x24\xc2\x48\xba\x0c\x43\xcc\x72\x82\x0f\xcf\x09\x19\xa2\x2b\x62\x8d\x64\x8a\xe9\x5a\xd5\x0c\xfa\xf9\x6a\xf0\xd0\x42\xb0\x01\x8f\x89\x18\xc4\xa9\x5a\x38\x16\xcb\x2a\x0c\xf4\x4b\x97\xa9\x9a\xaf\x1e\x5f\x26\x78\xb1\x9f\xd1\x32\xc0\x56\xfd\x7c\xcd\xb0\xab\x35\x66\x2f\xc5\xa7\xa8\x35\xd7\xc0\xa1\x92\x12\x1c\xaa\xcd\x78\x2d\xb8\x48\xe0\x06\x66\x0a\x70\x08\x73\x4b\xca\x5e\xd1\x06\xb4\x1d\x66\x8e\x86\x69\xee\x52\xca\xda\x60\xc4\x67\x8f\xec\xa3\xe9\x23\x03\x56\x9e\x99\x40\x04\xe5\x46\xe4\xcb\x8c\x4b\x52\xa8\x7f\xab\x68\x6d\x61\x0b\x5f\xed\x34\xaa\x95\xf5\xfc\xa5\xed\x75\xf5\x57\x07\xb6\x5d\xdc\xf0\xc5\x25\x57\x73\xe0\x56\xea\x60\x44\x67\x54\xf3\xce\xa0\xf2\xa4\xed\xaf\xbd\x72\x9e\xd3\x05\xe0\x61\x2a\x99\x9f\xa0\x6c\x79\x25\x86\x48\xc8\x33\x01\x77\x3a\xcc\xd1\x6f\x60\x52\xf4\xeb\xd5\x88\x93\x55\x07\x28\x2f\x3e\x05\xb1\x80\xe2\xf2\x0c\x8a\x25\x7a\x55\xbc\x58\x2c\x3e\xda\xba\x4e\xae\x2a\x31\x65\x0d\xf5\xbc\xe3\x37\x72\x99\x13\x85\xc8\x17\x45\x6c\xab\xd7\xbe\xab\x64\x5c\x2c\x7e\x40\xd5\xc5
\x58\xf5\xba\xe3\xde\x9b\x6e\x77\x5c\xe1\xbb\x2b\xd5\x8c\xdd\x95\x6f\x4b\x17\x27\x98\xc5\xb6\x1e\xd7\x1a\x19\x5a\xd9\x82\xd5\x19\xa7\x5b\x56\xa9\x60\xab\x4a\x3d\x04\x7c\x33\xa6\x81\xea\x87\x8b\xcc\x19\x8c\xda\x64\x81\xf4\x0a\x2e\xb4\xe6\x9e\x32\x45\x13\xcd\x1c\x76\x0e\x12\x8d\x20\x33\xce\xa2\x3b\x42\x66\x7b\x1d\x80\x20\x95\x92\xb2\xf1\xc0\x52\xd2\x95\x96\x36\xbb\x18\x8a\x3c\xf5\xd9\x0c\x65\xfe\xf8\xbd\x1b\x68\xb9\x53\xdd\xb0\x35\x80\xbb\xb9\xa2\x56\xb0\x38\x18\x77\x8b\xb1\xa8\x7c\xae\x16\x76\x40\x63\x43\x0a\x6a\x3a\x8a\xc3\x42\xd7\xf1\xbb\x83\x4e\xb7\x08\x7e\x91\x5f\x21\xd2\x16\xaa\x9a\xf2\x33\xc8\xd4\x57\x35\x95\xb8\xb2\xb6\x02\xb7\xc7\x32\x15\xcd\xf6\x33\xcb\x70\x06\x4a\xc5\xbc\xd8\x7d\xce\x96\x23\xe0\x24\x19\xe2\xe8\x29\xb3\xc2\x32\x5f\x04\x17\xae\x1f\x84\xd6\x2b\xa1\xe1\x9d\x61\x2e\x3d\xd1\x08\xb4\x1b\x3f\x5a\x68\xe0\x8f\xec\xb4\xf3\x8f\x1b\xaa\x59\x5c\x39\x83\x77\x65\x66\x6f\x6a\x1b\x62\x32\x4b\xf8\x7c\x5a\x73\x9f\x95\x0b\x18\xb7\xc9\xd4\xa9\xab\x9f\xdc\xe9\x55\x56\x12\x7a\x6b\x5f\x66\x0b\xd5\x50\x3b\x00\xe3\x5a\x43\x4a\x7e\x4a\xf8\x10\x5c\xaa\xd6\xfd\xe0\x2a\x7c\xbc\x52\x8f\xf2\x79\x5e\xb7\xee\xa8\x7c\x22\xa9\x9c\x25\xda\x98\xa9\xff\x82\xa9\x39\xd9\xef\xbe\x19\x84\x84\xd5\xde\xc1\xe6\xd9\xda\x95\xaf\xef\x03\xf6\xf9\xca\x69\x02\xe6\x59\x23\xbf\x4a\x5e\x36\x53\x6a\x78\x66\x82\xd4\x8a\x3f\x32\x85\xc7\x6e\x73\xad\x72\xc9\x5f\x18\x11\x72\x42\x67\x85\x46\x98\x5b\xa7\x87\x5b\x8e\xb6\xff\xc7\x24\x43\x57\x8e\xd9\xc2\xd2\xad\x3e\x9f\x9d\x1a\x74\x16\xcd\x9d\x72\x86\xa3\xdc\x27\x1b\x25\x58\x4a\x3a\x9a\x7b\xa0\x2a\x59\x9e\x2f\x94\xae\x15\x9d\x18\x5e\xe7\xbb\x2a\x31\x67\xa8\xb3\x1b\x54\x81\xed\x2b\x2a\x1f\x8a\x87\x9f\xc6\x3e\xe8\x9e\xbe\xcd\x16\xa1\x77\x9c\x9e\x60\xa9\x5e\x0b\x1e\x6c\xe0\x13\x36\x43\x01\x68\x8a\xd7\xb4\x67\x4e\xaa\x28\xc3\x5c\x60\xa4\x1c\x2d\x2c\xd3\xa3\x2d\xcd\xac\x0d\x97\x21\xad\xf8\xf0\x45\xaa\x50\xc3\x0a\x9c\xa7\x6d\x46\xe7\x12\xd7\xe7\x32\x43\x69\x01\x30\x8b\xfc\xe5\x13\x24\xb7\x02\x65\x6b\xc2\x94\x97\x24\x21\x3b\x49\x36\xdf\x80\x43\xcb\x99\x1c\x1e\x6f
\x2e\xe5\xcb\xbc\x2d\xc5\x6a\xbf\xca\x06\x39\xf0\x35\x18\x49\xd5\x53\xff\xc5\x4c\xd4\xa6\xc1\x57\xed\x22\xf8\x44\x81\xca\xab\x67\xdb\x26\x2e\xf7\x52\x4b\xcc\xf4\x2d\xbf\xe7\x6b\x2c\x30\x75\xbe\xe2\x4c\x4f\x6c\x23\x9f\xbf\x72\xaa\xfa\xc2\xbc\x3e\x91\x26\x69\x35\x2b\x4f\xdf\x46\xb2\x77\xf1\x86\x6a\xc6\x17\x36\x70\xad\x38\x1a\x13\x40\xe2\xa1\x2c\xa6\xcf\x34\x4e\x71\x72\x54\x3c\xb1\xb3\x42\x9b\x1d\x51\xbf\x5a\xc2\x34\xf2\xf4\xe4\xf9\xa0\x44\x49\x77\x1f\x2d\x60\x7e\xda\xcd\x69\xe1\x16\xb4\xe3\x58\x1a\x83\xe1\xcd\x6b\x6c\x5b\x43\x63\xd8\x99\x59\x80\x88\xa0\x4a\x16\x2e\xd9\x7c\xee\xbb\xd7\x26\x0d\x8d\x63\xfb\x46\x06\x07\x51\x80\x61\xc3\x05\x34\x4b\xb3\x47\xaf\x2f\x75\xcb\x47\xeb\xad\xeb\x9d\xeb\x9f\xb1\xf2\xac\xf2\xd3\x15\x94\xe1\x36\x9c\xd3\xe6\xfa\xb0\x03\xa0\x6d\xa1\xf2\x53\x77\x0c\xdb\x79\xff\xb6\x40\x39\x5e\x50\x09\x76\xa7\x22\x1f\x11\x9b\xb4\x42\x53\x5e\xd8\x8a\x43\xe9\xcb\xa7\x0e\xdb\x2b\x47\xca\x6a\xef\x16\xb5\xe3\x24\xdf\xd9\xf0\xe3\xfe\x2e\xf8\xd5\xfc\xb2\x13\xfe\x00\x98\x5b\x0c\xf5\xf8\xa9\x6d\xf7\x03\x87\xd7\xcb\xe1\x5c\x88\x79\xad\xc8\x8e\xb5\xd3\x6b\x94\x17\xbb\x40\xce\x7d\x6c\xaf\x2d\xbe\x6c\xbc\xb9\xfb\x64\xb5\x75\xe7\xb2\x0b\x1b\x6d\xcf\xd1\x43\xcb\x8d\xde\x0b\x21\x49\xbd\xd9\x2d\x5a\x01\xe9\xe4\xb6\x6c\x97\x87\xac\xaa\x47\xe3\xf6\xf0\x11\xae\xb6\x74\x30\x13\x64\x44\xbf\x6c\x64\x0a\xdc\xc2\xab\xd6\xbc\xd6\x64\x2e\x75\x7d\x84\xb0\x20\x74\x89\xf4\x12\x69\x2d\xa5\x6d\x67\xb8\x47\x96\x57\xe4\xda\x72\x5c\xad\x0c\x73\x51\xf8\xd3\xa6\xd0\xa7\xbb\xef\x50\x69\xf6\x75\xa2\xd4\x4c\x9e\x7f\xf8\x30\xa6\x6a\x92\x0e\xcf\x22\x3e\x35\xf5\x1f\x5c\x8c\xcd\x3f\x3e\x50\x29\x53\x22\x3f\xfc\xed\xaf\x7f\xcd\xb7\x78\x88\xa3\xa7\xb1\x81\x73\x5a\x8c\x77\x16\xb7\x9c\x60\xb9\x5d\x46\x99\x2b\x9d\xdc\x73\x09\xbd\xf7\x19\x57\xb4\xac\xdf\x91\x0a\x4f\x67\x7e\x0a\xb2\xe9\xf1\x28\x15\xce\x3b\xcb\x40\x3d\xac\x5e\x26\x9a\xe0\xd9\x8c\xb0\x7a\xb7\x8b\x29\x70\xde\x42\xf4\xb8\x12\x69\x3b\x43\xf2\x65\x96\x60\x56\x84\xfd\x80\x36\x69\x82\x44\x84\x29\x0b\x49\x91\xf7\xa6\x07\x6e\x34\xd0\x53\x46\xfe\xaf
\x57\x02\x0b\x6b\xa4\x32\xef\x7f\xe8\xa6\x63\x7b\x11\xbb\x0e\xb5\xd8\x23\x5d\xb9\xff\x73\x4e\x3b\xe2\xa8\xb6\xac\x38\xf6\xde\xf6\x7a\xdb\x86\x83\x22\xc1\xd9\x80\x7c\xd1\x42\x4e\x6e\x0a\x14\xf7\x20\x89\x44\x9d\x5f\xee\x91\x9c\x33\x85\xbf\x9c\xa3\xcf\x94\x81\x02\xfb\x03\x4f\x85\x44\x97\x78\x7e\xca\x47\xa7\x53\xce\xd4\x04\x7d\x86\xff\xdf\xfe\xe9\x85\x90\x27\xf4\x2b\xc1\xc2\xca\x07\xdb\x3f\xd2\xb5\xb0\x03\x16\x12\x29\x93\x88\x3c\xeb\x13\xfa\xd7\xff\x85\xa6\x66\xe4\x73\xf4\xed\x87\xbf\xfe\x2f\xf4\x67\xf8\x7f\xff\x0f\xfa\x73\x8d\xa7\x61\x3d\xa8\x39\x68\x33\x7e\x57\x9b\x46\x00\x94\x92\x8b\x24\x5f\x35\xec\x85\xe0\xf9\x4e\x55\x8e\xfc\x44\xa3\x27\x3e\x1a\x0d\x34\x63\x98\x02\xd2\x01\xde\xca\xed\xe0\xa3\x06\x53\xdb\x28\xde\xb4\x9d\xcc\x1b\x3e\xd9\x8f\x1a\xa4\x11\x27\xae\x65\x9a\xbb\x27\x20\x79\xad\xd0\x7a\x9c\x4a\x78\x8b\xc4\x5a\xaa\xae\x73\x3a\x9c\x77\xd1\x81\x0e\x38\x0f\x92\x8f\xcc\xe3\x14\xe2\x42\xc2\xa9\x9f\x3d\x6d\x12\xcc\x2c\x21\x2b\x8f\xc3\x42\x5a\xf7\x9b\xc9\xd5\x85\xa5\xbd\x56\x9e\xae\x5c\xf8\xf8\xea\x14\xdd\x7b\x2e\xb6\xb2\xb7\x9e\x48\x6d\x09\xcd\x8a\xe6\x66\xae\xe1\x36\xf6\x9d\x1a\x8a\x23\xc9\x45\x86\xde\x6d\xfc\x22\xb6\x05\xea\x6a\x2f\x2a\x15\x26\xa9\xb1\xd9\xa1\xd7\x4b\xbf\xcc\x5e\x59\x35\x4d\xc8\x70\x74\x4f\xe7\xcd\x1d\x61\xb6\x5a\x45\xd2\x22\xb1\x62\xc6\x15\x20\x9b\xab\x36\xf4\x3e\xc3\x55\x81\xc1\x21\xdd\x16\xea\x86\x98\xd3\x6c\x2d\x70\x45\xf5\x7e\xa6\x22\x22\x17\x7c\xbb\x74\xeb\x84\xb2\x85\x3a\x8d\xda\xe4\xb6\x7a\x9d\xfc\xca\x76\x88\x73\x38\xd4\x3c\xce\x8d\x05\x13\x96\xb0\xbd\x57\x3c\x00\xdc\xe2\x6a\x00\x48\x71\x17\x18\xab\x0b\x1d\x41\xb6\x90\xda\xc6\x71\x9d\x0b\x3c\xd7\x50\xa6\xd4\x47\x46\x60\x2d\x0b\x97\xe4\x4c\x42\x3a\xd9\xd6\xf3\xf0\x7a\x23\xe5\x39\x6a\x85\x2e\xc5\x30\x13\xa8\xb7\xdc\x10\x23\xd7\xb4\x29\x3b\x41\x02\x43\x32\xb0\x9a\xe8\xf1\x24\x11\xa7\x23\x1c\x51\x36\x3e\xf1\xe0\x51\x01\xaa\xc4\xbf\x0e\xaa\x98\xb4\x8f\xe5\xd3\x6e\x13\x5c\xb7\xee\x36\x4b\xe3\xbc\xe3\xa1\x05\x34\x32\x81\x15\xba\x80\x0d\xa9\xb0\x7c\xaa\x43\xf4\x5a\x80\x13\x5c\x32\xbb\x8c\x14\x0e
\x84\x70\xd9\xfc\x1c\xf4\x01\xf1\xed\x29\xe8\x54\xe2\xfa\x9f\x5b\x70\x51\x57\x69\x8a\x33\xf4\x9f\x32\xaa\xee\x92\xf9\xcb\x09\x17\x6a\xb0\x21\x1e\x71\x39\xa4\xc2\xc8\x69\x02\x40\x42\xfc\x99\x88\x67\x4a\x5e\x8a\xb0\xbe\xeb\xf0\xa2\x71\x9a\x79\xf9\x94\x80\xfb\x3a\x9d\x71\x28\xdd\x1a\xa1\x29\x66\x73\x23\x28\xb5\x70\xc1\xf2\x49\x66\x5d\x97\x91\x9c\xe2\x24\x39\x41\x82\xa4\xd2\x74\x23\x97\x24\x19\x9d\xba\x06\x30\x31\x4a\xf8\x98\x46\x38\x41\xc3\x84\x47\x4f\xd2\x54\x56\xb2\xb1\x11\x52\x33\xc1\x23\x22\xa5\xa7\x59\xe5\x28\x0a\xb6\xb6\x15\x5a\x2e\x2b\x22\xa6\x94\x51\xa9\x68\xe4\x54\xa6\x1c\x0c\xc5\x34\xfe\x8f\x30\xb8\x84\xa1\x52\x18\xa6\xab\x35\x3d\x62\x40\x61\x53\x66\x5b\x85\xc1\x75\x6d\xb1\x1e\x5d\x71\x42\xdd\x01\xda\x01\x74\xa5\xe3\x90\x81\x2a\x1e\xc8\x15\x47\xea\xc2\xbe\x06\xc7\x78\x19\x0b\xdc\x15\x4f\x54\xc6\x90\xd9\x49\x2b\xc0\x69\x41\x2d\x43\x56\x7a\x51\xd0\x5c\xb2\x8a\x84\x96\x21\xe9\xc1\x94\x6b\xf0\xf3\x56\xf1\xb4\xa6\x22\xa8\x3c\xd0\x9d\xae\x1c\xb5\xa7\x2c\x4a\xd2\x38\x6b\xab\xaa\x55\x80\x67\xcd\x24\x8e\x3c\x9a\xf6\x5a\x51\x38\x41\x58\xa2\x17\x92\x24\xfa\xff\x9a\xca\x8b\xd3\xac\x5d\x88\x16\xc9\xa6\xa5\x0b\x7c\xc4\x49\xe9\x3a\x8e\x6a\x1d\x2a\xea\x2d\x56\x13\x83\x35\x31\xe5\xca\x74\xb4\x35\xa8\xa8\xce\xbf\x65\x60\x34\x87\x09\x1f\xc2\x49\x07\xc0\x54\x57\x5f\xed\x95\x73\xa6\x51\x44\x48\x4c\x62\xd3\x9f\x33\x03\xf3\xb4\x47\xf4\x9b\x6a\xf8\xce\x02\x45\x5a\x00\x96\x5a\x76\xac\xd5\x42\xa6\x16\xbb\x1b\x9e\xa1\xdb\x12\x20\x90\x47\x99\x11\x2e\xc3\xc3\x9d\x2c\x6c\xe1\xeb\x00\xac\x96\x16\xb1\xbf\x1d\x5a\x13\x60\xb5\xf0\xcd\x1d\x00\xac\x96\xd6\x59\x53\x33\xc2\xc7\x7b\xad\x75\xd7\x8b\xba\xe2\xcd\x0b\x10\x0d\x30\x9d\xb9\x3b\x0b\x2c\xe8\x0e\xe4\xbc\x8a\x11\xdb\x05\x1e\x5b\xea\x01\xfa\xba\xe0\xb1\xa5\xc9\xb4\x19\x3c\xb6\x34\xd5\xf6\x82\xc7\x56\x4c\xb4\x01\x78\xac\x09\xee\x0f\x34\x53\x37\x13\x0a\x50\x50\x35\x4c\x47\xf7\x00\x31\xb0\x74\x8e\x17\x26\x71\xc0\x5c\x63\xee\x8e\xb6\xf9\x45\x30\x5b\x5b\x7b\x5b\x97\x8e\x55\x0a\x42\xac\xcb\x7b\x59\xf4\xcd\x80\x8e\xac\xeb\x76\x3f\xf1\xbd\xdd\xe0\x87
\x8c\xf0\xcc\x62\x19\xd4\xb5\x38\x6a\x4f\xd5\xf6\x66\xb8\xbc\x80\x7d\x59\x10\xf9\x8d\x90\xeb\x3e\x97\xba\x85\x4c\xf8\x8b\xed\xd8\x05\x6c\x68\x98\xb2\x96\x05\xe1\xa3\x03\x6b\xb4\xd5\x51\x8e\x32\x45\xc6\x65\x9b\x36\x3f\x34\x94\xa9\xef\xfe\xb6\x52\x12\x19\x68\x4f\x67\x1e\x7a\x3d\x3b\xb2\x60\x87\xfd\x8d\xc4\x28\x9a\x68\xab\x48\x6a\xf3\x45\x2f\xc7\xdc\xac\x12\x4d\x31\x75\x86\x54\x2a\x4d\x68\x89\xca\x47\x56\xc0\xc2\x3d\x43\x1f\xa1\x0d\x32\x9e\xce\xb4\xfd\x95\xad\x8f\x6a\x4e\x7a\x4c\xbf\xfd\xf6\x3b\x82\xbe\x45\x53\x82\x59\xc1\x86\x05\xb3\x49\x5f\x7d\x80\x1d\xa9\x26\xe4\x91\x55\x6e\x05\xea\x7e\x31\xbd\xcd\x5c\xbe\x61\x8f\x8d\xb8\xb3\x89\xa1\xbd\x27\x8e\x26\x48\xa6\x43\xd3\x9f\xda\xf3\x61\x38\x45\xfa\x8a\x8f\x21\x50\x0d\x37\xb2\x9b\xf4\xb2\x53\xb8\xdf\x1c\x00\x1b\x6e\x6c\x7a\x1b\x77\xe0\x1e\x39\x95\xa4\x80\x29\x56\x11\x34\x33\x92\xcf\x3f\xf8\xd2\xe0\x0d\x9d\x98\x18\x82\xb6\xcf\xb0\xf5\xec\x6b\x5d\x1a\xd2\x89\x21\x4a\x96\x26\x58\xd8\xa3\xff\xc8\xb4\xa1\x21\xc8\x33\xe5\xa9\x4c\xe6\x28\xe6\x8c\x9c\x00\x27\xa4\xd1\xc4\x04\x56\xb5\xcd\x82\x6d\xa3\x94\x67\x2a\x53\x6d\xd0\xc2\x58\xae\x2f\x8b\x54\xd8\x60\xa1\x4d\x28\x7c\x47\x9b\xdf\x04\xde\x52\x5e\x7d\x24\x6a\x66\x45\xf9\x70\xc5\x25\x99\xdf\x10\xae\xb8\xc0\x55\x01\xae\x38\x83\x2b\x5e\xa4\x4b\x1b\xe1\x8a\x4b\x7b\xde\x0c\xae\xb8\x6a\xcb\x37\x80\x2b\x2e\x0c\xf3\x66\xe0\x8a\x4b\x14\x7d\x33\x70\xc5\xa5\x75\x05\xb8\xe2\xb7\x07\x57\xbc\x25\x20\x6f\xb5\x2c\x36\xb8\x5e\x8a\xb2\xf9\xda\x4c\xf6\x5e\xa2\xde\x8d\x66\xb0\xe8\xa9\x98\xd4\x96\x5d\x57\xdb\x83\x00\x57\x0b\xa1\xf5\x40\x80\x2b\x4d\xf5\x7a\x51\xb7\x2d\xb0\x18\x18\x06\x07\x06\x01\x2e\x2c\x20\xe4\x57\xae\x9f\x5f\x59\xc9\x7c\xf6\xdb\x7a\x7a\x2e\xe9\xb2\x7c\x21\x37\x84\x01\x2e\xec\x4f\xa3\x4c\x4c\x50\xdd\x77\xc0\x89\xfb\xd5\xe6\xfb\x85\x43\xbe\x52\x97\xf7\xa9\x28\x2d\x20\xb9\xd6\xf0\x1c\x4a\xa1\x31\xc2\xfd\xf8\x7f\xe0\xdc\x0d\x32\x83\x4b\xe4\xcd\xe2\x2a\x86\x17\x1b\xb0\x6a\x63\x0e\x75\x56\xe9\x6e\x0a\x85\x5d\xf1\xe6\x9a\x21\x66\x37\x89\xfb\x19\x89\x6a\x7c\xcc\x74\x4a\x77\x35\xec\xaa\x8b\x2c
\xc3\x60\x03\x83\x7c\xa1\x2e\x55\x5f\x4f\x66\x3a\x46\xc7\x2f\x95\x03\x03\x4a\x8a\x79\x73\x4c\xa5\x12\xb5\xb9\x4d\x0b\x33\xdc\x26\x54\x3a\x4b\x1b\x27\xc4\x78\x54\x1d\x6f\xf6\xda\x94\x4c\xb9\x58\x95\x58\x55\xf9\xa6\x6d\xb1\xb4\xc9\xab\x64\x36\x21\x53\xad\xc9\x0c\xd6\x1d\xa4\xe9\x7e\x67\x45\xcb\xb6\x76\xcd\x24\x3a\x16\x98\xc0\x0b\x84\xea\x67\x63\x83\x84\xda\x78\xbb\xb7\xdd\x66\x8b\xd5\xba\x66\x40\xc8\x81\x78\x2f\x77\xb8\xd9\x87\x0a\xe1\x6e\xe0\xef\xca\x9c\x8e\x2c\xa5\x66\x75\xd6\xc6\x92\x7c\x8d\x65\x78\x67\xf9\x5b\xb6\x01\xf9\x1a\xa1\xfc\x62\x74\x5e\x4b\x42\xbf\xfb\xf4\xfa\x09\x1e\x35\x68\xbd\x8b\xe4\x81\xcc\x1c\x49\xc4\xa9\x6f\x19\x14\x26\xb3\x48\xaf\x02\x97\x38\x8b\x72\x0b\x26\x49\x45\x6d\x96\x69\x13\x87\x76\xa4\x52\x9c\x80\x25\xe1\x77\x4d\x2d\x6f\xea\x70\x5e\x51\xf6\xd8\x2c\x62\x42\x99\xfa\x8f\xbf\xaf\xb5\x9b\xda\xb4\xb2\x74\x83\x4e\x6f\x38\x8a\x88\x34\x3e\x76\x9b\x85\x8c\x87\xfc\x19\x9a\xbc\x6d\xb3\xab\xfa\x28\xeb\x75\x6b\x01\x9f\x41\x60\xc7\x39\xab\x1b\x75\x61\x22\x78\x3a\x9e\x38\x1f\x92\x3e\x33\x7a\x69\x55\x7b\xf9\xf3\x82\x8f\x7c\xed\xbd\xfc\x3e\xa5\xc9\x66\x1e\xba\xfb\x42\xfb\xbb\x4f\xbd\x3e\x92\x93\xec\xb4\x0e\x61\xd8\xca\x8d\x5d\x9c\x74\xf3\x6f\xda\x77\xb3\x78\x0d\x7c\xe6\xc4\xc1\xbe\x8e\x78\x92\x40\xa4\x41\x92\xe9\x33\x11\xd5\x9f\x87\x05\xf7\xe9\x7a\x88\x8d\xd9\x04\xe0\xed\xbc\x30\xa2\x91\xfe\x75\x6b\x54\x43\x89\xdc\xec\xcb\x49\x0b\x26\x55\x8d\x33\xc2\xaa\x7c\x6c\xbf\x2c\x76\x1e\x3a\xb2\x84\x41\x97\x3d\xb6\xb3\xa4\x41\x47\x92\x03\x27\x0e\xae\x58\x47\x5b\x93\x07\x4b\xc2\x2e\xcb\xe5\xcb\xaf\x19\x97\x38\x64\x0c\x9f\x8e\x26\xf1\x23\xeb\x14\xea\x29\x5c\x87\xf6\xe1\x3c\x4f\xc8\x36\x36\x84\x2f\xcc\xa0\xbf\x8b\x75\xac\x40\x18\x4d\xff\x0b\x2c\x1d\x03\x9a\x6c\x52\x0a\x5d\xda\x20\x64\x93\x93\xf8\x14\x47\xf3\x28\xa1\x91\x67\x33\x8f\x05\x9e\x4d\xaa\x24\x9e\xdb\xf9\x80\x3a\xf4\x5a\xa8\x43\x75\x8d\xd0\xd6\xc9\xdb\x76\x7c\xc5\xf0\x94\x04\x34\xa4\x36\xa2\x21\x9d\x64\x78\x1b\x2c\x6f\x29\xf7\x8a\x30\x0e\x8b\xe7\x3e\x40\x22\xbd\x02\x24\xd2\x26\x87\x3f\xc7\x3b\x2a\x1c\xfb\x00\xd3
\xd4\x84\x78\xaf\x0f\xd3\x94\x29\x01\xad\x42\xde\xa9\x97\x07\xaf\x8c\xe8\xb2\x38\xb1\xd7\x84\x65\xaa\x50\x97\xd6\xd1\x1b\x97\xe1\x32\x2d\xe3\x8b\x46\x74\x79\x5d\x94\xa4\xf5\x28\xb3\x16\x00\x52\xe5\xdd\xd9\x12\x38\xa4\xfa\x6d\x68\xc9\xb9\xd9\x65\x55\xcf\x7a\x3d\x7b\xfd\xca\x9e\x75\x0c\xcc\xf5\x8a\x7c\x32\x7e\x38\xae\x42\x9f\xbc\xb9\xe1\x66\xc5\x3e\x1d\x17\x83\x27\x02\x4d\x78\x12\x3b\x10\x8e\x8c\x5a\xd9\x07\xb2\x4a\x88\x8c\x40\x6e\x33\xee\x67\x24\x32\xd6\x66\xde\x88\x6f\x59\x49\x4f\xb6\x89\x30\xdd\x1d\x08\x9a\x5d\x78\x51\x32\x49\xb2\x89\xff\x64\xa5\x76\x21\x8b\xee\xff\x25\x73\x2c\x50\x08\xa2\x06\xd5\xd3\x5c\xe9\xf7\x5e\x31\xb9\x65\xaa\x87\xe7\x1c\x15\x55\x2d\x76\x0d\x9f\xc1\xaf\xcf\xd4\x39\x62\xb0\xff\xc5\xa5\x51\x4a\xb7\xba\x46\x91\xca\x32\xb3\x6c\x90\x0c\xb7\xd0\x31\x71\x7b\x70\xa4\x29\xfe\x32\x98\x61\x81\x93\x84\x24\x54\x4e\xf7\x96\x0c\x7d\x51\x0c\x57\xeb\xb3\x2a\xb8\x71\x91\xb1\x74\x3a\x34\xac\xe8\x26\x62\x9b\x6c\x2a\x8e\x44\xca\x7c\x68\xb7\x6c\x63\xb2\x26\x9e\x29\xdc\x0b\xe0\x55\x8b\x26\xd0\x2d\x79\x84\xa9\x60\x44\xd6\xf6\xa6\x25\x51\x2a\xa8\x9a\x0f\x6c\xab\xdf\xe6\x07\xee\xde\xbe\x79\x61\x5f\x5c\x1e\xe1\x77\xa8\x06\xee\x7b\x59\x6b\xe1\x19\x11\xd0\x9e\xcb\x35\x9a\xf2\xda\x19\x5b\xd4\x0a\x92\xf5\xf8\x82\xf4\xef\x85\x6b\xbb\x2e\x71\x1a\xbf\x0c\xbc\x8a\xb2\x41\x54\x66\x8e\x55\x87\xb5\x0a\x77\x6b\xd9\x22\xf7\x8c\x3c\x55\x13\x45\xdf\x43\x77\x1f\x5b\x36\x62\x86\xd6\x13\xf6\x42\xe1\xe0\xaf\xcd\x37\xc6\x2b\xf9\xaf\x68\x76\xe3\xcd\xd3\x62\x1d\x55\x25\x5f\x2d\x99\x6c\xc7\x7b\xab\xc1\x8c\xbd\x8f\xec\x68\xda\xfa\xa0\x0b\x91\xce\x14\x1d\x2e\x42\xfb\x38\x69\xb0\x83\xd6\xbd\x9d\x04\xca\xcc\x5d\x98\xa5\xf0\x59\xd3\xcf\xb7\x20\x89\xed\xea\xb4\xfe\x6f\x71\xd4\x1c\x42\x92\x41\x98\xf2\xeb\x18\x6f\xa6\x54\x29\x57\x28\x61\x1c\xf0\x9a\x3b\x8b\xbe\xe9\xf7\x2e\xdd\x05\x43\x87\x65\xe3\xa2\x3a\x7b\x64\x1d\x89\x5e\x08\x62\xc4\x42\x68\x54\xf4\x4e\xce\xbc\xfa\xd0\x73\x6d\x48\xf4\x97\xb2\xdc\x1c\xad\x3c\x50\x25\xb3\xb6\x7f\xe6\x1b\x23\x9c\x48\x72\xa2\x07\x86\x6e\xc1\x8a\x43\xf2\x2b\x46\x2f\x02
\xcf\x66\x44\x3c\x32\x5b\xc5\x02\x01\x27\xce\x13\x33\x7e\x5d\x8a\xaf\xa5\x01\x19\x44\x38\x9a\x1c\x68\x8f\x30\x14\x23\x45\x13\x12\xbb\x7a\xe9\xe2\xf6\xb8\x75\x1b\x87\xfd\x1a\x9b\xd5\x1b\xb9\xb6\x75\x27\xf6\x23\x49\xa4\x25\x4a\xd6\xde\x7d\x46\x84\x9e\xb5\xe6\xe1\x67\xc2\x10\x1d\xb9\x79\xd8\xdc\x25\xf4\x02\x91\x39\xcd\xfa\xcf\x98\x26\x06\x80\xc0\x7d\xda\x29\x81\x26\xfc\xf0\xc8\x4c\xb8\x9f\x45\x85\x0a\x5d\xca\xa8\x9c\x68\x49\x9d\x42\x4c\x16\xcc\x8c\xba\xc2\x21\xf6\xbc\xce\x69\xee\xea\xc7\x97\x4b\xd0\x67\x2a\x38\x9b\x42\x91\x90\xc5\xa5\x72\xe4\x93\x44\x65\xc7\xa3\xb2\xc4\x73\xa5\x46\x1c\xc7\xb2\xe8\x7c\x35\x66\x25\xfd\xa3\xe0\x76\x39\x2d\x54\x45\x46\x1e\xac\x12\x24\xb1\xba\x8e\x7e\xcb\xf4\xdf\x50\xda\xb1\x58\xda\x51\x4d\x9b\x36\x96\x77\x64\x87\x78\xdd\x12\x8f\xba\xed\xdf\x85\x66\xbb\xc3\x52\x8f\x57\xae\x89\xd8\x4f\x39\xc4\xeb\xd6\xaf\xec\xa3\x74\x25\x14\x78\xbc\x62\x81\x47\x63\x4f\x6d\x31\x37\xbd\xfe\xd8\xae\x55\x1c\xb1\x02\xcc\xaa\xea\x2b\x9f\x89\x12\x34\x92\xbb\x90\x0f\x72\x86\x1b\x66\xf5\x81\x15\x38\x5b\xa1\x35\xe9\x07\xb2\x20\x28\xe4\xc9\x65\x1d\x2e\x87\x82\xe0\xa7\x98\xbf\x2c\xf8\xea\xa4\x8f\x26\xf2\x99\x6b\xb5\x47\x90\x88\x4a\x52\xc8\xe4\xa1\x12\x31\x22\xad\xb3\x13\x3f\xb2\x09\x25\x02\x8b\x68\x02\xd5\x9d\xf9\xc6\x98\x2a\x61\x03\xe8\x64\x72\x39\xfc\x68\xd7\x1a\x9b\xde\x80\xee\x65\x0f\x53\x86\xcf\x67\xf7\x5c\xcf\x64\x6a\x5e\xc9\x94\x19\xab\x65\xf8\x2e\xb9\x46\xdb\xbf\x6d\x21\x42\x46\xec\xbd\x16\x23\x64\xc9\x54\xde\x1b\x0d\x0b\x12\x72\x6e\x08\x45\x09\x7b\x2a\x4a\xa8\x20\xf1\x7a\x85\x09\x1b\xb9\xfc\x0e\x9f\x33\xed\xbe\x7c\x88\xbc\xe9\x55\x49\x6b\xe9\x70\xb0\xf7\xa3\x57\xb9\xe6\xa6\x27\xf0\x97\x8c\x29\x8c\x46\x2c\x34\x9f\x0d\x49\x1c\x83\xa4\x55\xdc\x76\x68\xcf\x79\xc7\xb9\x07\xf4\xdd\x8b\xa5\x66\x76\x9c\x70\x36\x96\x34\x36\x60\x33\x33\x0c\xbd\x8a\x7d\xe7\x05\x80\x2b\xc0\xfe\x26\x09\x11\x2e\x2a\x21\xd0\x9f\x24\x65\x16\x4d\x32\xfb\x5b\xcc\x89\x64\xef\x95\x71\x16\x60\x36\x47\x4f\x8c\xbf\x24\x24\x1e\xc3\x0e\x95\x27\x73\x8a\x28\x39\x41\x54\x65\xaf\x09\x40\x63\xe0\xa9\x7a\xd4
\x73\x87\x5c\x3b\x63\x01\x10\xfb\xae\xb0\xdd\x2b\x3c\x09\x2c\xbf\x39\x43\xa8\xc7\xd0\x08\x47\xea\x04\xc9\x74\x98\x8f\x1f\x73\xd3\x5c\x5e\x5b\xdf\xde\xc2\xf3\x41\x42\xce\x7c\xc5\xc7\xab\xcf\x86\x93\x0e\x9a\x5d\x3b\x09\xc5\x5b\xe5\x16\x3e\xe3\x6d\x20\x56\x3f\xa7\xd2\x26\x61\x20\xce\xb2\xa3\x6f\xe1\xa5\x32\x8c\x6c\xc0\x3b\x35\x78\xd3\x8c\xc7\xb5\xbe\xce\xd2\x52\xd6\x9d\x4b\x9e\x08\x6a\x15\x25\x1b\xa8\x82\x71\x0d\xb9\xb5\xd6\x24\x95\x20\x78\x6a\x83\x03\xfa\xaa\x01\xb5\xc6\xa4\x81\xea\xd9\x53\x61\x34\xcc\x75\xb6\xf8\x8a\xb2\x27\xbd\xbb\x39\x2a\x38\x07\xbc\x64\xfd\xe5\xaa\x4d\x9b\xe9\x1b\x8f\x5c\x70\x66\x02\x84\x5b\xe9\x9d\x74\xcc\x70\xb2\xa6\x8f\x63\x81\x72\x8b\x31\x3d\xa7\x67\x59\x75\x41\x6b\x11\xc6\xd9\x87\xcc\x17\xd7\xf2\x21\x95\xd6\xeb\xeb\x7b\x18\xc5\x64\x46\x58\x4c\x58\x34\x07\x16\x61\x80\x1c\x24\x18\x4e\x10\x86\xf7\x70\x72\x86\x2e\x4d\x7d\x51\xa6\xe1\xd9\x6b\x1d\x2e\xf4\x29\x66\x74\xa4\xed\x04\x70\xc2\xda\x59\x3e\x32\x33\x4d\x17\x03\x21\xb9\x77\x35\xa3\x58\xd5\xce\xe8\x1b\xe4\x7a\x4b\x54\x66\x56\x7c\x1f\x2d\xbf\x70\xe0\x6b\xcb\x76\x47\x0f\xe7\x7a\x30\xc8\x74\x78\x0a\xff\x5d\x28\xb8\x73\x40\x45\x39\x8a\x0e\x49\x08\xb8\x03\x6d\xc4\x0b\x2e\xc6\x3a\x60\xbd\x5d\xc4\xed\x56\xd4\xb1\x78\xdf\x28\x18\x35\x53\xca\xe8\x34\x9d\x7a\xc1\x3b\xd3\xb1\x21\xb2\xfe\x4b\x53\x89\x32\xd3\x76\x40\xe4\xc0\xdb\x91\xbe\x5c\xd9\x1c\x8d\xe9\x33\x61\x8f\x6c\xc6\x29\x53\x67\xe8\x9a\x2b\xe2\xb5\xc8\x30\xd0\x59\x7c\xa6\xe8\xd4\xa0\xbd\x0a\xa2\xcf\x81\x01\x05\x07\xa0\xcd\x09\x56\x27\x28\x4e\xe1\xa8\x32\xa2\xb4\xe8\xd0\x37\xae\x82\x9d\x81\xfc\x70\xf1\xc8\xcc\x4d\x37\xc2\x34\x49\x05\xb1\x3a\x2b\x36\x75\x41\xf9\x94\xf3\x99\x59\x24\x38\x6f\x11\x53\x3a\x9e\x28\xbd\x45\x5a\xc7\xb3\xf1\xc6\x89\x96\x46\xfc\x91\x0d\x09\xc2\x68\xc6\x25\x55\xf4\x39\x8b\x5f\xd2\x11\xc2\x52\x82\x07\xe5\x0c\x5d\x16\xfc\xff\x54\x82\xe9\x5d\x97\x57\x4c\xd9\xc0\xfa\x9e\xeb\xeb\x91\xb6\xde\xc8\xc2\x57\x2c\x95\xf1\x50\xf2\x24\x55\x7e\x08\xb6\x7a\x6f\x73\xd7\xb8\x6b\x5c\x00\x0e\x62\x3e\x7a\x64\x8e\xaf\xe5\x19\xea\x48\x24\xb9\xde\x25
\x69\xb6\x32\x12\x54\x11\x41\x0d\x8a\x15\x51\x66\x13\xb2\x73\x9a\x9d\x81\x29\x16\x4f\x5a\x85\xf2\x3d\xf0\x06\x53\xb5\xe0\xed\x18\x1a\x0d\x09\x60\xbd\xfc\xed\x00\xd7\x3f\x62\x9c\x9d\x32\x32\xc6\xab\x76\xe4\x91\x15\xb6\x04\xfd\x89\x8e\x72\x83\xb4\x2e\xe6\xe8\xd1\x6e\x00\x99\x4f\x75\xbb\x64\x3e\x5c\xb7\x49\xa3\x84\xe3\x15\x61\xe3\x51\x7e\xe8\xd1\x3f\xf9\xd0\xcc\x51\xdb\xfd\x5c\x81\x16\xa8\xcd\xab\x11\x17\x64\x82\x59\x7c\xe2\x36\xab\x38\x37\xb8\x19\xad\xab\xcd\x19\x63\xa0\x09\xfe\x7f\xec\xbd\x6b\x73\xe3\x48\x72\x2e\xfc\xdd\xbf\xa2\x3c\x7e\x23\xba\xfb\x98\xa2\xa6\x67\x8f\x1d\xe3\x76\x4c\xc4\xab\x96\xd4\x33\xdc\x51\xab\xb5\xba\xcc\xac\xcf\x72\x83\x5d\x04\x8a\x64\xad\xc0\x2a\x0c\x0a\x90\x9a\x6b\xef\x7f\x3f\x51\x99\x59\x17\x80\x00\x09\x88\xea\x9e\xb1\xcf\x7e\xb0\x77\x5a\x04\xaa\x0a\x75\xc9\xca\xcb\x93\x4f\x3a\x12\x65\x81\x5c\x54\x5c\x45\x6b\x41\x86\x1b\x2d\x45\x58\x87\x41\x77\x85\x6f\x0d\x6a\xbf\xa0\x03\x02\x45\xde\x26\xa7\x23\x6e\xe4\x3a\xcf\x42\x4e\x57\xe4\x1b\x5d\x58\x15\xcb\xc9\x48\xfd\x00\xae\x2b\x67\xb5\xc1\xad\x4e\x2b\x67\xf7\x59\xcb\xc8\xbd\x20\x85\x5b\xc3\xf9\xbc\xb0\x0c\x68\x24\xc2\x5e\x1a\x61\xff\x59\x8a\x60\xf6\xa1\xb2\x3e\x55\x4e\x05\x79\x05\x52\x86\x9a\x8d\x9c\x67\x56\x85\x46\x9a\x5b\x9a\x3f\x96\x60\x90\xbb\x76\x4e\xe8\x30\xb8\x47\x5b\x2f\xaa\x52\x5a\x35\xfb\xad\x04\x86\xae\xb3\x03\x69\xf7\xa5\x4a\x45\x67\x31\xab\x5e\x52\xa3\xeb\x6e\x41\x81\x3a\xdb\x5f\x7f\xa2\xb5\x81\xb9\x54\xe9\xac\xd4\x58\xc9\xc5\xb7\x76\x60\xb4\xb2\xdd\xf1\x51\x70\x65\xec\xb3\x4f\x8a\x15\x39\x20\x09\x38\x53\x0b\x91\x89\x07\x1e\x6e\x5f\xa0\x0f\xf5\x83\x07\xdf\x48\x2f\xdf\xc3\x07\xdb\xd8\x03\xcf\x28\x29\x86\xe2\xf0\xa6\x7b\x33\x4c\xce\x0e\xc2\xa7\x52\x2b\x6d\x6b\xd5\xad\xbe\xb8\xbe\x7f\x14\x9b\xf6\x89\xdd\x43\x90\xb8\x6b\xf5\xfd\x9c\x0d\xf0\x83\x5f\x85\x77\x76\x7a\x2b\x56\x52\xdd\xa3\xbc\x08\x2b\x03\x0a\x64\xc9\x97\xa4\x87\xba\x4f\x1b\x31\xf1\x29\x11\x79\xc9\x64\xf9\xc2\x3e\x71\x2f\x36\x47\x28\xff\x73\x2e\x8b\xf1\x54\x9d\xc9\x05\x78\x46\xcb\xd0\x96\x61\x8a\x97\x95\x15\x19\x1b\xb4\xc0\x53\xff
\x0c\x4d\x88\x61\x2f\xa3\x00\x48\x6a\xba\xae\x8d\xdd\x5b\x7f\x47\xc2\xbb\x5c\x87\xc9\xd8\x47\x31\x59\xd0\xfd\x60\x25\x0e\x40\x86\xd4\x12\x66\x43\x11\x0b\x52\x63\x0b\x1f\xd9\x5b\x11\x66\xc3\x5e\x29\x86\x6e\x65\x15\x78\x4b\x95\x6e\xbc\xd2\x76\x05\x74\xee\xe3\x1f\x6b\x1b\xf0\x57\x48\x97\xbb\xda\xaa\xb5\x0e\xff\x69\xaa\xc5\x42\x7e\x02\xff\x85\xd3\x19\x9c\x8d\x99\x14\xda\xd8\xbd\x01\x5a\x29\x73\x47\x09\x21\x03\x87\xa4\xce\xb5\xbe\x69\xed\xe9\xc1\xa4\x0f\x9d\xb3\xfd\x87\x4a\x14\x07\xcd\xb7\x17\x1c\x43\x80\xa7\x91\xcc\x6a\xf7\x06\xb8\x46\x4b\xde\x13\x7d\x16\xb7\x7a\xcb\x3b\xa6\x6e\x3f\xd1\x7d\xe7\x3d\x64\x7f\x1c\x3c\x90\xf8\x7e\xde\x23\x8f\x8c\x67\x76\x75\xf7\x06\x95\x9f\xb2\x17\xc6\x88\xa8\xf4\x39\x41\xe4\x7c\x52\x3b\x77\xe8\x20\x0c\x81\x61\x75\xb2\x92\x6a\x6a\x44\x3e\x19\x6a\x4c\xaa\xe5\x54\xb9\xb9\x35\x23\x86\x09\x01\xfe\x60\x63\x5b\xb5\x2a\x06\x3c\x7a\xd5\x6f\xec\x7e\xce\x73\x44\x50\x28\x61\x8c\x55\x81\x4c\x59\x70\xa9\x28\x5a\xe7\xe6\xc7\x4c\xad\x34\x69\x64\x24\x8c\xc0\x63\x34\x72\x92\x72\x14\x89\xd5\xa9\xc2\xdc\x22\xf6\x0d\x7b\x59\xf2\x25\xa2\x7b\x80\xa7\x94\x67\xc0\x70\x0a\xf6\x20\xf9\x5f\xa2\x34\x10\x7f\x22\x65\xfa\xea\xcd\xae\x3e\xd1\x5b\xf4\x12\x9a\x81\x43\x6e\xe7\x30\x4c\x90\x5c\x84\x7f\x88\xf4\xd5\xae\x96\xc2\x4b\xf7\x62\x33\x6a\x4e\x72\xf7\x2d\x7e\xcb\x0f\xc2\xe2\x7e\xae\x6b\x1c\x06\xdd\x3f\x1c\xcd\xe7\x22\xfb\x29\x7c\x28\xdb\x29\x8a\xde\x4a\xc5\x0f\x93\x41\xad\xc3\xeb\x97\x6b\x30\xdf\x74\x55\x28\x6c\x11\x3d\x4f\xe6\xd6\x39\x41\xab\x45\x30\xdb\x1d\xd9\x66\xae\x7e\x25\x07\x7c\xeb\x4a\x64\x79\xa4\x26\xd8\xfd\xe2\x69\x82\xb1\xb2\x93\x35\x5d\xd7\x95\x42\xca\x67\xc4\xf7\x3c\xd2\x49\x27\x91\x11\x1a\x1f\x4f\xd5\xc4\x5e\xd0\xa6\x2c\xb4\x5a\x66\x1b\xc6\xd3\x07\x69\x42\xc9\x42\x7b\x20\xab\xb5\x28\xa8\x0b\x69\xd0\xbe\xa2\x72\x5f\xdc\x5d\x6c\x76\x6c\xf6\xea\x03\x35\xd4\x95\xd5\xb4\x7f\x44\x0b\xd2\x8e\xd2\x38\x7c\x5c\x4b\x82\x03\x2d\x6e\x43\x76\x7e\x61\x27\xf5\x4f\xb1\x1f\x9a\xad\x83\xcb\xda\xc9\xcb\xe3\xa6\xc3\x9a\x66\x7d\x87\xb3\x7a\xf0\x85\xd0\xf7\x22\x70\xf5\x31\x2a\x4c
\x68\xb3\xfd\xb8\x10\x26\x0e\x6e\x90\x31\xdd\xf8\x40\x1a\x35\xea\xbd\xb1\xb3\x5a\x48\xf0\x74\x99\x92\x97\x32\xa1\x5b\x40\x17\xe4\xaf\x27\x0f\x4a\xf7\xd2\x1e\x6a\x7d\x9a\x84\x67\xdb\x2b\xbc\x03\x3d\x81\xcf\xef\x76\x69\xd3\x71\xc3\xb6\x77\x52\xf8\x24\x3a\xcb\x86\x14\x24\x6c\x7c\xf9\x69\x78\x7d\xf7\x88\x42\x3f\x76\x01\xdc\x5a\xc0\xa9\x41\x57\x14\xcf\x28\x30\x68\x4a\x5a\xa5\xf8\x21\xbc\xd4\x36\xe4\x28\x98\x2a\xbd\x80\x92\x95\x59\x57\x8e\x42\x5e\xe8\xb5\x1c\x52\x33\x05\x61\xfb\xd7\x0e\xe5\xb1\x27\x66\xe6\xb0\x20\xe0\x68\xc5\xed\x45\x3d\x02\xfb\x06\x27\xe7\xe9\x8e\x33\xb4\xe6\xf9\x93\x26\x7c\x1f\xc6\xe9\x84\xad\x11\x60\x46\xb3\x07\xec\xe9\x02\xd2\x90\x61\x92\x1f\xf9\x26\x10\x1d\x75\x55\xc3\x50\x83\xb6\xc3\x9d\x7d\x7c\xa2\x16\x7a\xc0\xe1\x0c\xc4\x44\x74\xfa\xb8\xdb\xb3\xd1\xf9\xf3\x98\x1b\x5c\x7d\x9c\xd3\x3e\xe7\xf1\xb4\x6d\x53\x0f\x3e\x99\x6e\x06\x3f\x67\xc4\x3d\x16\x22\xd1\x3b\x7f\x1b\x72\xb7\xd6\x8f\x56\xd4\x22\x83\xe1\xec\x9e\xaa\xf7\xb5\x7d\xf8\xec\x73\xd4\x68\x07\x7e\x0b\xa9\x81\x57\xed\xad\x7e\x81\x39\xa3\x43\xd2\x6b\xb2\x0e\x64\x62\x1b\x56\xd5\xc3\xf5\xe8\x6b\x78\x1c\x6c\xc9\xed\x9b\x0c\x90\x66\x86\xac\x86\x90\x67\x44\x84\x0f\x0b\x99\x09\x33\x66\x93\x96\x70\xbd\xa3\x5b\xf0\xe9\x01\x98\xf8\xe9\xb4\xa7\xaa\x90\x51\x99\x7f\xa7\x23\x31\x09\xe5\x06\x63\xc8\x52\x14\x9e\x82\x40\xf9\x4a\x3f\x62\xae\x65\x21\xad\xcc\x42\x65\xb5\x84\xe0\xa5\x95\x05\x92\x62\x7f\x18\x3a\xf5\x2f\x68\xcc\x80\xb1\x66\x8e\x0f\x7b\xc6\x1e\x88\xe6\x92\x3e\x47\xc1\xd6\xfe\x6c\x16\xae\xd7\x5b\xfb\x46\x1f\xa3\xc0\x3d\x7b\xc0\xe8\xbc\x96\x3f\xdc\xa5\xfb\x0e\x5e\x75\x2e\x7c\xce\x16\x85\x00\x2b\x7b\xed\x19\xf2\xb0\x44\x86\xd6\x70\xdf\xdd\x9c\xfd\x78\x7c\x37\x61\xa2\x4c\x58\x26\xef\xc5\x54\x25\xe6\x01\x8c\xbe\x5f\x2a\x51\xda\x3f\x77\x38\x81\xe4\x5a\x28\x03\x92\x40\x96\x3d\xed\x35\x37\x31\xf6\x7f\xcf\xea\xef\xf7\xb1\xca\x3d\xab\xab\xdd\xbb\xae\x7a\x25\x6c\x53\x28\xd0\x87\x53\xdb\xe2\x65\x7e\x8b\xee\xf3\xf3\xb6\xda\xf6\x4f\x48\x7e\x57\x7f\xa9\xd4\x40\xa5\xeb\x34\xbc\x14\x8d\xa2\x43\xa7\x5b\xe7\x1c\x2a\xd7\x0c
\xcb\xaa\xc7\x77\x5a\x5b\xdf\x27\x44\x02\xc9\x91\x43\x4a\x08\xdf\x0c\x2b\x0b\x21\x40\x84\xf8\xfd\x44\x77\x3d\xf1\xea\xf9\x0f\x8b\x5e\x1a\x4f\xd5\x7b\x87\x9f\x0c\x7f\x35\x21\xaa\xb4\x9e\x47\x05\x7d\xea\xad\x40\xb3\xa9\x34\xfe\x0f\x50\x9e\xd1\x54\x59\x89\xf5\xa9\x17\x52\xf1\xcc\x0f\x14\x7f\x69\x93\x12\x05\x57\xc9\xea\x50\x40\x84\x5c\xcc\x44\x36\x44\x13\x9d\x2c\xce\x33\x63\xf7\x77\x72\xdf\x71\x3a\x9f\x52\x81\x3d\x7c\x0c\xc6\x16\xa9\x8a\x2b\x0b\x80\x0a\x9e\x61\x7d\x68\xc1\x00\x71\xd7\xe4\x42\x40\xba\x37\xbb\x8a\xa4\xa9\x23\xe0\x0e\x93\x90\x7d\x82\x21\xf4\xc2\x78\x39\x55\x45\xa5\xc0\x0b\xee\xf1\xb7\x9c\x85\xea\x3f\x89\x43\xc3\x10\x36\x69\x69\xc5\x04\x16\xd7\xc1\x87\xad\x7d\xa6\x2b\x03\x91\xc7\xb5\x28\xed\x05\xf5\x52\x8c\x97\x63\x02\xc0\x8f\x58\x5e\xc8\x35\x80\x07\x7c\xec\x20\x5e\xba\x53\x5e\xf2\x4c\x2f\x9f\xdb\xab\xf4\xc4\x64\x2a\x37\x0c\x36\x39\xb3\x93\xbf\x14\x4a\x14\xf0\xa1\xe0\xcb\x6e\x3d\xc2\x3d\xbc\xdc\x1d\x92\x1b\x62\xc6\x14\xe6\x37\xde\x63\xc1\xab\x52\xaf\xad\x7d\xcb\xb3\x6c\x33\x42\x7c\x81\x60\x2b\x6e\x56\x6e\xa1\x31\x34\xdc\xe7\x6e\xa2\xc9\x3d\xe5\xc9\x4a\xdc\x94\xbc\xac\x5a\x31\x78\x8d\x51\x7e\x25\x54\xb5\xfe\xea\x0d\xfb\x53\xf8\xc6\xd3\x93\xd3\x1f\xce\x67\x67\x93\x9b\x93\xb7\x17\xe7\x67\xd1\xf7\xd0\x2f\xef\x27\x37\x37\xdb\x7f\xfd\x61\x72\xbb\xfd\xc7\xab\x0f\x57\x77\x17\x27\xb7\x6d\xad\x5c\x7c\xf8\xf0\xe3\xdd\xd5\xec\xdd\xc9\xe4\xe2\xee\xfa\xbc\xe5\xd5\xbb\xdb\xee\x1f\x6f\x7e\x9c\x5c\x5d\xb5\xb5\x7a\xfe\xd3\xe4\xd4\x76\x47\x7f\xff\x73\x74\xec\x00\x24\x61\x67\xa0\xe3\xfb\x9a\x27\xf3\x88\xd5\x1f\x7c\xc3\xee\x9a\x15\xce\x28\xe5\x0e\xe9\xe2\x1e\xb9\xb1\xc2\x0d\x32\x3e\xc1\x05\x1b\x66\xab\xeb\x55\x44\xa5\x27\x2b\xc1\x32\xad\xef\xab\x9c\x64\x1e\x7a\xdb\x95\x46\x8f\x90\x30\x51\x6b\x3f\x4c\x6e\xdf\x6c\x57\x5a\xf3\x8d\x45\xc4\xb8\xde\xb9\xfc\xc8\x91\x24\x02\xe4\x2c\x06\x17\xa9\x02\x57\x00\x29\x44\x3d\xf8\x25\xdb\xd5\x0f\xb6\xc6\x55\xd9\xe8\x26\x4d\x03\x9d\x16\x7c\x58\xd4\x70\x7d\xc1\x77\xcd\xa6\x9f\x0e\x2c\x31\xcb\xe6\x22\xe1\x15\x62\xf7\xed\x05\x56\x14\xba\x88\x07\x1c
\x36\xca\xf3\x35\x4a\x1b\xac\xb5\xc1\xc6\x9a\xd9\x0f\x37\xf7\x32\xcf\x6b\xcb\x4e\x1b\x71\xff\xca\x43\x51\xbf\x07\x99\x94\x22\xfd\x6a\x5b\x2f\x0a\x6c\x0b\xa8\x37\xdb\x53\x6d\x87\x1c\x9d\x75\xa9\x96\xe8\x4b\x70\xe5\x15\x57\x1b\x8f\x37\x03\x78\x73\x00\x7c\x43\xb9\x17\x7b\xd7\xf8\xf2\x77\x12\x00\x64\xbc\x64\x8f\x02\x88\x87\x2a\xaa\x2f\x8b\x36\xbd\x95\x19\xd0\x1d\x22\x3f\x5c\xb5\xe8\x1a\x21\x51\xa7\x90\x7f\x0e\x45\xde\xbe\x6f\xc4\xb0\x20\xde\x5e\xf6\x98\x33\x6c\x14\xa4\xbe\xcb\x0c\x81\x11\x3f\x67\xd0\xaf\xe5\xa6\xdb\x73\x09\xd9\xeb\xa0\xcf\x78\x1c\x63\x5f\xad\x0c\xce\x80\x10\x7c\x5c\x2a\x65\xef\x5c\xdd\xea\x94\x6f\xec\xe6\x00\x10\x89\xa9\xf2\x5c\x17\x25\xeb\x68\x03\xb1\x0a\x38\x3e\xb8\xcb\xe8\x3b\xbc\x88\x84\x46\xac\xe6\x62\x5a\x2a\xee\xf5\x23\x11\xa3\x79\x8d\x62\x67\x51\x1a\x19\x18\x98\xbe\x3a\xea\xba\x66\xaa\xd7\x76\x68\x9b\x52\x7d\x48\x1e\x6e\x6e\x15\x87\xbe\xc5\xba\xdb\x7a\xff\xe0\x5a\x68\x5d\xf2\x4c\x2c\xca\xd9\xc0\x60\x17\xb4\xa8\xba\x78\x1f\xe5\x72\xf5\x0c\x2d\xf6\xb7\x3e\xbe\x21\xf8\xbb\x35\x39\x22\xcf\x43\xa1\x75\x89\x7a\x6f\xb0\x8d\x98\x9b\x4d\x70\x5b\x50\xa7\xc4\x98\xe0\x95\x4b\x6b\x4b\x20\x6a\xd0\x93\x0b\x8c\xa7\xea\x1c\x60\xc6\xc1\xc0\x71\x44\x0a\x60\x5d\xec\xb5\x2b\x1c\x81\x19\x14\xdd\xf9\xa2\x39\x4d\xdd\x75\x20\xc2\xbe\x47\x70\xaa\xc8\x36\x9e\x0d\x2c\x65\xb5\xf7\xfa\x9c\x1e\xf4\xa6\x3b\xd5\x12\x3f\x18\x8f\x8e\x29\x45\x4e\x1e\x7f\xfc\xce\x80\x87\x87\x68\xb3\xed\x6a\xcc\x7e\x76\x1e\x25\x48\x0f\xf3\xe9\x52\x0e\xe1\x9c\xf1\x8d\xa3\x8e\x6f\x9b\xd8\xe7\x60\x63\x7f\xee\x84\xb1\xdd\x13\xec\x69\x57\x5b\x66\xb9\x66\xd8\x2b\x85\x9e\xde\x01\xa0\xb0\x53\xff\xd2\x8d\xd8\x8d\x9d\x7d\x07\xc5\xfa\x29\xff\x00\x74\x16\x95\x6d\xfe\x11\x17\x0b\xf9\x5a\x1c\x48\x83\x8a\xa7\x53\x64\xd6\x9e\x1f\x88\x2c\x22\x9d\x0b\x5b\xc8\x2c\x03\x3d\x60\xcc\x4e\xd4\xc6\xd1\x9d\xd8\xab\xd0\xc1\x90\xe5\x52\xe9\x7d\x4c\x0c\x1d\x9b\x29\x89\x36\xd3\x4d\xf7\x66\x42\xfc\x47\x60\xbb\x7a\x9e\x1d\xf5\x0c\xcc\x87\x56\xb6\xf0\xed\xba\x39\xfd\xf9\x0e\x07\x38\x05\xe2\xdb\xfc\x4b\xe5\x10\x6e\x0d\x37\x7a\xf1\x6f
\xed\x43\xff\xbe\xe2\x05\x57\x25\x64\xc6\x91\xd2\x5a\x88\x28\x41\x5f\x7c\x02\x14\xb3\x42\x07\x33\xfc\x29\x5e\x5c\x07\x25\x40\xf8\x99\x4c\x47\x4c\x8e\xc5\x18\x6a\x38\x17\x56\x97\x98\x87\x27\x57\x56\x73\x98\xaa\xad\x8c\x9f\x31\x3b\xc9\x8c\xa6\x37\x84\x4a\x32\x6d\x00\xc4\x3d\x8f\xe9\xf5\x61\xe7\x53\xb8\x6a\xbe\x01\xfb\x06\x96\x32\x34\xaf\xe9\x87\xe8\x45\x28\x45\x0c\xb1\xf6\x0c\x4e\x7a\xf8\xfb\x3f\x6b\x22\x0a\xee\xc2\x5f\x7c\xc6\xa2\x6f\x5b\xd7\xd0\x67\x5b\x24\x2c\x28\xbe\x6b\x81\xe0\x09\x58\x98\x90\x89\x15\xf1\x14\xb2\x97\xbc\x64\x99\xe0\xa6\x64\xaf\x5f\x0d\xc2\x9c\xb8\x0f\x0c\xd2\x95\x8e\x6f\xa0\x53\x70\x09\xb9\xb1\x72\xe7\x3b\x86\x0a\xd3\xbc\x28\x19\x67\x4a\x3c\xc6\xf9\x57\x1a\x52\xe6\x5c\xd9\x68\x11\x31\xc0\x60\xd6\x05\xf2\x57\x41\x4e\x33\x9a\x4c\x1d\x72\xc4\x15\x45\xa1\xb0\x2c\x0d\xab\x65\x67\x8d\x3c\xaa\x0d\x12\x16\xec\x43\x21\x35\x76\xc5\xcb\xa9\x22\xc9\xea\xe0\x28\x11\x19\xc2\x49\x96\xd5\xd3\x51\x39\x64\x5c\x2b\xfb\xc1\x76\xf4\xe9\xd8\x4f\xd0\x25\x98\x5f\x3e\x27\xb0\xe6\xff\x0b\x87\x05\xb3\x56\x3c\x2b\x68\xdc\x76\xab\xb6\xd3\xe6\xb7\xfe\x82\x4a\x70\x4b\xf7\x17\x7a\x29\x13\x9e\xf5\x50\x86\x45\xdb\x90\xf7\x1c\xac\xed\x58\xc1\x0e\xdd\xf8\xb9\x3b\xe8\xaf\x2a\xb7\xfb\xdd\xe1\x9a\x7d\xd4\x2d\x6e\xfc\x8e\xc5\x8d\x74\x8b\x43\x0c\x70\x9f\x9c\xfa\xa5\x22\xc9\xb5\xa1\x4f\x52\xa0\xc6\xd8\x2f\x05\x03\xd5\x84\x13\x1d\x98\xa1\x98\x46\x99\xef\x51\xa2\x2d\x81\x48\x51\xf0\xd1\x93\x1d\x11\xdd\xfc\xbf\xf7\xe7\x8f\xc2\xf7\xbb\x4f\xf1\xe0\xba\xed\x0f\xef\x56\xf6\x4e\xd2\xbf\xf0\x04\xf2\x61\xa1\x27\x97\x89\xbb\x4d\x5b\xea\x8a\xdd\x70\x08\x12\xb4\xaa\x87\x79\xa1\x13\x61\xcc\x98\x9d\xc3\x45\x43\xff\x64\x7c\xe1\x02\x1d\xd1\xc3\x53\x65\x2d\x13\xc7\x72\x18\xb5\x5f\xdf\xe2\x6d\x27\x00\x29\x93\x0f\x8a\x11\xad\xf7\x57\x32\xec\xb2\x26\x1c\x63\x33\xb4\x01\xc5\xcf\xd8\xf9\xf2\x0d\x4b\x75\x72\x2f\x8a\xe3\x42\xa4\xd2\xbc\x81\x98\x7d\xd9\x19\x2c\x5c\x5b\x6b\xfb\x60\x4d\xa3\x0b\x80\xb0\x87\x3a\xe2\x14\xfb\xa7\x94\x02\x97\x84\x36\x62\x72\x01\xe6\x84\xcb\x5c\xc6\x54\x3d\x47\x0a\x29\x54\x59\x6c\x10\xed
\xec\x5c\x59\x8d\x89\x70\x96\x86\x55\xda\xba\x72\xee\x8b\xe7\xc0\xf6\x3c\xf1\xb3\x6f\x57\xc2\x08\x07\x64\xc0\x8f\x2a\x35\x65\xfc\xa1\xb8\xc8\x79\xb9\x32\x40\xf0\x52\x9f\x03\x32\xba\xe0\x55\x3b\x43\x3c\x07\x1c\x04\x7a\x29\xc2\x4b\x9e\x86\xc4\x94\x32\xcb\xa6\x0a\x13\x37\x80\x8b\xe5\x45\x2b\x8f\x94\x7d\x75\xc4\x78\x9a\xb2\xff\xef\xe5\xbb\x8b\xff\xb8\x3d\x9f\x4d\x2e\xc1\xe7\x3d\xb9\x38\x7f\x35\xf2\x7f\xfc\x70\x77\xeb\xff\x8a\x1e\x96\x07\x51\xb0\x35\xbf\x07\x13\x4f\x19\x41\x29\xc6\x62\xaa\xe2\x91\x3a\x86\x2d\xfb\x8b\x11\x0e\x41\x4b\x6a\x8a\x27\x1a\xa7\x35\xec\xa2\xe7\x25\xe2\xd5\x01\xc6\xef\xb5\x7f\x65\xf7\x1e\x74\x9b\xc7\x77\xe1\xd4\x40\xc8\x24\xe7\x26\xa2\x5c\x22\xdb\x37\x6c\x38\xa1\x96\x52\x75\xe1\xfc\x84\x7a\xf8\x9c\x4a\xfc\x8f\x62\x03\x40\xf3\x2b\x2e\x8b\xde\x7b\xaf\x9d\x33\xd3\x9d\x18\x6b\xa7\x73\xd3\x3c\x54\x06\x75\x61\xcc\x49\xef\xc4\x92\xb6\xd1\x25\xff\xea\x9f\x4b\x24\xac\xe2\x53\x59\x38\x2e\x37\x9f\xf5\xec\x08\x4f\xfd\x45\x13\xf6\xe0\x54\xdd\x7e\x38\xfb\xf0\x86\x89\x8c\xcf\x35\x24\xbc\x12\xd4\xc8\x35\x41\x13\x96\xe8\x75\xd4\x50\x8d\xc7\x6f\xc4\xf2\xc0\xe3\x17\x3b\xd1\xc6\xd8\xc6\x1e\x3e\xbf\x5c\x17\xdb\x2c\x78\xcf\x6b\x02\xd2\xc7\x5e\xe9\xa2\xcf\xf5\x6f\x1f\xc3\xbc\x90\xdc\x1a\x72\x0d\xc9\x4b\x77\xf3\x42\x70\xe0\x78\xa1\xb0\x10\xf9\xf2\x09\x18\x9b\x65\xb5\xaa\xeb\xf6\xe0\x98\x31\x85\xf6\xc3\x93\x5a\xb1\x1f\xbf\x35\x6c\x5e\x95\x53\x55\x6f\x43\x2b\x76\xf2\xf3\x0d\x7b\xcb\xcb\x64\xf5\x6a\xaa\x20\x4b\xf4\xc7\x6f\x3b\x08\x47\x07\x73\x78\xdb\x39\x39\xe3\x25\xbf\xd0\x3c\x95\x6a\xd9\x46\xe0\x1d\xaa\x4c\x9e\xdf\x9e\xbc\x61\xae\xd8\x4f\xc8\x97\x2e\x1d\x71\x4e\xd4\x10\x08\x64\xf8\x10\x27\x45\x28\x67\xb0\x46\x72\x8c\x96\x19\x5c\x58\x53\x75\x8b\xcc\xe5\x56\xaa\xca\x92\xe5\x9a\x2a\x9d\x5a\xab\x0c\x39\xdd\xb9\xe3\x11\x10\xd9\x86\xd9\xd9\x81\x6d\xec\x17\x83\xf4\x31\xd0\x67\xb6\x85\xfd\x54\x81\x81\xee\x33\xb8\x33\x9d\xf0\x0c\xb0\x7e\x47\x91\x4f\xcf\x9a\xed\xba\x02\x16\x25\x00\xd9\xa8\x4d\x1d\x92\xeb\x89\xbd\xbc\x52\x16\x2f\x14\x38\x00\x60\x1d\x29\x0e\xb9\xd6\x56\xe2\x20\x63\x31
\x38\xdf\x32\x9c\x1d\xfb\xa2\x67\x30\xc6\x69\xb1\xbf\x7a\x72\x03\x5d\x29\xc7\xd8\x97\x80\xfb\x5e\x6d\x00\x16\x0e\xa5\x09\x35\x40\x4a\x82\x74\xa6\x4d\xb9\xb5\x8a\xfe\x4e\x8c\x5e\x9b\x2a\x44\x20\xd6\xd6\x25\xe6\xb8\x8c\x7a\xd7\x0a\x00\x92\xdb\x8c\x0a\x55\x4e\x80\x49\xd2\xf5\xf3\x42\x1c\x79\x9e\x80\xb4\x36\xa7\xf6\x86\x1d\xb3\xeb\xd8\xbc\x4e\x75\x52\xad\x5d\xfd\x11\xe0\x18\x20\x64\x1d\x5d\xa2\x7e\x87\xe0\xc5\xbe\x6f\xc7\x03\x97\x61\x29\x80\x64\xa9\xb7\x7d\x8c\x1b\xe6\x24\x7e\x75\x5b\x53\xef\x56\x7c\x41\x76\x1c\x86\x86\xc3\x86\x66\x79\xbd\xa5\x5a\x6b\x07\xb3\x77\x5c\x86\x1a\x09\xba\x00\x65\x4b\x7c\xca\x35\x38\xb9\x91\xc4\x40\xa7\x2f\x0c\x9b\x5c\x59\x0d\xc8\x5a\xbc\xfe\x0c\x56\xa6\x44\xd0\x1a\x66\xa3\xc3\xdb\x98\x86\x30\x62\x5f\xb3\x69\xf5\xf5\xd7\xbf\x4b\xd8\x27\xf7\x1f\xff\xfa\x2f\xff\xf2\xbb\x7f\x1d\x92\xa6\xe2\x0c\x72\x68\x37\xcc\x91\x2f\x3a\x5b\x57\x89\xe2\x15\xd8\x96\x54\x07\xac\x02\x1d\xc0\x03\xd9\x05\xba\x30\x49\x7c\x49\x27\xdc\xc4\x27\x93\xd5\x8e\x66\x40\x12\x40\x4e\x75\x4d\x42\x78\x65\x97\x34\xfa\x7f\xdc\x41\xe9\x3b\xb3\x47\xe5\x69\xd8\x29\x99\x79\xf5\xda\x36\xc2\x5e\x92\xff\xaf\x84\x00\xe2\x2b\x77\xc1\xe9\x2c\x15\x05\x8e\xc9\xbb\xec\xbc\x23\x11\x84\x83\xf8\x94\x67\x3a\x75\x45\x04\x02\x63\x86\x04\x05\xe1\xfc\x13\xb7\x92\x7b\x44\x64\xb3\x94\xb7\x0a\x91\x97\x05\x4f\x04\xe5\x58\xbf\xfc\xf4\xc6\xfe\x6d\xc4\x36\x6f\x00\x9c\x3a\x62\x7f\x7d\x43\x9c\x92\xbc\x28\x67\xf6\x4f\xaf\x9c\xae\x4d\x4d\xc0\xa0\xa5\x61\x2f\x8e\x1f\x78\x71\x0c\xe2\xf9\x18\x47\xf4\x22\x24\xa9\x63\xf5\xec\x58\x37\xcf\xb4\xbe\x27\xe0\xee\xd6\x8b\xc7\x8e\x9e\x18\xb6\xb7\x8f\x9b\xe0\xd2\x7b\xfa\xaa\x92\x1d\xc1\x03\x82\x8d\xf3\x39\x1b\xff\xc5\x68\xc5\xc6\x1b\xbe\xce\xe8\xaf\xee\x57\xc2\x15\x73\x43\xb9\x76\xa9\xc7\x08\x65\x1b\xf4\x94\xbe\xcd\xf4\x1c\xbe\xea\xbd\xfb\x52\x44\xe6\xc2\x40\xc3\xed\x13\x2e\x2c\xfa\x10\x47\xd7\x02\x2c\x9b\x6b\x5d\xe2\x23\x94\x36\xbb\xfd\x55\x9f\xfc\x90\xfe\x88\x71\x61\x98\x14\x97\x1c\x88\xce\x61\x8f\x8a\xb3\x8d\x7e\x62\x2f\x49\x04\xbd\xb2\x77\x0c\xc1\xa0\x71\x1a\xda\x3a\xd8\xf8
\x0e\xfe\x23\xea\x40\x2a\x86\xe9\x9e\x3b\xde\xfc\xeb\xf1\x78\x3c\xf6\x6f\x03\xb7\xd3\xff\x61\xb2\x34\x22\x5b\x60\x4b\xee\x06\xdb\x4c\xd5\x7b\x57\x9e\xcc\x39\xaf\x03\xf1\x79\x5e\xe8\x52\x27\x3a\x63\x47\xc1\xa1\x9b\xea\xc4\xb0\x7f\xb2\x6a\x6d\x34\x95\xf0\x47\x6b\xc7\xb5\x9f\x29\xaa\x87\xf2\x85\x0e\x15\x39\xc4\x9b\xc7\x2a\xe6\x3a\xf6\x86\x2d\x37\x71\x92\x33\xec\x05\xbb\x73\x8e\x89\x0f\xb9\x28\xec\xc3\xe2\x53\x09\x3f\x75\xd0\x4d\xb7\x42\xe4\xdb\x6f\xca\x2d\x71\x1b\x58\xa7\x71\x5b\x77\x4c\x00\xb1\xc2\x92\x64\xc0\xef\x1c\xc5\xe1\x13\x7b\xb9\xa8\xb8\x60\x96\xa9\xd6\x6b\x5e\x6c\x8e\xc3\x69\xdb\xde\x9c\x81\x8f\x18\x64\x4c\xe6\x26\x00\x42\xb8\x19\x1d\x2d\x42\x31\x90\x7a\xe9\x6e\x34\x7f\x76\x13\xa8\x78\x1e\xf1\x7a\x09\x95\xe8\x94\xf6\x75\xc8\x6a\xad\x6b\x2c\xfe\x99\x6d\x5d\xc5\x21\x62\x4c\x70\xc6\xa9\x12\x89\xee\xe8\x09\xf7\x72\x87\xf8\xd6\x33\x53\x5a\x41\xb9\x1c\x10\x1e\x9d\x7c\xb8\x71\xef\xf4\xbf\x74\x61\x1e\xea\x2a\x3b\xcf\x62\x16\x69\xb5\x64\x05\x7f\x0c\xd7\x2f\x60\x3b\xd0\x3b\x53\xf9\x9c\x5f\xfc\xf7\xa9\xbe\x92\x99\xbd\xb5\x60\x8f\x8f\xa7\xaa\xf6\xe7\x11\x13\x99\x5c\x4b\xe5\xb1\x75\x28\xdc\xf5\x02\xb5\xe7\x7b\x59\xda\x25\x33\xe9\xbd\x95\x60\x8e\xfd\x34\x32\xa9\x4e\xd4\xc6\x6d\x1d\x1f\x98\x22\x0f\x44\x65\xec\xb8\x82\x8d\x0e\x6c\x00\x32\x15\x47\xa4\x90\xca\x68\xe3\xc1\xf9\x9d\x2a\xdb\x9a\x3b\x4b\x01\x86\x1c\xb5\x17\x35\x77\xe4\xca\x46\x45\x12\x00\xfa\xa8\x61\x89\xbd\xfe\xdb\xa2\xa0\x9c\xab\x6a\x7d\x68\x12\x0b\xc1\x92\x7f\x2d\x37\xdd\x55\x21\xdc\x4d\x45\x09\x51\x42\x55\x6b\x77\xa0\x06\xec\xb8\x73\x52\x7f\x52\x91\x64\x1c\xf9\x1c\x6d\x43\x80\x7c\x1c\x61\x80\x34\x8f\xfa\xc2\xeb\x05\xbb\xc1\x4a\x94\x99\x50\x2f\xf1\xdf\xaf\x18\xdd\x0d\x5f\x8f\xe8\x3e\x2f\x8c\xe7\xc9\xc3\x35\x87\x4a\xee\x22\x45\x1f\x3a\xd4\xee\x58\xf2\x22\x45\x6f\x79\x6c\x55\x60\x66\xb0\xd5\xbf\x36\xba\x62\x8f\xd2\xac\xa6\xea\x56\x3b\x87\x23\x53\xda\x57\x3f\x19\x81\x31\xba\xd5\x1f\x37\x20\x04\x60\xd4\x6d\x3b\xc0\x0a\xe1\x83\x72\x98\x00\x44\x3b\x53\x3a\x15\x87\xd1\x7c\xde\x86\x58\x85\x8b\x5f\x17\x02\xf3\xcc\xe0\xa6
\xe8\x4a\xd3\x15\xc6\x0c\xf4\xcd\x37\x17\x1e\xee\x21\x6a\xc7\xf6\xaa\x1f\x07\xd5\xa0\x89\x19\x74\xfd\xad\x06\xad\x38\x8b\x33\xca\x32\xae\xcd\xbd\xaf\x29\x72\xe8\x22\x24\x2d\x9c\x9e\xbd\xee\x7e\xfc\xf6\x04\xa6\xdd\x03\x8c\x39\x5b\x16\xba\xca\x7d\x2a\xbe\x4b\x23\xc4\x65\x20\x9d\x66\xa2\x16\xfa\x0d\xd9\x54\x17\x52\xdd\xe3\x8e\xff\x5c\x6b\x84\x65\x63\x44\x5a\x23\x3b\xa6\x3b\x0c\x67\xfc\x88\x49\x95\x64\x15\x5c\x7c\xa6\xe4\xc9\x3d\x96\xbe\xe9\x72\xfa\xda\x77\x66\xfb\x93\x34\x3b\x34\xa6\x2a\xcb\xa8\xdb\x70\x81\x02\x9d\x20\xb8\x80\x1e\x24\x67\x9c\xdd\x5d\x4f\xda\xfb\xbe\x97\xdb\xc1\x9c\xf6\xdb\xb3\xbe\x41\xe0\xff\xfd\x28\x07\xe1\x2e\x1b\xe4\xd1\xa2\xb6\xd5\xbd\x73\xa9\xab\x34\x01\x6e\xd2\xd2\x1a\x10\xe9\x75\x8b\x6b\x7f\xf0\x3e\x5d\xe6\xd5\xcc\x4e\x54\x36\x04\x20\x60\x47\xf1\xfd\xd5\xdd\x49\xf4\xde\xae\xad\xf2\xfd\xd5\x1d\x8b\xfa\x40\x5a\xf0\x4c\x24\xa5\x47\x1a\x8f\xd9\x69\xa8\xd6\xd1\xd4\xcc\x53\xf1\x20\x13\x4c\x9d\x1d\x59\xad\x68\xaa\x80\x04\xdf\xda\x3a\x47\x8e\x39\x95\x7d\x7f\x75\x47\x7c\xab\x81\x37\x07\x0b\x8f\x00\x35\xc6\xb0\x6b\xa7\x41\x3f\xaf\xb4\x3a\x42\xca\xa0\x22\x0d\xd1\x8e\x11\x18\xd7\x09\xcf\xcb\x8a\x14\x8c\x87\xd7\x63\xb7\x26\xd7\x21\x12\x62\x87\xa5\xa7\xca\xea\x4a\x98\x63\x00\x35\xf2\xec\x47\x6f\x2f\x6d\x63\x52\x0f\x01\x07\xc0\xa4\x1d\x24\xfc\xa5\xcf\x1c\xe4\x6a\xc3\x78\x31\x97\x65\x61\xcd\x30\x7c\x79\x84\x0c\x67\x2b\x57\x07\x0d\xd7\x2d\x68\x46\x54\xd6\x10\x16\x58\xaa\xd2\x4c\x55\x94\x00\xe3\xb3\x8d\x31\x79\x41\x2a\x06\xa4\xd1\x80\xbd\x71\x24\xb6\x49\xa6\xab\xd4\x5d\xab\x85\x2f\x93\xb8\xc9\x51\x89\x9a\x2a\x60\x3c\xb1\x77\xab\xb6\x6a\x68\xb8\xfb\xdf\xb0\x8f\xea\x41\xa6\x92\x1f\x95\xc2\x64\xfc\xa8\xfc\xdf\x1f\x47\x8d\x3f\xf1\xd7\x5f\x7f\xfd\x11\x2b\x3e\x76\xd1\x39\x44\xac\x4d\x07\x3a\x78\xda\xe3\x14\x9e\xe9\xd2\xee\xd2\x03\xd6\xe9\x42\xde\x0b\xf6\x11\x97\xfb\x23\xd1\x5c\x3f\x6d\xd9\xa6\xaa\x6d\xdd\xd8\x53\x96\x0d\x8a\x0e\xb4\xaf\x1b\xdb\xb1\x6c\xaf\x97\xe3\x7f\x59\xce\xed\x6a\x7d\xb3\x1c\xbf\xfe\x1a\xfe\xb3\xb1\x46\xfb\x0e\xaf\xcf\x9e\x69\x1b\x76\x8b\x20\x6a\x39
\x96\x5e\x16\x4d\xd5\x7e\x61\xc4\x86\xc9\x22\xd8\xb5\x6d\x07\x9f\x97\xe2\xd0\xac\x59\x64\x3f\x1f\x80\xbe\xde\xa2\x95\xdf\x19\x11\x3c\x90\x93\x3d\xf0\xa9\x03\xdc\xb3\x9b\x1c\x3e\x06\xe0\xc2\x8f\x03\x78\x7e\xe0\xf9\x7e\xdf\xd3\x78\x76\xcf\xe7\xec\x1e\x66\x26\xc4\x00\x66\x9a\x1b\xfb\x78\xcf\x41\xd6\x1e\xdd\x35\xc6\x47\x8e\x75\x27\xb7\xcb\x1d\xa5\x64\xad\x0f\x39\x45\x6e\x3b\xa2\xcb\xc4\xf8\xb4\x41\x3f\x12\x07\xad\xf4\xf6\xb5\xeb\x77\x49\x67\x29\x2e\x6b\xe9\xa3\x6e\x2d\x1b\x3f\x72\x45\x1c\x08\x85\xb3\x26\xf5\x6c\xdd\x9b\x4a\x3f\x74\x7c\x46\x2f\xbf\xdf\x22\xd6\xf7\xea\xe5\x7b\xc8\xf8\xf6\x24\x5b\x6b\xae\xac\xb6\xe6\x7a\xed\x08\x2c\xa1\x95\xff\xa4\x21\xdd\xe5\x4f\x1a\x10\xf6\xd8\x2f\x59\xcb\x75\xe5\x5a\x79\xc4\xd8\x2a\xcf\x30\x76\x50\xae\xc0\xad\x1c\x2a\x25\x3b\x31\x17\xdc\xcb\x58\x55\x39\xe3\xc5\x12\x9d\x5e\x46\x94\xe6\x55\xcb\x0a\x87\x3c\xb6\x03\x56\xd8\xa9\x5d\xb3\x61\xfc\x21\x4e\x1f\x03\x97\xca\xae\x93\xe6\x47\x59\x2f\xab\xe2\x2d\x2d\xd7\x7f\x5c\x33\x20\x24\xd7\x25\xba\xc0\x1a\x64\xc0\xf3\xda\xcd\xaf\x75\x20\xcd\xec\x25\x5f\x7b\xf6\x18\x6a\xcd\xa5\xfc\xe2\xe0\xe6\x02\x2a\x02\x75\x8f\xa1\x17\x87\x6c\xdf\x21\x10\xd3\x6d\xd7\x08\xa6\xea\xc4\x3d\x12\x78\xcd\x8d\x44\x2f\x0b\xa6\x23\x56\x73\xcc\x70\x01\x9f\x19\x0f\xb3\x4e\x1f\xd7\xf1\x11\x43\x13\xfd\x1b\x9f\x70\x67\x44\x11\x6e\xa3\xc0\x86\x1a\x7f\x47\x47\xcf\xfd\xb8\xa9\x77\x4a\x74\xf7\x89\xd4\x94\x9b\xcb\x3d\x1d\xeb\x62\x1f\xb3\xe4\xae\x4e\x1d\xe2\xc2\x9e\xe2\x25\xf0\x9a\x45\x05\xbb\xbb\x7a\xef\x6f\x26\xd1\x67\x40\xba\x13\x61\x7a\x31\x2b\x21\xdb\x84\x43\x12\xc8\xf9\x1b\x9d\x6d\xcb\x8a\xf2\xa0\xbb\x40\xf2\xf5\xac\xd0\xdd\x65\xc4\x7b\xcc\x97\x6b\xa2\x16\x31\x58\x61\x59\xd1\x0d\xfb\xa5\xe2\x19\x5e\xad\x8a\x0e\x83\x1b\x36\x38\x5f\xbe\xf9\x57\x76\x02\x77\x1f\x7b\x0f\x52\x19\x20\x63\xd0\x5a\xa9\x99\x5c\xe7\xa2\x30\x5a\xf1\xce\x7a\xfa\xf7\xdf\x9a\x19\xd5\x04\xb6\x86\xb9\xae\xb6\xeb\xff\x0e\xf8\x92\x96\xd6\xe2\x8f\xe2\xec\xbe\x9a\x8b\x42\x89\x12\x90\x88\xf0\x1c\x73\xcf\xf5\x1a\xae\xe6\x55\xb9\xfa\x66\x96\x64\xb2\x77\xa1\x62\xc8\x57
\x3d\xb1\xaf\x9d\xe2\x5b\xbb\x3e\xa0\xd6\x7e\x6d\xe8\x8a\xe1\x6f\x0c\x7f\x1b\xb3\xb7\x3c\xb9\x17\x2a\x65\x79\x56\x2d\x25\xd1\xde\xa0\xb1\x21\xeb\x6e\x85\xfa\x87\xa1\x66\x83\xed\xdb\x4b\x70\xaa\xd6\xfc\x1e\x8b\x07\x91\x0a\x6b\xed\x96\x2e\xd2\x44\xef\xa8\x99\xc9\xed\xbd\xbb\x77\xb5\xfc\x6d\xbc\xdd\x4c\x73\xef\x99\x0a\xb3\xf5\x1e\x57\x9a\x30\x4e\x35\x3f\xd1\x80\x83\xeb\x77\xeb\x16\x3b\x99\x63\x90\x31\x22\xa9\x0a\xfb\x04\x0d\x06\x4f\x2f\x04\x10\xa1\x00\x56\xa5\x18\x07\x82\xb3\x17\x86\x55\xb9\x13\x22\x10\xd9\xca\x00\x67\x84\x4b\x60\x7f\xc8\x65\x72\x8f\xc8\x56\xc8\xdd\x60\xfe\xf3\xb6\x8a\x8c\x33\x11\x20\x96\x6d\xa2\x61\x81\xf4\x3e\x87\xa1\x66\xb6\xea\x67\xed\xd9\xa7\x3d\xf3\x52\xca\x95\x50\xb3\x27\x94\x71\xea\xbf\x68\xb5\x1c\x14\x52\xc2\x7d\x84\xd0\x4f\x61\xa5\x24\x91\x79\x07\x0b\xdf\xd7\x28\x91\x8b\x86\x12\x2f\x0d\x33\xbc\x94\xc6\xca\xb2\xd6\x19\x0f\xa4\x4a\x87\xcc\x3a\x1f\xc6\xe4\xd4\xc2\xe2\xd4\x98\x0b\x9f\xe7\x36\x66\xef\x20\xae\x12\xd9\x25\xda\x73\x22\x75\x09\xac\x72\x25\x3a\xc9\x81\x9f\x03\x20\xea\xbe\x20\x7a\x7e\x67\xb8\xcc\xe7\x34\x8e\xd9\x49\x88\x67\x23\x2b\x14\x46\xaa\xf7\x7c\x91\xc8\x8c\x78\xca\xe6\xeb\x15\xfa\x01\xcc\x17\x6c\x20\x26\xb1\x14\x09\x53\x11\x4b\xbc\x1f\xe6\x23\xd0\x06\xf0\x7b\xa1\x76\xf9\xf7\xfb\x8f\x10\x03\x30\x3b\x1d\x12\x3e\xb2\xa3\x31\xb8\xf3\x94\x01\xf6\x3f\x76\x81\x88\x4b\x2e\x8e\xed\x94\x5b\x23\x28\xb9\xa7\x64\x45\x8c\xef\x11\x95\xd7\xe3\x4a\x9b\xf8\x9c\xb9\xf5\x43\x3b\xba\xa8\x7c\x75\x36\x48\xf6\xf4\x13\x8c\x28\x4f\xa5\x63\xa6\x2f\x18\xb5\x3f\xa4\xe8\x54\xf2\xeb\xcd\x9c\x08\x85\x69\x00\x5c\x84\x6b\xaa\xe5\x34\xab\xbc\x7a\xae\xea\x3f\xfb\xe9\xb5\xb7\x67\x78\x6b\x40\x3f\x7e\x6b\x3e\x40\x7f\xcf\x41\x46\x83\x5e\xc6\xe7\x4f\x04\x7b\x62\x08\xdc\x43\x9c\x9d\xf7\x53\x43\x92\x08\x5d\x94\xb9\x4e\x59\xd8\xef\x5d\x99\x36\x4a\x69\x84\xb8\xfe\x06\x3f\x2b\x1a\x5c\xef\x6f\xdb\x77\xd4\xde\x47\x38\x39\x36\xaf\x64\x96\x22\x4b\x61\xa4\xa1\x6a\xa7\x02\x41\x21\x2c\xd0\x47\xa4\xf1\x17\x5c\xcb\xa6\xff\xf1\x5b\x73\xa5\xd3\x43\x36\xd6\x70\x26\xda\xed\x7d\xdd\x23\x8d
\xc6\xc4\x58\xa6\xf5\xfe\x99\xc8\x75\x77\x02\x44\x3a\x33\xf5\xca\xcf\x3b\x06\x0c\x88\xb7\x79\xb5\xb8\x81\x32\xb3\x5d\xa4\x4c\x51\x05\x46\x97\x65\x6d\xd7\xd9\x76\xe3\x73\xfe\xba\x16\x85\x00\x54\x41\x1f\xe1\xec\xf7\x37\x1f\x2e\x8f\xd6\xbc\x30\x2b\x0e\xa4\x17\xae\xad\x91\xab\xdc\x8f\xde\x02\x07\xec\x90\x6a\xaa\x8e\xd8\x52\x8f\x10\x46\xf4\x86\xad\xca\x32\x37\x6f\x8e\x8f\x97\xb2\x5c\x55\xf3\x71\xa2\xd7\xc7\x61\x6a\x8e\x79\x2e\x8f\xe7\x99\x9e\x1f\x17\x02\x12\x49\x8e\x5e\x8f\xbf\x79\x0d\x2b\x73\xfc\xf0\xfa\x18\xc0\x23\xe3\xa5\xfe\xa7\x8b\x6f\xfe\xed\x77\xff\x6a\x1b\xce\x37\xe5\x4a\xab\x37\x84\x51\xda\xd9\xf6\x11\x9a\x09\xc7\xf8\x4a\xa3\x97\x7f\x1b\x7f\x1d\x0f\x83\x1e\x5d\xeb\x54\x64\xe6\xf8\xe1\xf5\xcc\x2d\xcc\x38\xef\xa8\x98\xf1\xf7\xd4\x8b\x2f\x90\x7a\x71\x2f\xcb\xbf\xa7\x5e\xfc\xaa\xa9\x17\xfd\x55\x2e\x2f\x63\x80\x23\x3b\xc8\x47\xfb\x77\x2f\x23\x5d\x24\x62\x9f\x1c\x6a\xb9\x1c\xe2\xc4\xb8\x03\xae\x88\xfd\xa5\x0a\x77\x5d\x00\xde\x96\xe9\xf0\x38\x0e\xad\x53\xd3\x69\x5d\x0c\xe2\x01\x01\xa0\xa3\x4c\xc0\x57\x18\x95\xae\xdb\x9e\xc4\xa8\x6e\xcf\x01\x53\x88\x75\x44\xda\x49\xcf\x7a\xd6\x53\x14\x0e\x00\x58\xab\x49\x82\xd0\x5d\x70\x03\xd1\x51\x0b\xf5\xfe\x48\x9d\xe8\xb0\xe8\xe5\x5a\x0c\x1f\x4f\x08\xfe\xa7\xbc\x14\x47\xb6\x91\x1d\x03\x06\xae\xdf\xae\x71\xd6\xca\xf1\x35\x6b\x40\xc6\x03\xc5\x62\x5b\x22\x9d\x3d\x4b\xcd\xb1\xd6\x3e\x10\x0e\x3b\xb8\xfd\x2d\x93\xa3\x87\x55\x40\x00\xf1\x83\x36\xd3\x67\xac\x0e\xf3\xdc\x65\x61\xe8\x73\x9f\x58\x12\x26\xc3\xb7\x1d\x9c\x5d\x3f\xba\x52\x30\xcf\x51\x40\x25\x40\xf5\xfb\x15\x4f\xc1\xf3\x00\x63\x71\xe3\xea\x18\xc6\x8a\x9b\xa7\xe5\x45\x9c\x20\xfb\xb2\x0f\x83\x23\xa8\x5c\x1a\xd7\xa1\x53\x44\x1c\xf1\x94\x3d\x52\x8e\xdf\x32\xaf\x8a\x5c\x1b\x61\xc6\xec\x9d\x2e\x90\xd1\x8c\xe8\x86\x42\xae\xc7\xf5\xbb\x53\xf6\xfa\xdb\x7f\xfb\xdd\x54\xbd\x6c\xd1\x03\x41\x7f\xd0\xc5\x92\x52\x4f\x40\xfb\x5b\x73\x53\x8a\xe2\xb8\x58\x24\xc7\x78\x6b\x1e\xdb\xf7\x8f\xa8\xd3\x23\xbd\x38\xf2\xd5\x21\x8e\x88\x28\x7f\xbc\x4e\x5f\x75\x81\x32\xdb\x6d\x8d\x5f\xcd\xde\x3b\xe9\xb0\x49\xda
\xd6\x77\xff\x9d\x52\x3b\x42\xa8\x83\x91\x02\x66\x40\x59\x43\x16\x4a\xbd\xf0\xf5\x8c\x30\xc5\x19\x4b\x9f\xe9\x45\xcb\x7f\xbc\xcd\xf4\xdc\xbc\xf2\xdc\xb7\xdc\xb8\x3e\x02\x19\x65\xdb\x95\xb5\x75\xe6\x0e\x71\x3c\xd0\x54\x7c\x4e\x8f\xa2\x93\x89\xf1\xb2\x0d\x99\xf8\x76\xa1\x11\xd4\x60\xa4\xe2\xe2\x85\xae\x94\x2b\x18\xa2\x95\xd0\x0b\x40\x78\x81\x85\xe8\x00\xaa\x10\x54\x01\xd8\xa3\xa7\xdd\x2a\x44\x8e\x8a\x17\x84\xff\xba\xa7\xfb\xc0\xa2\x39\xfb\xe6\xf9\x73\x14\xcd\x39\x74\xde\x49\x30\xfe\x4a\x13\x7e\x68\x16\x09\x1e\xa5\x21\xe0\x2b\xfb\xfc\x5e\xa0\x85\x97\x03\xa1\x3c\x7b\xa8\x4f\x91\xf3\x02\x8c\x17\x71\x54\xea\x23\xe0\x2b\x04\x16\x3c\x2c\x63\xd5\x85\xbe\x02\x80\xca\x90\xeb\xde\x3e\xdf\x63\x9c\x68\xb0\x7e\x8a\x06\x4a\xba\xba\x41\xf2\x77\x42\xe3\x4b\xa5\x44\x41\xc1\xef\xbd\x9a\xc1\x40\xf8\x4a\xbc\x94\xbb\x06\x1b\x7b\x68\xe2\x12\x43\x3e\x15\x93\x47\x42\x60\xcc\xc0\x2a\x5b\xe9\xb5\xb6\x6a\xbe\xae\x4c\xf4\x23\x5a\xf5\xa0\x4c\x74\xda\x24\x6b\x9e\xa3\x6a\xfc\xeb\x7d\x8d\x3d\x5a\xf6\x27\xf4\xbe\xc7\x0f\x0d\xaa\xda\x36\xaf\xd7\xa9\xda\x33\x7e\x5f\x60\x68\xf7\xbe\x01\x70\xd4\x1a\xa2\x9d\x50\xc9\x9b\xca\x86\xc8\xbf\x5a\x7b\xdf\x6e\x29\x6f\x41\x7b\x0d\x04\xb1\x7c\x48\xc3\x1d\x23\x57\xdd\xad\xdb\x49\x94\x53\xad\x07\xae\x81\xcf\x2f\xeb\xb3\x00\x5c\x61\xc6\x95\x4b\xb5\x3a\x6a\xcd\xb5\xea\x3a\x97\xe0\x52\xac\xac\x65\xe2\xa8\xe2\x87\x0d\xf5\xc6\x37\x40\xac\xf0\xdb\xe3\x0e\x4c\x9b\x90\x98\x87\x73\x8c\x02\xc1\xe9\x16\x5d\xf8\xee\xe1\x87\x11\x6a\xf6\x0d\x99\x3b\xe8\x04\x37\xe7\xd6\x0c\x46\x67\xa1\x6b\x02\x87\xb9\x9e\x77\x79\x72\xdb\xa0\xfd\x48\x4e\x1c\x12\xb7\xed\x28\xb7\x9c\x2a\xfe\xc5\x87\x50\x17\x1a\x90\xcf\xf3\x0a\x7e\xbf\xfc\x70\x1b\x83\xba\x24\x7e\xed\x51\xb2\x12\xc9\x3d\x38\x12\xf1\xca\xc3\xc3\x40\x3c\x04\x80\x34\x0f\xd5\x64\x4b\xed\x30\x42\x1b\x5f\x60\xc7\x17\x99\xd2\x05\x4b\xa5\xc9\x33\xbe\x01\x34\x86\xc2\x14\xcd\x80\xe4\xf0\xb9\xcd\x56\x14\xec\x8b\xa3\xf4\x5f\x69\xbb\x2a\x27\xe1\xbd\xa1\x73\x19\x30\xf7\x61\x32\xb7\xe5\x01\x33\x62\xcd\x55\x29\x93\xa9\x5a\x0b\xae\x62\xf0\x2e\xa1\x51\xec
\x24\xa7\x5a\x50\xa9\x88\xc5\x42\x24\x65\xe0\x9a\x06\x23\xc4\xcf\xd4\xbe\x33\x38\xec\xdb\xfd\xc9\xdb\xf9\xe9\x3f\xb8\x8a\xd7\x72\x0d\xd0\x70\xda\x43\x74\x35\x3e\x31\xca\x0a\xd5\x87\xe9\xca\x75\x46\x2d\xfc\xcb\xed\x29\x36\x17\xe5\xa3\x00\x2a\x25\xe2\x7e\x68\xd3\xf1\x0f\xae\x40\x75\x48\xde\xe4\x89\xa7\x5e\x24\x66\xfd\x2d\xee\x64\x3a\x60\x31\xe6\xd4\x73\x3e\xaa\x06\x79\xe3\x0b\x62\xa3\x00\x2f\xe8\x0b\xf2\xe7\xbe\x80\x6b\xda\x5a\xc1\xc5\x83\x48\xa7\xaa\xce\xa8\x49\x3a\x63\x38\x70\x2c\xd4\x56\x7d\x1e\x69\xe3\xe6\xb8\x57\x8c\xeb\x1c\x58\xc4\x02\x7f\xb8\xe7\x5b\xd8\x51\xeb\x15\x3f\xfa\x73\x5a\x55\xae\xcc\x74\x5f\x63\x38\x94\x5f\xa5\xda\x89\x54\x6a\xb9\x06\x7c\xf2\x9b\xd2\xf3\x05\x22\x99\xb0\xc7\xc9\x93\xbf\x7e\x2b\x02\xd0\xd6\xc6\x54\x39\x22\x9d\x45\x95\x21\x41\x7c\x57\xba\x12\xd1\x87\xba\xa4\xdf\x5f\x2f\xf9\xdb\xfb\x9b\x59\x54\xae\xd6\xe3\x93\xa2\x9c\x05\x94\x75\x6e\xd7\x0b\x65\x2a\x50\x29\x5c\xa5\x4a\x08\xc8\x2c\x45\x09\xb7\x79\x5a\x65\x88\x55\x85\x48\x12\x50\x91\xf2\x2c\x63\xb2\x34\x53\xe5\x99\x53\x31\x27\x09\x24\xac\x0b\x35\xa5\x64\x72\x41\x17\xd0\x2c\xfc\xcc\x15\xe8\x61\x32\x91\xe5\x56\xa6\xc7\x26\xae\xee\x96\xe7\x82\x23\x8d\x01\x2e\xdb\x54\xc5\x36\x57\x73\x11\x28\xe7\x9f\x67\x92\x9b\xe7\x48\xbf\xdf\xe1\xb8\xb5\x5d\x3c\x09\x60\x84\x5f\x67\x0d\x2e\x57\xb8\x1d\x47\x4b\xd4\x49\x04\xc8\xb6\x56\x4d\x69\x5c\xec\x28\xd8\xad\x90\xce\x94\x54\x19\x2f\x30\x8f\x6b\x51\x65\x4c\x2e\xa2\x1a\xf4\xb0\x06\xc8\x9b\x69\x97\x2b\xd1\x70\x57\xbb\xe8\x91\xe1\x6b\x11\x51\xf6\x90\x7b\x27\x8b\xc0\x4e\x58\x0c\x04\x51\x34\xb6\xad\x57\x63\x76\x16\x98\x81\x71\x85\xe1\x4c\x44\x7c\xdb\xd2\xa0\xf8\xf3\xe3\x8d\xd8\x26\xe0\xeb\xec\x10\xb5\xb2\x27\xd2\x9f\xba\x8e\x15\x84\xba\x3d\xc3\x90\x54\xae\x6a\xd3\xee\xe4\x82\x56\xb6\x19\xfb\x6a\x03\x5f\xe5\x0f\x44\xc7\x00\xdd\xad\x30\x70\x90\x31\x57\xf9\x13\x06\xea\xb9\xe0\x5b\x06\xbb\xde\x51\xf2\x1e\xd6\x71\xe0\x50\xa3\x02\x92\xc3\x07\x1a\xed\x9c\x18\x37\xd7\x67\x66\x97\xbc\x1c\x0a\xa2\xf3\x39\x7b\xc3\x07\xda\x0a\x58\xec\x33\x4c\x90\x1e\x03\xc7\x79\x62\xdf\x79\xe2
\x40\x4d\x35\x3f\x42\x01\xed\x4b\x41\x81\xa8\x10\x3c\x59\xd5\xe9\x33\x1c\xc9\xb5\xff\x02\x48\x9f\x84\xf3\x38\x9c\xf9\xe3\x24\xec\x39\xa8\xa1\xc9\xec\xf0\xc7\xec\x83\x12\x08\x71\xd5\x8b\xe8\x52\xa1\x01\x50\xb1\x4d\xa8\x33\xe4\xa5\xdc\xdc\x0e\x4c\xdd\x3b\x56\x31\x7b\xe4\x46\x8c\x87\xd6\x41\xea\xe1\xb6\x41\x29\xd2\xa1\x4b\xb6\x55\xe5\x3a\x40\xbd\xec\xc7\xcd\xd1\x6e\xf3\x47\x48\xf1\xe1\x12\xa0\xed\x3b\xfa\x2f\xcb\xce\x94\x0f\x6f\xc5\xb9\x3c\x8f\xfa\xbe\x61\x88\xfb\xde\x37\xbf\x57\xab\x3a\x5c\x78\x40\x6d\xcc\xbb\xcb\xb3\xf3\x77\x93\xcb\x7a\xe9\xc9\x3f\xdc\x9d\xdf\xd5\xff\x72\x7d\x77\x79\x39\xb9\xfc\x3e\xfe\xd3\xcd\xdd\xe9\xe9\xf9\xf9\x59\xfd\xb9\x77\x27\x93\x8b\xc6\x73\xf6\x4f\xf5\x87\x4e\xde\x7e\xb8\x6e\x94\xd0\x6c\xa9\x7f\x79\x3b\x79\x7f\x7e\x36\xfb\x70\x57\xab\xc2\x79\xf6\x1f\x97\x27\xef\x27\xa7\xb3\x96\xf1\x5c\x9f\x9f\x7e\xf8\xe9\xfc\x7a\x4f\xad\xcc\xf0\xbd\xad\x53\xfa\x1c\xb0\xca\x27\x97\x54\x3d\x61\x8b\x42\x0a\x95\x66\x1b\x4c\x92\x71\x96\x6d\x03\xf5\xde\x08\xb8\xeb\xea\x90\x5c\x97\xdb\x95\x60\xfa\x41\x14\x40\x80\x86\xad\x11\x5b\x4a\x20\x5b\x68\xf6\x5a\x88\xb2\xd8\x8e\x0a\xec\x4c\x28\x2c\x8b\x8d\x4f\x59\xdd\x35\x9c\x40\x9e\x49\x9d\xb0\x5c\x14\xbb\xc6\x02\x9a\x51\x51\xe5\xa5\x9c\x77\x67\x2f\x0d\xe6\x1c\xe8\x6b\x7b\x23\xd5\x73\x3b\x2f\xde\x65\xbb\x60\xac\x25\xf1\x1c\x92\x21\x00\x2d\x3c\xb5\x52\xb0\x7f\xdb\xa1\xaa\xf3\x6a\x9e\xc9\x84\xc9\xb4\xe9\x4f\x21\x2a\x10\x70\x19\x37\x19\xe1\x73\x51\x80\xaa\x6a\x2d\x80\xbc\x10\x47\xbc\x2a\x57\xc8\x5e\x4a\x39\x43\x54\xbf\x67\xaa\x8c\x48\x0a\x81\xb1\x00\x61\xc0\x49\x8b\x95\x60\xa3\x9e\x60\x30\x44\xde\x93\x02\x4f\xe0\x38\xaa\xce\xd3\x11\x23\xc0\x37\xb1\xf5\x01\x4e\x52\x7c\x7e\xe7\xd4\xd0\x88\x25\xd6\x9a\x8d\x10\x71\x70\xc3\xe3\x8f\xae\x9e\xac\xfd\x6e\x2b\xa9\x7d\x3d\x55\x5c\x64\x97\x64\xd5\xfe\x19\xfb\xf6\x58\xbc\x51\xea\x59\x47\xd4\x3a\xfd\x74\x5a\x08\xb8\x44\x08\xd2\xe0\xfc\x17\x00\xe9\xa2\xa4\x2c\xc8\xc5\xb2\xa6\xda\x5c\xac\x78\xb6\x40\x8d\xc3\x2e\x4d\x3b\xa1\x0a\xb6\x7f\xab\xef\x85\xba\xc6\x05\xfb\x55\xc4\xa1\x42\xcb\x27\xd0\x39
\x79\x8f\x50\x70\x61\xda\x31\xba\x5d\xe5\x52\x62\x41\x99\x2a\xd1\x4e\x88\x7e\xc6\xdc\xab\x50\xac\xc1\x65\xd3\x2e\x16\xf2\x93\x6d\x70\xaa\x44\x2b\x5d\x3d\xe0\xe8\x1c\xb1\xa6\x97\xcb\x80\x19\x44\x76\xc2\x7b\xa1\xa0\x94\x2c\xf0\x22\xee\xdf\xb3\xc3\xfc\xe7\xdb\x6b\xb1\xc3\xa1\x0f\x3e\x3f\x59\xab\xb0\x1b\x47\x79\xdc\x3c\x95\x98\x0c\xe7\xe9\x47\x60\xdf\x9c\x5e\x4c\xce\x2f\x6f\x67\xa7\xd7\xe7\x67\xe7\x97\xb7\x93\x93\x8b\x9b\xbe\xc7\xef\x39\x12\x18\x1b\xa7\xaf\x99\xc7\xe7\x25\xc4\x31\x9d\xbc\x90\xc5\xef\x3f\x2a\x1c\x3b\x58\x92\xfd\xa3\x97\x69\x3e\x4b\xa5\x49\xec\xf5\xb7\x99\x09\x95\x42\x9d\x8f\x27\x6d\xd5\xf6\xa6\x9a\x5f\xe1\x9f\x60\xfe\x09\x27\x41\xf0\xb6\x7b\x70\x3b\xda\xff\x0e\x68\x54\x70\x43\x16\xc2\x1e\xfe\xb4\x46\xaf\x32\xde\x5f\xdc\xcd\x36\x77\xd8\xb7\xd5\x9b\x68\x7e\x13\x8e\x57\x1a\x53\x01\x8b\x8b\x7b\x0c\xa0\xb8\x1d\xb3\x42\xe4\xcb\x71\xb1\x11\x19\x15\xe0\x67\xd2\x4c\xd5\x9a\xab\x94\x97\xba\xd8\x74\x7c\x62\x3f\xe1\x19\x1f\x9b\xba\x08\x8d\xaf\x6c\x25\x44\xea\x56\x01\x1f\xe5\xaa\xb9\x95\xb0\x24\xc9\xed\x87\x1f\xcf\x2f\x6f\x66\xe7\x97\x3f\xcd\xae\xae\xcf\xdf\x4d\xfe\xe8\x11\xc2\x39\x37\x6d\x75\xb5\xf3\x42\x58\xe9\xe2\x18\xde\x5a\xe5\x0b\x56\xab\x76\xed\x50\x85\x52\xb9\x98\x2a\x27\x59\x8a\xd0\xfc\xaa\xd0\xd5\x72\xd5\xde\x50\x73\x94\x57\x27\xb7\x3f\x3c\x69\x98\xc0\xbf\x89\x25\x6d\xf1\xb4\x6d\x23\xa5\xe5\x82\xe4\x1e\xc2\xab\x1b\xc3\x03\x16\x59\x78\xb4\x2d\xca\xd0\x21\xd1\x9e\x64\xbd\x6c\x0b\xad\x9d\xca\x7f\xcb\xe3\x5d\x1b\xe8\x36\x92\x9b\xb5\x6b\x04\x90\xfb\x58\x17\x7d\xab\xb5\x37\x2d\x7f\xab\xdd\x60\xdf\x1c\x65\x62\xb9\x14\x29\x6e\xaf\x66\xc3\xe4\x83\x23\x11\x98\x84\x7b\xbd\x6d\x16\xa9\x76\xf1\x01\x17\xb3\xc7\x7b\xf5\x17\xe0\x57\xfe\x95\x76\x59\x71\x4a\x1c\x5a\x10\xdf\x2c\xb9\xea\x08\x24\xef\x4f\x85\x6b\x6f\xfe\x43\xc1\x7c\x96\x22\x39\x4c\x5c\xc8\x20\x9c\x83\x2e\xc0\xcb\xe1\xf8\x56\x3f\x8e\x6b\x91\x67\x3c\x11\x3e\xb7\x07\xc9\x8f\xc1\xae\x7f\x4a\x00\x8f\x2a\x44\x2b\xf2\xb7\x44\x95\xa3\x43\x51\xbc\xb6\x2d\x00\x9e\xdb\x6b\x27\x8f\x3f\xbf\x6b\x65\xa7\xe1\x46\x94\xa7\xe0\x68
\xc6\x12\x9d\x94\x12\x82\xbe\x28\xa8\x7b\xdb\x09\xd7\x1f\xb4\x1d\x1a\x3d\xff\x44\x0b\x8f\x36\x73\xdd\xd1\xcd\x1d\xa9\xb0\xdf\x1e\x5e\x75\xdc\xe5\x2f\x2c\xcb\x62\x27\x0f\xf9\x73\x84\x23\xae\x0a\xbd\x96\x46\x9c\x94\x65\x21\xe7\x55\x5c\x88\x79\x20\x60\xae\x66\x9c\x84\x0f\xce\x0b\x9d\x56\x89\x63\x0e\x83\xaf\x0d\xb0\x1f\xf2\xf2\x39\xad\x23\x65\x47\x76\xf7\x91\xe5\x26\xd2\x23\x48\x74\x41\x6a\xbb\xb6\x18\x9b\x13\x8c\x1d\xbe\xbf\x2b\x77\x95\x3f\x73\xba\x6c\xf7\x64\xba\x3d\xd0\x2f\x03\x9e\xb9\xc7\x41\x03\xee\x40\x4d\xd1\x76\x99\x73\x0c\xa0\xd7\x75\x94\x2e\xa2\x20\x7f\xd5\x0c\x03\x77\xf5\xc3\xc6\xd4\x33\xc9\x50\x6f\x58\x71\x83\xea\x7c\x99\xac\xea\x03\x87\xaf\xa9\x13\x26\x37\x87\xeb\xd5\xe3\xc3\xdc\x26\xbd\xc2\x68\x23\x74\x34\x48\x72\x6c\xd7\x8a\xdf\xfa\x4a\xde\x9d\xfe\x7b\x4c\xb9\x98\xfd\x52\x89\x21\x05\xad\x5d\xaa\xc6\x1f\xe0\xb5\xbd\x80\x14\x89\xd8\x2d\xef\x7b\x85\x4c\x13\x23\x78\x91\xac\xd8\x9c\x1b\x62\x62\x8c\x89\x22\xb0\xf2\x3e\x93\xf6\x2d\x9e\x94\x54\x89\xd8\x75\xeb\xaa\x11\xdf\x3a\x28\xa4\x55\x6b\x83\xd7\xa3\x6d\xbb\xed\x9b\x80\x21\xde\x6b\x37\x8c\xc9\xd9\xa0\x18\x42\xac\x87\x7b\x3b\x19\xaf\x58\xb8\x9d\x32\x5e\xa9\x64\xc5\xf2\x8c\x23\x97\xc6\x8a\x1b\x14\x14\x0e\xa1\xc3\xe7\x32\x93\x25\x50\xa4\x61\xe0\xb8\xb1\x6f\xad\xf1\xcc\x8b\x7b\x57\x69\x82\x07\x3e\xbc\x5d\xa2\xe4\x40\x24\xb4\xff\xaa\x2f\x8a\x85\x0e\x82\x30\x16\xee\xfd\x0e\x3b\xe1\xa0\xc3\x72\xd8\xeb\x0d\x0e\x7b\xf8\x96\x61\xd1\x21\x6a\xf1\xaa\xf9\x7a\x63\xbe\x29\xd1\xeb\x30\xd9\xbd\x23\x05\xec\xb3\x80\xce\x43\x42\xdd\xee\x6b\x74\xfb\x83\x5b\x94\xe0\xe1\xc0\x27\xaa\x19\xf5\xa4\xa4\x37\xac\x28\xd5\x7a\xee\x17\x99\xe6\xe5\xee\x84\x3a\x2c\x10\xd5\x99\x50\xa7\xab\x79\x57\x49\x12\x1c\x55\xaf\x74\xbd\xd6\xf7\x9d\xf8\x7f\x2e\x9f\x7b\x7c\x8f\xf2\x52\x40\x1a\xe0\x81\x59\x84\xed\x8d\x53\x02\xf7\x60\x32\x0e\xbf\x0d\x42\x99\x42\xaf\xfb\x03\x24\xb5\xe5\x38\x35\x95\xbc\x83\xd2\x3d\x0f\x5b\x2f\xa9\xf6\x6c\xa5\xfd\x95\xcf\x7e\xf7\x4d\x9f\x6c\xc4\x3f\x54\xdc\x5e\x00\x1f\x16\x37\xc8\x8d\x76\xc8\x47\x97\x72\xfb\x58\xb5\x8b\x81\x66\xaf
\xb7\xf5\x28\x6d\xbc\xf1\x7b\x13\x3d\xb4\x7d\xcd\x8d\x7d\xbb\xbf\xd8\x9d\xd4\xbc\xb1\x79\x21\x35\x70\x84\xe9\x45\x4d\xd7\x68\x91\xc4\xad\xfd\x1e\x30\x93\xbf\x54\xa2\x12\x76\x03\xcd\xab\x74\xb9\x1d\x2c\x19\x60\x70\x85\x4f\x5a\xe9\x47\xb6\xae\x92\x15\x73\x8d\xb3\x54\x64\x7c\x53\x57\xa3\xac\xad\x51\x6a\x60\x8f\x1e\x44\x95\x18\x71\xfe\x27\x95\x29\xf5\x1a\x70\xea\xa1\xdd\xa2\x52\x70\xca\x19\x77\xa7\xab\xed\x42\xab\x71\x99\x3e\x31\x42\x7e\x73\x75\x7e\x3a\x79\x37\x69\x84\xa7\x4f\x6e\x7e\x8c\xff\xfd\xf3\x87\xeb\x1f\xdf\x5d\x7c\xf8\x39\xfe\xdb\xc5\xc9\xdd\xe5\xe9\x0f\xb3\xab\x8b\x93\xcb\x5a\x10\xfb\xe4\xf6\xe4\xe6\xfc\x76\x4f\x9c\x7a\xbb\xd7\xee\x85\xe0\x11\xd5\xaa\x43\xce\xbb\x3a\x42\xce\x5d\x45\xbd\xbe\x61\x27\x8e\x78\xb6\x46\x8d\xec\xb0\x06\x00\x4e\xca\x10\x63\x89\x90\x84\x33\x5e\xf2\x53\x5e\xf2\x4c\x2f\xc7\xec\x84\x51\x5e\x01\xe6\x8b\x18\xab\x12\x12\x2b\xa7\x5d\x1d\x6c\xc2\xea\x85\x49\x70\x05\x85\x42\xe9\x7a\x41\x7c\xb8\x99\x88\x4b\x6a\xb9\x24\xcf\xa9\x3a\x7f\x10\xaa\xac\x40\xd1\xe6\x59\xc6\xa8\x5b\xf7\x40\x44\x88\xe2\x46\x69\xe4\x5a\x66\xbc\x08\x35\xad\x3f\x50\x5b\x60\xec\xba\xb1\x7a\x42\xbe\x6d\xb6\x0d\xe7\x0f\xb8\x9b\x30\x18\xf7\xe9\xc5\x04\x14\xdd\xa4\x74\x05\x1b\x5d\xe7\x53\x85\x7c\xab\xd4\xe3\x9a\x43\x0e\x53\xa9\xc9\x41\x8f\xdd\xd3\xc3\xdd\x1b\xf1\x20\xc5\xca\x85\xb2\x3e\x97\x63\xc2\x0f\xd2\xfd\xc7\xb9\x2a\x8b\x4d\x6f\xed\xf5\x16\xc8\x2c\x0c\xd8\x75\x04\x89\xac\xd7\xb9\x46\xff\x29\x73\xad\x5f\x82\x4a\xeb\xf0\xba\x14\xde\xf3\x51\x3c\x84\x47\x75\x98\x44\x99\xbd\x79\x7f\xab\xf3\x10\x13\xa0\xc1\x2c\xcc\x75\xa5\x52\x43\xe0\xcd\xb5\x54\xc7\x6b\xfe\xe9\x95\xfb\x52\xe4\xef\xf1\xd5\xe6\x80\x2c\x52\x64\xd6\x1e\xdc\x58\x21\xb7\x7b\xba\xa6\x6a\xc7\x7c\xed\xb7\x09\x9c\x64\x05\x97\x41\xf0\xef\x20\x0c\xf5\x41\x6c\xda\xd6\x6f\xab\x62\x28\x8b\xcb\x5e\x40\x23\x79\x21\xec\x83\x1e\xe3\x9a\x21\x74\xd9\xff\x1b\x72\x59\x6a\x55\xcd\xdb\x65\x77\x0c\x1b\x39\xe8\xd8\xb4\x02\x56\xfa\x2b\x3e\xbd\x4b\xbe\x52\x4f\x76\xcd\x10\xbe\xe2\x22\x27\x94\xbb\x43\x71\x79\xbb\x58\x7f\xd1\x73\xb6\x80\x44\x36
\xf2\x13\x14\x02\x22\x65\xb0\x14\xae\x46\x11\x50\x0a\x6e\x61\x62\xdc\x16\xc8\x84\x81\xf8\x91\xb2\x46\xb5\xf8\xa5\x22\x08\xc0\xeb\xaf\x87\xdd\xb3\x25\x16\xba\x40\x66\xf3\x66\x09\x08\x7f\x97\xc3\xb8\x2a\x25\xdb\x78\x46\xaf\x2b\x65\xaf\xe2\xe7\x40\x4f\xf5\x0f\x8f\x37\x3a\xa5\x7f\xee\xcd\x35\x73\x91\x9d\x02\x9f\xff\x6c\xa4\xd5\x3f\x35\xb8\xaa\xa9\x3b\xc8\x6c\xa0\xd6\xe3\x0b\x6d\xce\x93\xfb\x47\x5e\xa4\xe8\xfe\x07\x38\xd3\x98\xfd\xa0\x1f\xc5\x83\x28\x46\x2c\x11\x45\xc9\x89\xaa\xd1\x00\x9e\x03\x0e\x14\xb5\x33\x55\x90\xe8\x83\xbc\x97\xca\x54\x85\x60\xa5\x5c\xae\x4a\x51\xc4\x68\x1c\x5d\x58\x71\x54\x22\x4b\x6f\x2e\x12\xe2\xa2\xeb\x98\x80\x45\xc6\x1f\xb6\xb9\x27\x9f\x42\xa2\xc3\x26\x3e\x5b\xd9\x85\xbb\x5d\xdd\xb7\x5d\xf8\x29\x9a\x30\x12\x9a\xc8\x1e\x36\x62\x4b\x9d\x71\xb5\x1c\x8f\xc7\x50\xe3\xe4\xd5\xa0\x8d\x4e\x0d\xc6\x01\x74\x8f\xd2\xcf\xb4\x36\x22\xdb\x78\xfe\x34\x9f\x47\x05\xc0\xdd\x4f\xa5\x50\x46\xa2\x63\xab\x65\xfb\xdf\x34\x83\x4b\x5f\x36\x16\xd7\x6e\x9e\x0f\xce\xd2\xed\x68\x07\xca\xc8\x0e\x68\x09\x9f\x6f\xb7\xbc\x9e\x94\x75\xde\xde\x96\xd2\x6a\x68\x2a\xf5\x4f\x5a\x76\x40\x41\x9e\xc4\xb3\xda\xda\x12\x71\x40\x3d\x29\xfd\xb4\x7d\xce\xb6\x32\x82\x0f\x48\x06\xde\x91\xd7\x3b\x30\xa5\xb7\x8f\x23\xe0\xa6\xb9\xdc\x83\x8f\xc5\xfe\xca\x76\xad\x1f\x34\x30\x65\x3a\x70\x1b\x0c\x51\x9d\x30\xeb\x32\xdb\x80\xc5\xe5\x13\xa8\x21\x3c\x90\x46\x51\xa5\x5a\xd0\x0c\x52\xf9\x42\xd4\xcd\x73\xf3\x45\x41\x36\x53\xea\x82\x2f\x05\x5b\x8b\x54\x56\xeb\x56\x61\xe3\x87\x7b\x08\x7c\x54\x67\xd5\xba\x9b\x25\xf5\x50\x05\x3a\x0c\x12\xff\xeb\x14\xba\xeb\xcf\xa1\xe3\x33\x23\x5c\x81\x51\x1a\x2f\x86\x90\x68\xae\xed\x4d\x59\x48\x03\x04\xc3\x4f\xc9\x9c\xf5\xcd\x60\xd3\x10\x80\xdf\xe4\xe8\x64\xaf\xad\xee\x91\x8b\x8c\xd2\x2b\x06\x57\x15\xa2\xf6\xdd\x97\x42\x13\x94\x3a\xbc\xcc\x60\xa1\xab\x2d\xee\xa9\xde\xcc\x6e\x2a\x2a\x3a\x42\xa8\x39\x68\x90\xa0\x3d\xa5\x66\x0b\x97\x8b\x79\x2f\x22\xd6\xc7\x14\xca\x91\x3c\x22\xe5\xd3\x8f\xdf\x1a\x07\x02\x22\x9c\x56\xd0\x58\xca\xd0\x09\x46\x80\x1e\x5e\x3b\x78\x1e\x7e\x21\x36\x01\xdc
\x8c\x29\x57\x65\x6b\x03\x01\xbd\x0a\x6d\xe1\x2b\x3f\xf1\x2a\x6b\x7f\x9c\xda\x87\x47\xb1\x5c\xed\xc9\xcf\x37\x0c\xa7\x9a\x4a\x47\x14\xbb\x06\x1a\x35\xb2\x1f\x20\x08\xd3\x35\x7b\x82\x26\x58\x5b\x07\x9c\x74\x57\xb9\xc4\x4e\xbb\x28\x93\x55\xd0\x3c\x80\x9b\xd2\x73\x6a\x52\x2d\x72\xfa\xce\x75\x28\x86\x81\xd8\xeb\x18\xc4\x2a\x97\x4a\xc7\x55\xa4\xb4\x12\x10\x8a\xb3\x02\x48\xc7\xcd\x32\x59\xee\x47\x0a\x0e\x24\x64\xdc\xb7\xd5\x4a\x8d\x08\x30\xfa\xce\x5a\x9c\x1a\x4c\x0a\x89\x74\x55\x0e\x66\x8d\x36\x11\x95\xb6\x6e\x16\x49\xa8\x13\x80\x4c\x55\xbd\xab\xad\x49\x72\x50\x3e\x59\x08\xe4\x36\x37\x56\x7b\x2b\xe5\x83\x3d\xa8\xdb\xdb\xda\x6f\x50\x90\x00\xdb\x7b\x8f\xc2\xb6\x2c\x22\x48\xbf\x17\x1b\x13\xd7\xd1\xa6\x1d\xc5\xba\x36\xa4\xb4\xdf\x43\xeb\xb5\x7f\x29\x60\xe2\x66\x45\xa8\x86\xd9\xef\x2e\xc3\x4e\xdf\xdb\x97\x77\x60\x84\xb7\x1a\xb7\x7b\x30\x24\xbb\x06\x9f\x22\x89\x89\x30\xcf\xb4\x86\x01\x06\x08\x20\xcf\x18\xc6\x19\x67\x2e\x81\xe1\x6b\xed\xdb\xa9\xa2\x1a\x0a\xd1\x25\x67\x05\xce\xf6\xb2\x51\x06\x3e\x32\xb7\x6f\x6a\xec\x41\xc0\x2a\xeb\x18\x76\xeb\x5d\xba\xe8\x32\x94\x24\x84\xed\x01\x5d\x63\x8e\xb2\xf3\xe1\xb5\x76\xf8\x44\x6c\x29\x2d\x6e\x27\x9e\x34\x4a\x04\xc4\x27\x89\x58\x15\x0b\xb2\xa3\xf5\x93\x08\x3b\x7d\x27\xaa\x15\xca\xe9\x80\x9c\x37\xe7\xa7\xd7\xe7\xb7\x5f\x0c\x6f\xea\xc0\x9e\x83\x01\xa7\x6e\x9c\x67\xe7\xef\x4e\xee\x2e\x6e\x67\x67\x93\xeb\xcf\x81\x38\xa5\x9f\x9e\x00\x39\xbd\xa1\xd2\x2c\xa7\x5a\x95\xe2\xd3\x41\x77\x72\x51\xa9\x19\x1f\x90\xfa\xe4\x8b\x33\xed\x52\x77\xb0\xd1\xed\xd2\x32\xbe\xee\x0b\xd1\xfa\x12\xea\xc4\x55\x92\x59\x04\xa7\xe1\x42\x66\x19\x64\x82\x7b\xf7\x3a\x65\x19\xda\x49\x05\xf9\xe3\x98\x8c\x49\xa6\x4e\xd5\xbc\x56\xf9\x07\x5c\x7e\x2b\x6b\x04\x63\x0e\x78\x6e\x27\xa0\x90\x90\x61\xbb\xab\xfa\xcc\x52\x2a\x11\x86\x01\xab\x66\xc7\xd7\xc9\xd0\x4f\x8b\xf8\x39\x91\x75\xa4\x78\xf5\xd5\x35\xdd\x8e\xab\xed\x4f\xa7\x7e\xba\x1f\xfd\x17\xe2\x21\x96\x0a\x15\xd3\xda\x69\xbe\x69\xdf\xba\xc7\xe1\x08\xc0\xbc\xdb\x95\xe4\x10\x83\x30\x25\x2f\xca\xb0\x90\xb4\x10\x58\x13\x2f\x04\x27\xee
\x25\x22\xd0\xf4\xa2\x31\xcf\x56\x14\xda\xb9\x96\x10\xa9\xe0\x44\x6e\x93\x64\x95\x29\x45\x41\x6e\x93\x93\x9f\x6f\xa6\xea\xad\xbd\xbe\x5e\xd1\x2d\x44\x95\xcb\xb0\x0b\x44\xea\xe8\x5a\xff\x4e\x43\x89\x25\xd8\x4b\xf4\x51\xaf\x05\x57\x86\xc1\xd1\xc8\x32\x51\x84\x9d\x81\xe3\x11\x22\xa5\xfa\xe1\xc0\x72\x1d\xde\x7f\xc5\x08\xdc\x6a\xa7\xc2\x8e\xd7\x57\x50\x5b\xeb\x72\x7b\x3f\x75\x11\x0d\x00\xe2\xfc\x73\xee\x9c\x96\xc4\xa7\xbe\xbb\x88\xc0\xfa\xad\x9b\xa8\x9e\x86\xd4\x6b\x2f\xdd\x62\x73\x7f\xdf\x4a\xcf\xb8\x95\x7a\xdc\xeb\xf1\x2d\xc1\x56\xda\x0a\x50\x5f\xd6\x2b\x84\x99\x3d\xd1\x49\x06\x28\x37\x3b\x8d\xad\xb7\x4e\xa3\xb0\xee\x21\xd8\x0f\x68\xea\x30\x84\xf6\x49\x0b\xa3\x52\xa8\xe0\xe8\x62\x3b\x3b\x6b\xf6\x7e\x1e\xe6\xc2\x13\x87\x55\x55\xba\x74\x1c\x24\x1e\x1e\x4a\x58\x57\xfb\x80\x27\xbf\xd9\x39\x46\x22\x94\x71\x5a\xca\xec\xc0\xba\x97\xb7\x31\xa6\xb6\x96\x95\x8d\xa3\x88\xf9\x1c\x1c\x87\x83\xe7\x80\x19\xb2\xf9\x9e\x5e\x59\xb9\xbe\xe7\x3c\x9f\xe8\x93\xc0\x0e\x97\x1f\x2e\xcf\x63\xa8\xc2\xe4\xf2\xf6\xfc\xfb\xf3\xeb\x5a\x3e\xff\xc5\x87\x93\x5a\x4e\xfe\xcd\xed\x75\x23\x15\xff\xed\x87\x0f\x17\xe7\x5b\x98\x87\xf3\xdb\xc9\xfb\x5a\xe3\x67\x77\xd7\x27\xb7\x93\x0f\xb5\xe7\xde\x4e\x2e\x4f\xae\xff\x23\xfe\xcb\xf9\xf5\xf5\x87\xeb\x46\x7f\x77\xa7\xbb\xd1\x13\xb5\xcf\x68\x77\xff\x84\xe0\x6c\x44\xad\xda\x7a\x8c\xeb\x95\xa7\x0f\x38\xc5\x3d\x91\x67\xfb\xb6\xa3\x4b\xd7\x4f\xe3\x4a\x24\x78\x30\xec\x50\x07\xed\xba\xe7\x2f\x95\x5d\x9b\xba\x9c\x1f\x26\xf6\xec\xad\x36\x7b\x0e\x24\xe0\x4e\x05\xd0\xf7\xd2\x70\xdc\x52\x65\x7a\x9c\xda\x1c\x22\x58\x4b\xde\x59\xaa\x4d\xa5\x9f\x7d\xa4\xae\x8f\x7d\xe3\x0c\x54\x5e\x7b\x18\x91\x9e\x8b\x0d\x65\xd7\xa0\xa3\xce\x1c\xd9\x80\x4c\x9d\xa2\xe0\x7e\x8c\x61\xf7\xf6\x33\xec\xce\x89\xb6\x63\x57\x4d\xe3\xf6\xb4\xa5\xdd\xec\x7b\x43\xc7\x4f\x9d\x6c\x8f\xbd\x41\xd5\x32\x60\xdc\x40\x99\x35\x64\xdc\xb7\xdc\xdc\x0f\x1d\x37\x75\xb2\x3d\x6e\x50\xfb\x9e\x34\x6e\x70\x78\x97\xed\x34\x3a\x03\x84\x58\xdc\x4c\x7d\x78\x3e\xc7\xdf\x3f\x12\x95\x0e\xef\x37\x46\x7b\x00\x3e\xaf\x79\x99\xf3\xfe\x81
\x0c\x18\x8d\x3f\xae\xbc\xc1\x2a\x7f\x03\xbf\xc2\x17\xce\x0b\xc1\xef\x53\xfd\x48\xeb\xd1\x44\x86\xb2\x5e\xd2\xbc\x3e\x41\x56\x86\xbb\x2b\xa2\x2c\x28\x02\x85\x28\xb5\xd0\x3c\xc0\xe4\x24\xf1\xa2\xa3\x0e\x16\x55\x9d\x6e\x12\x11\x01\xf5\x93\x0a\xab\x33\x55\xa8\xcd\xb7\x55\xae\xb6\xab\x6a\x47\x44\xd4\x21\xf0\xa9\x5e\x87\xc6\xe0\xba\x89\x16\x96\xf2\x80\xaa\x02\xc0\x74\xf3\x02\x6c\x26\x98\x10\xa9\xc0\x99\x5c\x58\x83\xa7\x10\x89\x34\x22\x2a\x96\xd7\x7a\x63\xff\x72\x58\x29\x94\x92\x97\xad\x6e\xd7\xde\xfe\x70\x9e\x94\x15\xcf\x18\xa4\x2b\x11\x03\x23\xfa\x2a\xf1\x2f\x09\x57\x98\x1a\x53\x8a\x75\x0e\x59\xfd\x71\x4e\xc7\x54\xfd\x0c\x40\x09\x5c\x82\x17\x86\x7d\x0f\x90\x07\xf7\x30\x5d\xc2\x6b\x5e\xc2\x5d\xfc\x07\xec\xc3\xff\x36\x9e\xaa\x5a\xf1\xa9\xe8\xad\x5a\x1d\xaa\xf1\x54\xb9\x6a\x1d\xa9\x4e\xcc\x18\x2c\xbe\xb1\x2e\x96\xc7\x54\x46\xde\x6e\x76\x7d\x3f\xd7\xfa\xfe\x58\xa8\x63\xf0\x49\x95\xc7\xbc\x2a\xf5\x31\xc0\xa5\x70\xfd\xcd\xb1\xab\xf7\xec\x0a\x66\x9b\xe3\x95\x7c\x10\xf0\xff\xc6\xab\x72\x9d\xfd\x93\xc9\x57\x9f\x8e\x96\x59\x71\x64\xdf\x3d\x8a\xdf\x3d\x72\xef\x1e\xb9\x77\x8f\xec\x6b\xf8\xff\xf2\x0d\x86\x77\xc4\x27\x6e\xef\xb2\xd1\x54\x49\x65\x44\x51\x82\xf6\xf3\x58\xc8\x32\x54\xf9\xda\xb0\x17\xff\xf9\x9f\x6c\x5c\xf0\x47\xcc\x88\x3d\xe3\x25\xbf\x42\xff\xe2\xdf\xfe\xf6\x02\x02\xaa\x98\xc5\x94\xf3\xe2\x97\x4a\x94\x53\x65\x84\x3d\x84\xec\x7f\x4d\x15\x44\x60\xd7\x9b\x59\x89\x7e\x57\xf4\x41\xa6\x86\x7d\x87\x6d\x4e\x90\x8d\x34\x35\xb6\xa5\x8e\x74\x02\xc9\x6d\x63\x3d\x5d\xf4\xbf\x64\x67\xf4\xfc\x80\x63\xfd\x4b\x56\x3f\xd5\xae\xce\x94\xf9\x25\x83\x0b\x34\xd3\xdc\x81\xb5\x98\xdf\xbc\x60\x27\xd3\xe0\xda\xce\xc8\x16\x34\xe0\xb3\x86\xe9\xdb\xcf\xca\x0d\x32\xa2\x3b\xcf\xfd\x96\x18\x81\x58\x41\x88\x43\x40\xf4\x5c\xda\x13\x72\x83\x9e\x50\xd0\xdc\xf0\xcb\x41\x27\xa5\xd0\xb9\x6f\x0f\x1d\x17\xe6\x77\x6f\x8e\x8f\x47\x6c\x69\xe0\x7f\xe6\xbf\xc0\xff\x00\x7a\xe8\xb9\x48\x7d\xb7\x26\xd3\x03\xe1\xb6\x57\x79\xff\x4a\x3c\x07\x8a\xee\x4b\xf0\xc8\x37\xb6\xe9\xdb\x4a\xa5\x99\x08\xa9\x8d\xb5\x90\x48\xa6\xed\x4a
\xba\x85\xda\xae\x3c\x04\x6b\x3c\x17\x09\xb7\x82\x6f\xab\x6f\x04\x97\xea\x45\x29\x14\x7a\xc3\x8a\x50\xe8\x92\xa3\xe7\x0a\xd4\x62\x80\x42\xf2\x92\x20\xe7\x58\x2b\x0c\x3a\x01\x62\xf6\x51\xf3\x27\xb6\xd1\x15\x71\x8c\x03\x73\x6e\x2a\x92\x0c\x0a\x39\x38\xf6\x20\x56\x88\xb2\x2a\x14\xe3\x2c\xe7\x2a\xe5\x06\x76\xe0\xa2\x80\x68\x67\xc1\xf8\xf6\x40\x47\x08\xc7\xd5\x55\x09\x9c\x58\x88\x2c\x88\x67\x02\x49\xe0\xa3\x31\x8f\xa2\x41\xe0\x9d\x00\x5c\xd4\x5b\x2f\x8e\xa7\xca\x95\x62\x24\x2c\x1c\x7a\xca\x12\x9d\x6f\x88\xf1\xa8\x39\xe9\xd2\x79\xce\x68\xba\x47\x01\x6f\xd2\x7c\x76\xc4\x64\x3d\xb4\x06\x7c\xf3\x65\x54\xdd\x1e\x3d\x7a\x86\xbd\x14\x2a\xd1\xa9\x28\xcc\x2b\x7b\x0c\xa5\xb7\x3b\x50\x7f\x90\x26\x2c\x06\x48\x29\x7b\xb9\x91\xb7\xd0\x36\xef\x0b\x4c\xd9\xd9\xa9\x31\x94\xb7\xe9\x39\xfb\x8f\xca\x6f\x1d\x05\xd3\x36\x5e\xfa\xcf\x2f\x8a\x88\x89\x71\x9d\xce\xe6\x7c\xba\x0b\x02\x8f\x6c\x2c\x71\xb1\x51\xd4\x71\x48\x39\x71\xa5\xc4\x65\x09\xc5\x41\x0b\x61\xca\xa9\xa2\x1b\x78\xc4\x16\x82\x5b\x3d\x6f\xc4\x12\xf3\x80\xc2\x18\xaf\xfb\xf2\x51\x07\x0c\x8e\x2b\x6f\x03\x60\xd8\x5a\xe3\xc1\x49\x8c\x8f\x71\xca\xc0\x46\x80\x41\x97\x85\xee\x55\x15\x98\xac\x56\x81\xf8\x84\x79\x70\xd5\x52\x9a\x15\xd6\xe2\x62\x3d\x30\x13\x1b\x0c\x14\xb3\xe6\x38\xf0\x07\x2b\x78\xf0\xeb\x10\x06\x12\x09\x47\xd0\xb8\x09\x4b\x8b\xe7\x2c\xc4\x70\x63\xca\x7a\xf0\xcd\x74\x1d\xaa\x1d\x13\x01\x03\x78\x9a\xdf\xc2\xbe\xba\xd7\x61\x65\x44\xe1\x4a\xb9\xe0\xb7\x22\xc1\xe4\x4a\x16\xe9\x51\xce\x8b\x72\xe3\xb6\x6f\x26\xe7\x50\x01\x22\x93\xf7\x82\x9d\x14\x85\x7e\x7c\xee\x59\xe8\x14\x2d\x5d\x16\xf6\x21\x48\xf6\xa1\x56\x7e\x2b\xbd\x6c\xd3\xdd\xf1\x34\x2a\xdb\x2e\xc7\x47\x6b\x3f\x85\x28\x8b\xcd\xcc\x6e\xc4\x75\xde\x29\x29\x7a\x25\x4d\xf4\x57\x72\x87\xb1\xe4\x36\x5c\x18\x9d\x2c\xb9\xb5\x55\xfd\xed\xb0\xe4\xb6\x10\xe0\x6e\xb3\xe4\x4e\x2e\x27\xb7\x93\x93\x8b\xc9\xff\x69\xb4\xf8\xf3\xc9\xe4\x76\x72\xf9\xfd\xec\xdd\x87\xeb\xd9\xf5\xf9\xcd\x87\xbb\xeb\xd3\xf3\xdd\xb4\x57\xdb\xa3\x0f\x2a\xf8\x11\x8b\xfb\x79\xc3\x6e\x23\xa0\x06\x26\x1b\x90\xfe\x4d\xa5\x81
\x61\x57\xd9\xc3\x2c\xd5\x72\x04\x07\xf5\x0d\x3b\x2f\x8a\xc9\x9a\x2f\xc5\x55\x95\x65\x00\xa7\xc2\xcc\x9e\xd3\x42\x80\xe1\x39\x62\x57\x3a\x9d\x44\xef\x41\x3a\x62\xeb\x67\x40\xff\x3c\x4d\x0b\x61\x0c\x76\x3f\xa2\xfe\x23\xf0\x90\x4f\x75\x24\xf0\x1c\x7f\xe0\x32\xb3\xf6\xdb\x1b\xf6\x96\x27\xf7\x7a\xb1\xc0\xf4\x99\x91\x4f\x9c\x62\xbf\x54\xba\xe4\x4c\x7c\x4a\x80\xea\xad\x7d\x9f\x5c\xe8\xe5\xaf\x00\x55\xee\x11\x9e\xea\x30\x52\xa0\xd4\xdd\xac\xfd\x3a\x6f\x17\x04\xf4\x95\xef\xf1\xd5\x77\xf8\x66\xbb\x83\xb2\xcc\x9e\x21\x3d\xfe\x42\x2f\xdb\x0b\x0f\x81\x76\x4d\xd5\x92\x28\x90\x90\x10\xbb\x88\x5e\x32\x23\xd5\xfd\x54\xfd\xbc\x12\x8a\xe9\xaa\xc0\x3f\x81\x99\x6f\xd5\xcc\xac\x32\x2b\x01\x15\xba\x47\xec\x51\xb0\x35\xdf\xa0\xda\x0c\x36\x81\xaf\x96\x02\x5b\x06\x6e\x11\xfb\x76\x26\x95\x95\x16\xb9\x74\x79\x09\xcd\xa5\x7f\x0e\x8b\xcb\x11\x1d\xf2\xc3\x79\x88\x77\xdd\xa7\x35\x7c\x1e\xb8\xca\x02\x6e\xd2\x01\x84\x48\x72\x43\x51\x59\xad\xef\xab\x3c\x50\xa2\xbe\x70\xc1\x49\x98\xee\x07\x2d\x53\x96\x56\x79\x26\x13\x2f\x77\x1f\x75\xd1\xc9\xfb\x8c\x09\x34\xfd\x6f\x9d\x66\x5a\xd8\xae\x0f\x6b\xc9\xce\x89\x90\x74\x3b\x18\xa0\x3f\x33\x07\x36\x93\x2a\xc9\x2a\x28\x33\x57\x19\x51\x1c\xf9\xd2\xd1\x3e\xd7\xef\xb7\x4f\x92\x1d\x48\x38\x0f\x4f\x6b\x8b\x93\xce\x33\xbd\x94\x09\xcf\x62\x70\x73\x40\x45\x78\x16\x5e\x77\xec\xa9\x98\x30\xe4\x41\xb8\x01\x75\x12\x69\xe5\x85\x00\x22\xe8\x19\x88\xf2\x19\x89\xbb\x43\xc6\xbd\x60\xd6\x40\xc7\x71\xc5\x1c\xb9\x2e\xbc\xe0\x6e\xb8\xd0\xb7\xab\xc4\x06\x2a\xa6\x50\x00\x01\xd0\x8f\x4a\x14\xa0\xc1\x02\xec\xc3\x7e\xa9\xd2\xa0\x9b\xf8\xea\x6c\x1e\x9f\xec\xaa\x13\x2e\x3c\x10\x1b\x33\x67\x97\xf2\x41\xa8\x2f\x4f\x6a\x1e\x75\x90\xf0\x64\x25\x66\x4e\x2f\x7f\x6e\x91\xe5\x2f\x80\x81\xc2\xca\x95\x49\x89\x45\xa9\x0f\x6f\x82\xe9\x84\x23\xde\x96\x5d\x18\x48\xdc\x91\x91\x65\x07\x31\x4b\x45\x72\xff\xc5\x45\x73\x00\x59\xb9\x81\x30\xce\xce\x44\x72\xcf\xee\xae\x27\x98\x0d\x2c\x4b\x66\x45\x81\x59\x85\xb2\x4f\x9d\xb6\x5b\xc9\x97\x9f\x81\xc2\xaa\x6f\xdd\xaa\x50\xaa\xc0\x57\xeb\xb3\x03\x22\x40\x14\xe4\x4b
\x5a\x21\x49\xb9\x34\x00\x04\xe3\xa5\xab\x66\x04\x8e\x78\x66\xd6\x50\xbc\xa8\x2a\xa3\x8a\x7f\x19\x9f\x8b\xac\x83\xb8\x33\xd7\xe9\xcc\xc5\x49\x0e\x05\xf3\x6c\xb5\xe5\xfc\x18\x14\x75\x74\x79\x0c\xdc\x6a\xac\xb7\xf4\x20\xbb\xff\xd6\x44\xf4\x1a\x3a\xe6\x0f\x07\xbb\x9e\x1b\x48\xef\x5e\xc8\xa5\x8b\xb6\xc9\x05\x95\x58\xc2\x84\x7e\xab\x07\x83\xbc\xb4\x2d\x5d\xe9\x94\x60\x7a\x9e\x0b\xcf\x6a\x41\x82\xbc\x27\x01\x57\x11\x0f\xc1\xe1\x00\xa1\x5f\x7b\x22\x04\x4f\x99\x5e\x90\x37\x31\xcf\x33\x09\xcc\xd0\x29\x92\xd0\x03\x7b\x86\xa9\xa3\xe3\xe3\xd6\xdc\x60\x23\x92\x8f\x2b\x07\xc4\xeb\x2a\xc6\x0b\x02\x03\x33\x18\x66\xc0\x06\x37\x7b\xe0\xdd\x64\x6a\x9f\xbd\x62\x5a\xc7\x78\x7c\x34\xb9\x4e\x09\x5b\x23\xed\x23\x5f\x01\x5e\xeb\x2e\x21\x3f\xe1\x59\x52\x51\x9c\x0c\xca\xe5\xbb\x2a\xf8\xbb\x11\x84\x21\xea\x67\x17\xba\xee\xf5\x6f\x2a\x99\x87\x56\x57\xf4\x09\x5a\x4f\xf5\x29\xec\x76\x2f\x2e\x33\x3d\x87\x9d\xd3\x8d\x12\xdc\x71\x63\x59\x71\x5d\xc8\x74\x88\xbe\xe3\xe6\xe4\x83\x7f\x75\xd7\x00\x3f\x38\xd7\x8f\xef\xc9\xed\x7b\x46\x85\x0c\x1a\xcc\x8d\xc3\x28\x10\x16\x54\x55\xb5\x6e\x9e\x94\x54\xc6\x03\xb6\x95\xbf\x9f\x3a\xfc\x0c\xf5\x6f\x39\x68\xa1\xb7\x99\x62\xf6\xcc\x65\x20\x97\xd9\xbd\xc8\x07\xd0\x7d\xa0\x28\xf3\x9c\x1f\xdd\x9e\x45\x95\x8a\x74\xf6\x84\x6f\x38\xa7\x77\xfb\x7d\x8b\x9f\x69\x1c\x1e\xf8\x00\xd5\x91\x55\x15\x52\x5e\xa4\xe1\x3b\x46\x70\xde\x13\x9e\x83\x1b\x1e\xc2\x1a\x0f\xaf\xc7\xae\x8f\xeb\x90\x5d\x64\xe5\x25\xe6\xfc\x23\x7e\x5b\xb7\xd4\xc0\xd9\xb7\x8f\xfc\x26\x45\x78\xb7\xdd\x39\x61\xbb\xd6\xf2\x6e\x7a\xed\xdd\xe6\x0e\x73\x02\xfc\x90\xcd\xf5\x39\x64\x47\x55\xea\x10\xed\x81\xef\x99\x00\xed\x70\x9c\xd1\x07\x02\x72\x92\x76\x20\x45\x9c\xfa\xed\x84\xd0\x00\xfc\xf1\x20\x04\x74\x5e\x08\x17\x37\xdc\x88\xd2\xf3\x3a\x64\xae\xae\x20\x84\xc5\xfc\x57\xd7\x89\x6d\x1c\x77\x85\x27\x23\x83\x20\x16\xa9\xfa\x89\x5e\xe7\x5a\x01\x2c\x09\xb3\xd4\xa6\x8a\x1a\x77\xd5\xe1\x7d\x64\xad\x96\xea\x38\x22\x87\x26\x26\xce\x08\xa3\xb3\x07\x0a\xa1\x46\x45\x4c\xa0\xae\xa4\x1d\xe0\xa9\xb5\x0d\x75\x81\x04\x5b\xee\x66\x87\x4c
\x80\x46\x89\xf4\x42\x2c\xa5\x29\x45\x9c\x1d\x1a\xbf\xff\x6c\xd5\x6c\x6b\xce\x93\x5d\x53\xdf\x59\xcd\x76\x9f\x15\x64\xe5\xd3\x80\xf1\x6c\x72\x91\x4e\xfc\x7b\xbb\x37\x43\x23\x81\x3f\x88\xc3\xda\x7d\x87\x7b\x00\xad\x3f\x83\x54\x5f\xc6\x97\x1f\xf1\x8b\x44\x24\x4c\x3c\x00\x1a\xed\x12\x2d\x2b\x5e\x70\x55\x0a\x61\xa6\x8a\x02\xcf\x48\x59\x17\xb3\xb2\x34\x80\x90\xde\xb6\x49\xb4\x29\x91\x01\x0a\x5e\x59\x70\x99\x55\x45\xa7\xbb\x01\x77\xe5\x93\x68\x27\x76\xcd\xd2\x29\x34\xcb\xda\x16\xcd\x27\x30\x47\xa7\xc8\xb3\xa6\x34\xc3\xc6\xf5\xfc\xde\x8e\x4f\x70\x97\x4b\xff\xf5\xf6\xbe\xe6\x8e\x9c\xe6\x6f\xcd\x2c\xd7\x03\x24\xde\x8f\xdf\x9a\x2b\xdd\x91\x0d\x6e\x7e\xd9\xf2\x89\xee\x80\x4f\xfc\xd2\x55\x90\x85\x9b\x7b\x88\x3c\xee\x73\xc5\xf4\x62\xe3\xdc\x1b\x9f\xec\x94\x5d\xb0\x6b\x57\x5c\xa5\x99\x55\x79\x79\xd9\xe4\xbd\xf6\x38\x6f\x6b\x12\x95\x4e\x38\x76\x27\xf5\x41\x8e\xcc\x2c\xd9\x4a\xb0\xdc\x37\x4f\x8d\xcc\xcc\x9d\x58\xca\x46\x2f\xf5\x7c\xc9\xb6\x3c\x9d\xa0\xc3\x50\x19\x64\x7f\x60\x7f\x75\xfd\xe5\x3c\x1e\xfb\x17\x52\x5f\xea\x67\x6d\x21\x97\xbf\x01\x47\xc2\xfb\xed\x2b\x21\x21\x99\x43\x17\xb5\xcf\x6e\x38\x50\xea\x40\x22\x99\x95\xda\x31\xe3\xf8\x54\x51\x39\x78\x44\x17\x40\x58\x19\xf9\xd6\x0c\x7b\xed\xb3\x8b\x5f\xff\x8b\x63\xdb\xda\xb0\x05\x6c\x2a\xa0\xb4\xd3\x49\x52\x15\x10\xfa\x27\xf7\x24\x13\x78\x09\x9b\x41\x44\x32\xa0\x7a\x78\xc0\x16\xea\x89\x6d\x6a\x92\xf7\x47\xd7\x3e\xea\x16\xdc\x90\x58\xd8\xde\x5f\xfa\x54\xaf\xac\x30\x25\x33\xa5\xc8\x5b\xc5\x6f\x4d\xbb\x94\xeb\x40\xfd\x7d\x88\x7a\xd9\x4a\x30\x3c\x8c\xb7\x7b\x47\x8c\x7e\x93\x8b\x13\xa5\x74\xd9\xcc\xa2\x19\x3c\x4c\xee\x5b\xe9\x79\xc0\x07\x5c\x99\x27\x91\x63\xeb\xf7\x37\x1f\x2e\x59\xce\x37\x80\xd0\x2c\x35\xc3\x47\x81\x16\xb5\x29\x4e\xf7\xed\x93\xfa\xc7\xd7\x65\x1f\xae\xbc\x83\x7a\xb7\x47\x51\xa8\xc7\x6d\x95\x16\x76\x36\x1d\x1c\x2b\x59\x0b\x9d\x1d\xe5\x19\x57\x11\x08\xdf\x8c\x59\xa3\xfb\x18\x75\xe1\xe3\xaf\x84\x6b\x83\x01\x80\x57\x85\x76\x6c\x51\xb5\xc2\xb4\x81\x1d\xc8\x6d\xfb\xc3\x80\x16\x9d\x92\x6c\x27\xfc\xf4\x3d\xd6\xaa\xc1\xca\x0d\xc8
\xf1\xe1\xc0\x23\x1e\x7f\xc4\x0d\x40\x83\x3b\x79\xca\x79\x92\x71\x63\x76\x62\x89\x3e\x0b\xe1\x7d\x94\x5b\xb9\x5f\xc8\xd6\xc7\x89\x60\x47\x60\x60\x41\xeb\xd9\xff\x0c\x9c\x0e\x4e\xc0\x86\xd2\x74\x91\x55\x12\xd5\xac\x20\x80\x06\xb1\x5a\xc1\xfb\xc8\x57\x79\x2f\x36\xce\x0f\x47\x02\x95\xaf\xc5\xc8\xbb\x84\xbd\xcf\x33\x82\x26\x6e\x37\x3c\x55\x80\xdd\x7d\x17\x0f\x8f\xbd\xd3\x7a\x84\x28\x52\xea\x9c\x63\xb3\x3c\xc6\x61\x4d\xd5\x3b\xad\xc7\xdc\x9b\xda\x34\x7e\x12\x8a\xcd\x0e\x09\xbb\x05\xc8\xc8\xc6\x72\xf6\x3f\x9b\x3f\x48\x85\x45\x14\xe5\xda\x9a\x79\x34\x4f\xb0\xa3\x60\x40\xae\x66\xbf\x7e\x34\x2c\x45\xe2\x9b\x4a\x9a\x15\x04\x87\x30\x1a\x0b\xfd\xd3\xc5\x87\xb0\xb1\x82\x2b\x63\xcf\x30\x04\x94\xc4\x83\x20\xaf\x72\x0d\x09\x31\x39\xbb\xf0\xe0\x2a\x3c\x97\x54\x60\xa4\xe3\xb4\x45\xa6\xd1\x21\x2e\x04\x00\xc5\x0f\xa0\xdd\x23\x37\xec\x7b\x9e\xef\x4a\xd9\x3d\xb8\xc5\x7d\xab\xe4\x69\xbf\x9a\x76\x1f\xd4\x5b\x87\x4a\x8b\xb5\xbc\xdd\x78\xf6\xee\xd4\x17\xbe\x18\xa9\x30\x50\x6f\x37\xc8\x40\x51\xb1\xff\xba\x89\x18\x38\x3d\xb0\xd1\x5b\xac\x56\xb0\x43\x1d\x3b\xa0\x0e\xc4\x23\x3d\x66\x37\x42\xb0\x8f\x30\x53\xb6\xb3\x8f\x54\x27\x15\xb0\xda\x25\x97\xad\x65\xec\xe0\xe9\x89\x5a\xe8\xc3\xe4\x7f\xb1\xdc\xc2\x02\x1f\x34\x2b\xed\xe3\x3c\x14\x6d\x0c\xf1\x08\xf5\x79\xc9\x4f\x7a\x5d\x0c\x8d\xb5\xbe\x0a\x5e\x31\x4a\x89\x76\x23\xb5\x8a\x23\x2c\xf1\x53\xe8\xf5\x1a\x9b\xc4\x7e\xe5\x08\x29\xe3\xef\x95\x7e\x54\x28\x8f\xa9\x27\xf6\xd2\x9e\x3f\xd0\x59\x30\x7a\x85\xfa\x6a\x85\xd2\xf0\x15\x70\xd8\x9f\xf8\x7f\xb3\x1b\x0c\xd4\xe3\x98\xa1\xc0\x99\x01\xad\x9c\x4a\x93\xc1\x05\xfe\xf2\x64\xc4\xde\x8e\xd8\xe9\x88\x8d\xc7\xe3\x57\x23\x26\x78\xb2\x72\x23\xc2\x57\x50\xf4\x97\x7c\x69\xdb\xa6\xe2\x44\x8b\xa8\x03\x28\x62\x68\xf5\x13\x47\xd5\xc8\xc3\x53\x91\xef\xcf\x7d\x02\x26\x90\x53\xb6\x1b\x81\x9a\x92\x95\x96\x61\x50\x80\x8f\x17\x89\x2e\x1c\xc2\xde\x94\xba\x70\x68\xe1\x07\x5e\x70\xa9\x80\x57\x83\x6f\xe7\x4a\x50\xcf\x11\xb3\xbe\xf8\xc4\xd7\xf0\xfd\x52\x79\x72\x61\x3b\x4d\xb7\x7e\xfc\xe5\x26\xa7\x68\xe0\x63\x21\xcb\xd2\x2a\x64\x66
\xaa\x6e\xd8\x9b\xef\xd8\x49\x9e\x67\x82\x9d\xb0\xff\x62\x6f\xb9\xe2\x8a\xb3\xb7\xec\xbf\xd8\x29\x57\x25\xcf\x74\x95\x0b\x76\xca\xfe\xcb\x4e\x9b\x6d\xef\x52\x5b\x0d\x68\x33\x62\x9c\xa9\x2a\x43\x45\xef\xa5\x43\xe2\xbe\xf2\xdf\xc5\xc3\xea\xcc\x45\xf9\x28\x84\x62\x46\xaf\xe9\x2a\xfc\xa3\xbf\xfd\x8d\x54\xcb\x4c\x94\xb4\x1f\xea\x98\x69\xec\xe0\x08\xbe\xf4\xcd\x54\x79\x6f\xfa\x1f\xed\x88\xff\xc8\xfe\x8b\x5d\x56\x59\x66\x87\x64\x05\x8d\xdd\x48\x6f\x98\xcb\x61\x13\x6a\xfc\x28\xef\x65\x2e\x52\xc9\x21\x8b\xcd\xfe\xeb\xf8\x16\x56\x7b\x56\x05\xc2\xd2\xf8\x4c\xfb\xa2\x71\x87\x88\x9e\xcf\xc2\x88\xe1\x4b\x1a\xc6\xda\x4a\x27\x54\x26\x7e\x75\xb8\x12\x1c\x68\x9a\xe9\x3c\x90\x8d\x82\x05\xff\xe2\x30\x6a\x7b\xff\xbe\x36\x59\x6e\xff\xab\x95\xa4\xa4\x57\x8d\xb2\x5d\xf3\x11\xc6\x08\xca\x29\x2e\x4e\x48\x21\x05\x43\x06\x32\x1e\xf1\xdc\x6d\xa0\x30\x89\x6b\x1b\x8d\x63\x0c\xc2\x5b\x83\x1f\xb5\xd1\x92\x2f\x47\x2c\xf7\xd5\xae\xdc\xa1\xf2\xe1\x77\x3c\xc7\x58\xd9\x81\x94\xcd\x97\x0e\xe6\x64\xf7\x32\x65\x49\x1e\xa7\x7a\xcd\xa5\x7a\x05\x7d\x38\x82\xbf\x3d\x13\xd5\x62\xae\xec\x9f\xa1\x5b\xbe\x13\x73\xd9\x5d\x80\xa0\xae\xec\x34\x0a\xcd\xb5\x1d\x87\x03\x2b\xad\x85\x3a\xac\x5f\xd0\x1c\xfa\x69\x6b\x8b\xee\xb9\xf2\xb6\xea\xac\xd5\x38\x5e\x40\x97\x0f\x3c\x77\xbd\x10\x00\xbe\xbe\xda\x4f\xf5\x42\xb4\xb5\x29\xd6\xb2\x57\xc5\xde\xc6\x60\xef\xc8\x12\xc3\xec\x6c\x2b\x26\x65\x76\x6c\x45\xe5\xf1\xa5\x56\x82\x71\x63\xe4\x12\xb9\xf9\xc0\xed\x87\xa5\x6e\x9d\x52\x76\x5b\x37\x19\x22\x11\x04\xfa\x99\x1d\x12\xe2\xba\x4b\x2b\x85\xed\x12\x64\x9b\xa9\xb2\x6f\x90\x46\x00\x39\x5e\xd2\x53\xb8\x63\x6f\xc4\x90\xee\xfa\xa2\x0b\x31\x6a\xbc\x65\x83\xed\x22\x90\x38\x60\xc3\xd1\x49\x3c\x20\x2e\x78\x19\xd1\x97\x52\x6b\x8e\xdb\x0a\x41\x3f\x73\x91\x69\xb5\xb4\xbb\xa2\x4b\x08\x83\x14\x78\xa6\x21\x60\x63\x9d\x23\xb0\xca\x0a\x3d\x42\x4b\x62\xf5\x14\x99\x06\xc7\x9f\xa9\xe6\x56\x8f\xf3\x31\x29\xaf\x8d\xd0\xc7\x75\xb1\x69\x1c\x06\xae\xba\xb3\x32\x58\x17\x0e\xde\xe7\xe3\x9d\xa8\xb8\x04\xa6\x29\xfc\xa2\x2e\x1c\x49\x31\xc8\x95\xd3\x11\x7f\x1f\x31
\x5d\x20\xc9\xa8\x8b\xb3\x7b\x96\xb0\xed\xde\xbb\x8f\xf4\xce\x9c\x94\x76\x0f\x2d\x45\x73\xb7\x58\x4d\x7a\x9c\x86\x5f\x33\x3d\xa5\x4f\xd2\xca\xbb\x93\xc9\x45\xe3\xb9\xed\xa4\x95\x96\xcc\x96\xdb\xc9\xfb\xf3\xb3\xd9\x87\xbb\xdb\xad\xe7\x6c\x6b\xf4\xa7\x3d\x79\x2b\x9d\xb3\xf7\x1c\xc8\xfd\x5f\xb0\xd2\xda\x4c\x2f\x1c\x89\x41\xff\xeb\x79\xab\xd6\x5d\x3f\x80\x68\x19\x59\xd7\x71\x4d\xb8\xed\x8d\xd3\x49\xc5\xa2\x66\x14\x11\xee\x37\xd8\xe6\x84\x7d\x50\xef\xf0\xf5\x2b\x9d\xc9\x64\x37\xde\xdc\x5d\x96\x56\xab\xda\x06\xf0\xce\x05\x24\x60\x90\xc3\x97\x06\x85\xf6\x59\x29\x92\x32\x20\x1e\xb6\x3f\xee\xff\x69\x8c\xeb\x7e\x0f\x0c\xfa\x61\xfd\xb4\x41\x09\x75\x8f\xa1\x80\x9b\x1d\xb8\xad\xa1\xa4\x0b\x6a\xb9\xe0\xd9\x05\x99\x97\x70\x8a\x8c\xd5\x66\x1e\xae\x87\xc7\x95\xce\xc8\x1f\x8b\x3c\xe1\x53\x95\x8b\x22\xd1\x80\x0d\x45\x0a\x1a\xcd\x92\x95\xcc\xd2\x50\x37\xed\x25\x24\xd3\x00\xe4\xfd\x15\x95\x00\x16\x1e\xe3\xe3\x9a\xdf\x71\xe7\xbb\x6d\x77\x86\xa7\xfb\x20\x7c\xdc\x73\xa2\xe3\x77\x6d\xfb\x9f\x09\xc5\x8d\x53\x41\xcc\x7e\x0d\xb4\x06\xa8\xfd\xf1\x78\x06\x85\x74\xec\x65\x4f\x25\xb1\x92\x60\x36\x97\x8d\x75\xa5\x6d\xd6\x9c\x4a\xe0\x7b\x47\x3f\x3a\x42\x15\x8d\x80\xe1\xac\x05\x47\x4d\x30\xb0\x2f\xd3\xa2\x4e\x55\xc0\xa7\xbc\x30\xb1\x56\xd8\xba\xce\xe8\x7d\x77\xf8\xfb\x11\x7b\x51\xfb\xd0\x17\xc0\x07\xae\x34\xf4\x47\x18\x82\xda\xd4\xc0\x76\x1d\x31\x59\x4e\x95\xb5\xd9\xec\xce\x2c\x44\x26\x1e\xec\xe8\xe2\xe8\x10\xa1\x2a\x9d\xe7\xc4\x7d\x36\xa4\x70\x71\xc7\xfc\x41\xdb\x86\x0e\x61\x11\xf3\x4a\x63\xf0\x3c\x15\xc6\x6a\xad\x50\x11\x4b\x7c\xb2\x07\x40\x42\x88\x16\xe1\x77\xa9\x50\x6e\x7c\x80\xca\x83\xb1\x8d\xa7\x6a\xb2\x00\xfa\x05\x20\x7d\x48\x53\xf4\x41\xb8\x1a\x49\x9e\xe4\x53\x52\x34\x48\x93\x47\xc6\x2d\x04\x55\xb0\xc6\x93\x24\x1e\x44\xb1\x29\xc1\xa5\x0f\xf3\xaa\x04\x2f\x57\x4c\x96\x23\x60\x67\x75\x92\x72\xaa\x78\x9a\x52\xd6\x3a\x36\x17\x99\xb3\x9d\xeb\x4c\xbf\xcf\xf5\xc3\x2e\xb5\xfa\x50\x7c\x31\x9e\xea\x3c\xe3\x6a\x86\x37\xc8\xaf\x80\x30\x8e\x8a\x8b\x77\x41\x4d\xaa\xf9\xcc\x33\xca\x3d\xcb\x38\xbd\xbc\xbf\x76
\x00\x6b\x32\x6d\xaa\xb9\xeb\x68\x54\x03\x90\xcf\x03\xf9\x88\xf7\xd2\x11\xba\xab\x60\x0e\x01\xd3\x5f\x0a\x04\xf0\x31\x6f\x20\xc1\xdc\x6e\xdd\x87\x3e\x76\x3b\xe0\xb7\x8a\x0f\xed\xb3\xf2\x8d\x3b\xa4\xb9\xec\xc3\xa1\x89\x5b\x1a\xe2\x93\xe0\x89\x7b\x86\xf5\x79\x21\x8a\x9d\x5e\x9c\x6d\xa8\xa2\xfb\xda\x28\xbe\x4f\x09\x0e\xe8\x85\xf5\x0e\xb4\xf6\xe2\xf1\xb1\x15\xa8\x5b\x50\xfc\xcf\x19\xa1\x00\x39\xd5\xd7\x4f\x13\x68\x4f\x60\x5c\x63\x36\x51\xcc\xa9\x7b\x23\xf6\x02\x37\x96\x79\x41\x0e\x68\x6b\xe7\x72\xc0\xd1\x8a\xe2\x41\xa4\x74\x7a\x88\x28\xa2\x09\x85\xc3\x74\xbd\x70\xdc\x30\x0e\xb8\x93\x55\xf8\xb3\xce\xcb\x5b\x09\xe9\x82\x4f\x61\x84\xc1\x18\xf2\x1c\x1b\x70\xd9\x2e\x91\x2b\x94\x3e\x17\x62\x19\xe1\x83\x5d\xb4\x93\xbd\x75\x2f\xda\x29\xca\x2b\xba\x4f\xdd\xef\x4c\x17\x53\xe5\x5a\x23\x87\xb4\xc1\x32\x86\xcd\xa6\xa2\xec\x25\xd2\xf9\xa3\x9d\x0a\x50\x00\x57\xb9\x12\x0a\xa2\x06\xea\xf3\xa6\x14\x00\xac\xd6\xdc\xe3\x64\xa1\x56\x46\xe8\xcd\x2a\x1e\x76\x83\xaf\xf1\x9a\x6f\xd2\x23\x67\x99\x9d\x14\x59\x3a\x36\xe6\x28\xb3\xd0\x54\xc0\x29\xbe\xa8\xac\x30\x8a\x88\xd7\xa7\xca\x4e\x1e\x5b\x48\xc8\x30\xa1\x79\x99\xaa\xf7\xda\x38\x22\x1b\x13\xe6\xc3\x01\x0b\x68\xda\x5e\xf8\x02\x9e\xf4\x87\x33\xb8\xb4\x29\xe2\x83\x94\x74\xfe\x6a\x81\x94\x52\x62\xa3\xda\xe8\xaa\x08\x1f\x95\x70\x35\x55\x7f\xb1\xd3\x03\xe6\x14\x57\x6e\x59\xf5\x02\x8f\x30\xac\x20\x84\xca\x3e\x62\xa3\x2f\xff\xe5\xd5\xc7\x57\x98\x02\x56\x19\xa8\x99\x3c\xaa\x5f\x20\xbe\x06\x47\x95\x65\x80\x43\x70\x5f\xe0\x79\xa0\x42\x17\x3b\xd1\x82\x64\xd4\xcd\x54\x5d\xc5\xe8\x73\xd0\xfb\xb9\xf5\x4f\x58\xc2\xcb\x64\x75\xe4\x74\x39\x12\x63\xee\xf6\xa3\xe5\xc3\x5c\x2d\xab\x69\xb1\xd6\x32\x14\xd6\xe0\x2c\xd6\x9e\x18\xb7\xb6\x5f\xec\x27\x80\xfb\xff\xb6\x59\x93\xcd\xf3\x76\xe3\xe6\x44\x1c\x50\x5d\xcf\xf3\x8f\xbb\x8a\xa8\xc1\xe2\xa4\x18\x89\xe2\x6b\x91\xb2\x17\x90\xac\xfc\xc2\x2d\xfe\x54\xe5\xf3\x71\xb6\x59\x94\xc4\xae\x68\x27\x65\x0c\xb5\x03\xf7\xdc\x72\xb3\x74\xdb\x4c\xda\x33\xd9\x9d\x86\x56\xbb\xae\xe3\xe7\xc6\xf7\xd4\x5f\x61\x41\x1f\x97\x9f\x9d\x9b\x3a\x72
\xb1\x5e\xc4\x84\x9b\xfb\x11\x9b\x17\x5c\x41\xd9\xa7\x34\x56\xaa\xc2\xe9\x04\xe3\x19\xa9\x0b\x5d\xf6\xa2\xe2\xd9\x06\xb2\x94\x46\x53\x85\x3c\x8f\x50\x10\x60\x93\x64\x32\x61\xcb\x82\xe7\xab\x86\x1e\x24\x1e\x84\x2a\xa1\x7a\xf8\xb5\xe0\xe6\x30\xac\x46\xd1\x6c\x81\xf5\x8e\xa6\x9d\x28\xb0\x3e\xb8\x6a\x30\x73\xc3\xf0\x3a\xae\x16\x40\x91\x8a\x74\x36\x8c\x95\x6b\x2f\x77\x74\x8d\x91\x94\xe8\xf1\x20\xfe\x6c\x3f\x8e\xb9\x5e\xf7\x81\x1f\x70\x5e\x89\x30\xca\xe1\x8e\x0f\x05\x6c\x78\x02\xaa\x83\x68\x84\x27\x75\x2d\x92\x07\x66\xad\xe0\x37\xa7\xb0\x1f\x7a\x2a\x5c\xb2\x86\x17\x1c\x23\xaa\xee\x0a\x14\xa3\xec\x0f\xd5\x5c\x67\x8e\xa3\x75\x72\xc6\x74\x01\xe5\x91\x4a\x4d\x7f\x92\x69\x97\x76\x20\x55\x2a\x3e\x1d\x44\x94\xb4\xfb\xa2\x77\x6a\xb3\xed\x26\xaa\xc2\xd3\xfc\x58\x90\x4e\x85\xb0\x97\x70\xe9\x2c\xe3\xad\xa7\x4c\x13\x50\x7d\x92\x95\x2b\x40\x39\x63\x22\x51\x98\xd4\x35\xdf\xb0\x64\xc5\xd5\x32\x72\x4d\x00\x9c\x53\xe4\xba\xc0\x32\xc2\x0f\xc0\x48\xaa\x0b\x47\x44\x41\xf4\x0a\x94\xcd\xe4\xc3\x18\x98\x44\xa0\x1d\x87\x02\x5f\x2e\x0b\xb1\x84\x64\xdb\xa9\xaa\x11\xc4\x00\x1b\xab\xab\x60\x84\xfd\xec\xe2\xd7\x78\x1e\x92\xaa\x2e\x6b\xb0\x2c\x36\x9e\x9d\x80\x6a\x70\x87\xf3\xdc\x9c\xd6\x11\x93\x62\x3c\x62\xdf\x84\xc4\x09\x91\x68\xe5\xe9\x0d\x3a\x72\xdb\x1b\x2e\x7f\xb6\xc7\x74\xd8\x66\xb3\x6a\x1f\x3b\xfc\xb6\x55\xc9\xbb\x75\xd3\xec\xe4\x87\x28\x79\x59\x0d\xb8\x83\x4e\x79\xc9\x33\xbd\x3c\xb5\x2f\xdf\xe0\xbb\xbb\xf6\xf5\x29\x66\x35\x38\x26\x41\xfb\xbc\xbd\x39\x6d\xdf\xa1\xd2\x40\xdb\x5c\xef\x75\x20\x67\xba\xdb\x81\xfc\x1c\xaa\xba\xa3\x8b\xda\xef\x43\xce\x3a\x28\x90\x76\x7c\xd3\x50\x17\xb1\xcb\x3d\xa0\xf4\x29\xd3\x34\x63\x5b\x24\x40\x5e\xe8\xb4\x4a\x44\x6a\x4f\x2e\xd8\x43\x88\x87\xf2\x4c\x4c\x35\x21\xd9\x76\xd1\xd6\xe8\xe4\xe0\xd6\xfd\x52\x3e\x87\x5e\x0c\xfe\x7e\xfa\xef\x3a\xfc\x0d\x4e\xe3\x6b\x9b\xf4\xf8\x7c\xe2\x3c\x15\x03\xef\x29\xdf\x7d\x9d\x77\x5f\x17\x72\x29\x15\x2f\x75\xc1\x5e\x7a\xbe\x85\x57\xbe\x58\x5f\xb7\x86\x30\x50\x4c\xd4\xa6\x08\xc5\xc4\x17\x55\x3c\xda\x36\xa9\x7d\xca\x94\x7c\x9d\xc7\x4c\xd6
\xe0\x05\x8e\x66\x26\xc3\x49\xf0\xba\x09\xf8\x4e\xa5\x09\xb9\xc5\x53\x45\x11\x07\x5c\x37\x5d\xc4\xa5\x18\x3a\xef\xe6\xbc\x2a\x67\x4f\x64\x67\xc3\x97\x87\x39\x9e\x08\x04\xf1\x9e\xe7\xbb\xf9\xae\x38\xb9\x1c\x30\xb9\x92\xdc\x11\x41\x53\xa9\xef\xcf\xdd\xc5\x8e\x06\xf2\x6f\x37\x03\xf7\xd7\x17\x2e\x50\x14\xec\xc1\x9a\x81\x05\x0b\x81\xc4\xbf\x98\xac\x86\xa6\xbd\x17\x6b\xf6\x16\x77\x24\x59\xa7\x99\xae\x52\x46\x42\x8d\x40\x00\xc5\x18\x6f\x47\x60\xe2\x1e\x8f\xbb\x92\xef\x06\x16\x61\xf7\xf2\x07\xde\x6b\x3f\x81\xf0\x5b\x87\x04\xde\x79\xf4\x69\x66\x3f\xdb\xd2\xd3\x4c\xc3\xda\x7b\x71\x3c\x68\xed\xbd\x17\x1c\x68\x41\x87\x39\x48\xc1\x1e\x95\x69\x06\xe7\x2d\x0e\x20\xb4\x10\x97\xd7\x02\xb3\xe6\xfe\xe0\xee\x1c\x57\xc6\xee\xae\x72\x5e\x08\x55\xce\xa0\xc7\x61\x9d\x41\x27\x57\xf0\x7a\x4d\x61\xea\xe5\x08\xfe\xd3\xad\x46\xff\xbe\xe3\x00\xfb\x33\xbb\x21\x9f\x96\x95\x57\x12\x20\xc4\xe6\x9e\xbd\x94\x80\x78\x8a\x62\xa1\x7e\xe1\x3a\x96\x8b\x3e\xe8\x09\xb3\x17\x7d\x50\x4d\xb4\xf7\xfa\xa0\x30\x7a\x08\x55\x43\x2b\xe4\xde\x23\xe6\x02\x2b\x6a\xdd\xdf\xa2\xba\x20\x97\xb5\x7f\x03\x87\xb3\x5d\xbf\x8c\xfd\x55\x14\x3a\xe4\x7f\xa1\xb3\x2a\x6e\x78\xa7\xbe\xfe\xf4\x92\xe6\xa8\x8f\x63\x31\xed\xb8\x9a\x2c\xfc\x85\x68\xd6\xd0\xa3\x30\xdf\x38\x73\xa4\x23\x84\x94\x8b\x64\xd6\x51\x3a\xa8\xd7\x50\x22\xc3\x33\x2e\x05\x24\x1b\x97\x99\x3b\xa0\xc7\xe0\xaf\xa0\xc4\xaa\x35\xcf\x09\x5d\x48\x40\xf2\x66\xf0\x66\x0c\x1f\xf1\xa7\x3f\xfe\x79\x2c\x3b\x12\xd1\x61\xe8\x43\xc1\x5a\x7e\xf0\xef\x0a\x29\x54\x0a\xc1\x58\x9e\x6e\x57\xb5\x53\x35\xef\x7c\x4d\x3c\xdb\x6d\xf8\x2c\x59\xeb\xed\x57\xad\x99\xe1\x26\xfa\x02\x11\xfd\x20\x64\xfd\xf1\xad\xc5\xfb\xba\x54\x09\x33\x4b\x37\x8a\xaf\x65\xf2\x45\xc7\xb8\x91\x22\x4b\x61\x88\xd4\xfb\xbe\xa8\x54\x2a\x92\xfb\xa1\x3a\xc1\x93\x6b\x72\x88\xe4\x9e\xfd\x70\xfb\xfe\x02\x4b\x30\x4b\x33\x55\x97\xbc\x94\x0f\xe2\xae\xc8\x7c\x38\x80\x40\xda\x45\xe6\xce\x48\x9d\x23\x3e\xe2\x23\x73\x84\xf2\x4e\x71\x88\x4b\x78\xac\x37\x47\xf3\x2a\xb9\x17\xe5\x71\xc1\x55\xaa\xd7\xf8\x19\xc7\xa6\x5a\x2c\xe4\xa7\x71\xc9\x8b
\x8e\x7a\x1e\xe8\x47\xf8\x15\xf5\xdc\x50\xa5\xad\x0c\x3a\x2f\xaa\xba\x8f\x90\x8c\x4e\xb5\xff\x6b\xca\x2d\x66\x25\xf2\xb5\x00\x42\x56\x56\xaf\x85\x03\xad\x60\x7e\x37\x94\x8c\x35\x86\xf2\x27\x34\x15\xa4\xff\x18\x29\xf7\x1f\xa3\x51\x85\x10\x76\x3c\xa8\x50\x86\x75\xcd\xef\xd1\x3e\x5c\x16\xc2\x98\x11\x33\x1a\x46\x3c\x55\x2e\x13\xc1\x65\xcb\x01\xee\x05\x28\x9d\xb3\x0d\x4b\x74\xee\x21\xf3\xf8\x5d\x2b\xfd\x08\x7e\xfa\x38\x4f\x18\x0a\x8d\x57\xaa\x94\x19\xe3\x8b\x92\x9c\xf8\x50\xbf\xc2\xd5\xab\x33\xe3\xa9\x82\x50\x6c\x02\x9f\x0f\x10\x09\x1f\x7e\xf1\x1f\x61\xd8\x82\x27\x32\x93\x25\xb1\xea\x41\x8a\x19\xb7\xdf\x6b\xef\x03\x3b\x97\x05\xdf\xf0\x2c\x18\x56\x3c\xab\x42\x6a\xf4\x91\x11\x3b\x58\x5b\xa5\x99\xa1\x83\xe0\xf3\x1d\xf0\x80\x02\x94\x71\xf0\x01\x19\xee\x4f\x6c\xe7\x97\x8d\x5b\xf4\x1f\xe2\xff\xad\xd9\xe1\xbb\xb4\x82\x03\x0c\xf2\x43\x2e\xc7\x6d\x93\xdb\x17\x79\x0f\x7a\x86\x4c\x1d\x3a\xb9\xa6\x8a\x87\xe4\x63\x7f\x3d\x42\xcc\xa4\xc3\xe8\x1f\xbb\xd2\x7c\xdb\x3d\x0c\x98\xbd\x76\x25\xf1\x0b\xb9\x33\xba\xca\x0e\xf4\x19\xbe\xf3\xc6\x5f\x69\x9d\x1d\xea\x91\x27\xe2\x10\xa9\xd5\x0c\xaa\x55\x1f\x62\x4e\xe2\x06\xf0\x8e\xad\xc9\x99\x8f\xb9\x7b\x1e\xff\x7a\x8d\x3b\x82\x83\xd1\x10\x40\x90\xc1\x20\x76\xe0\xd4\x4d\xde\x02\xba\x18\x88\xb7\x87\x36\x10\xad\xe5\x54\xfb\xed\x10\x41\xc4\x21\xc3\xc3\x18\x81\xeb\xb8\x31\xc2\x41\xce\x3a\xac\x2d\xdd\xe8\xca\x3b\xee\x62\x4e\x74\x3f\x8f\x51\xdf\x6e\x3e\xd7\x5c\x91\xe7\x8f\xb4\xf8\xa9\x8a\x34\x76\xe4\xed\x73\x09\x0d\x7e\xd6\xda\xfc\x79\xb5\x6d\x78\xb0\x3f\xef\x90\xc2\x17\x3b\x25\xe7\x59\x5c\xc2\x12\xb0\x20\x89\x5e\xcf\xa5\x72\x9c\x14\xe4\xe4\x06\x53\xe3\xc4\xf1\x0a\xfb\x80\x84\x33\x19\xb0\xb0\x51\x63\xee\xbd\x9a\x13\x53\x34\xc7\x22\x6b\x9f\x39\x1e\xdb\x77\xcf\x5b\xa3\xa3\x23\xd2\xd8\xfc\x02\x7b\x81\x64\x8f\x7c\x63\xa0\xcc\xbb\xb0\x52\x71\x81\x8e\xdd\xfa\xf8\x47\x91\xfa\xe1\x38\xab\xa7\x0a\x66\x08\x39\xcd\x9c\x20\xb5\x92\x15\x36\x60\xe6\x0a\xda\x07\x3e\xba\x17\xa6\x7d\x72\x7e\x9d\x58\x4d\xb1\x33\x56\x83\x41\xe8\xff\x1e\xe1\x99\x1d\x4e\xe0\x03\x7d\xd1\xd1\x35
\x89\x1a\x23\xc1\x84\x20\x6d\xcc\x87\xa8\x47\x6c\xcd\xa5\xa2\x63\x80\x45\x43\x53\x31\xaf\x96\xcb\x4e\x17\xe9\x6f\x3f\xd6\x52\x3f\x27\xff\xe3\x7d\xe1\x3b\x19\x15\x9f\xc3\x5b\x3c\x71\x3d\xa1\xfb\xda\xda\x7d\x5f\xc6\x41\xfc\x2b\x7a\xe3\x5b\x43\x62\x5b\x9b\xe8\x79\xbc\xf1\x93\x3e\xde\x78\x87\xed\x82\x04\x3f\x32\xa7\x1d\xfe\xe6\xef\x6e\xfa\x2f\xe3\xa6\xef\xb5\x29\x90\xd4\x67\x26\xeb\x0a\xfa\x8e\x11\x3e\x91\x9d\xd3\x13\x56\xc3\xa8\x90\x01\xcf\x4a\xf7\xd4\xb0\x39\x4f\x3e\x03\x5d\x27\xdc\x8e\x87\xfb\x03\xf7\x80\x5f\x6e\xf4\x5a\x30\xe8\xca\x60\xb9\x29\x46\x59\x8c\x23\x40\xab\xda\x0f\x0c\x88\x11\xc2\xa3\xc0\x75\x8a\xc8\x95\x34\x28\xd5\x2f\x95\x78\x64\xf6\xb6\x1a\xc5\xf0\xbd\x68\x79\xa0\x0e\xe1\x2b\xab\x1d\xd6\xb0\xfe\x9e\xb0\xa3\x10\x4b\x5e\xa4\x90\x61\x42\x47\x32\xe3\xc9\xbd\xfd\x6f\x18\x1f\xf5\x48\x10\x43\xc7\x15\x80\xb0\xd7\xd0\x9a\x54\x09\x12\x36\x3a\xe6\x79\x3f\x3e\x7c\xdd\x30\x9e\x14\xda\xa0\xd3\xc8\x97\xef\x86\xfc\x6a\x50\x60\x1f\x64\x5a\xf1\x0c\x7b\xec\xf4\xb4\x0f\x85\xaf\x35\x01\x47\x51\xa5\xbd\x6d\x34\x1b\x2d\x07\x32\x54\xc1\x34\x8e\xa7\xea\xcc\x07\x4c\xde\xb0\x3b\x23\x08\x65\x66\x5c\xad\x82\x9d\x23\xfd\x6c\xea\xc3\x16\x26\xb0\x53\x87\xd8\x31\x01\x0e\x64\x1d\x4d\x84\xe9\x9e\x89\x3d\xa4\xaf\x87\x2c\xca\x60\xf2\xea\x49\x54\xee\x3f\x4c\x0b\xda\x09\x85\xe0\xe9\x26\x66\x8c\x94\x8a\x41\x94\x8e\xf1\x74\x2d\x95\x3d\x04\xae\xa4\xac\xbf\x69\x5c\x75\x09\x84\x1c\x43\xe5\xb5\x2c\x6b\x08\x41\xc3\x94\xb0\xca\x25\x2f\x64\xb6\x01\x7b\x22\x2f\xc4\x51\xd4\x4f\xb4\x3e\x94\xf1\x04\x75\x32\x88\x44\xa6\x32\x62\x51\x65\x68\x75\x80\x5d\xee\x3f\x80\x24\xd2\xdd\x64\x64\x15\x8e\x92\xea\x1d\x45\x1d\x63\x15\xd1\xe7\xc8\x1e\xd9\x8a\x56\x0e\x8b\xb8\x05\x46\xd3\x02\x40\xee\x2b\xfd\xe8\x52\xdd\x1e\x79\xc0\x32\x77\xdd\xae\xcf\x16\x65\xd9\xad\x87\x3a\x0b\xd0\xc9\xa9\x88\xf0\xcf\x87\xd6\xe8\x37\x91\x7a\xd9\x24\x15\x7c\x0e\x15\xe2\x0e\x9e\xeb\xca\x60\xc6\x9c\x5d\x4b\xb8\xbf\x9c\xa3\xa3\xee\xb8\x66\xfe\xeb\xa4\xd1\x8a\x4d\xab\xaf\xbf\xfe\x9d\x60\x5f\x43\x0a\x21\xd9\x23\x18\x1f\x03\x4e\x53\x6c\x1d\x44\xb6
\xef\x40\x20\xe1\xe9\xd6\x8a\xb0\x36\x88\xaa\xcb\xd7\x07\x90\x27\x4f\x56\xcc\x54\x73\x44\x30\x72\x0a\xb1\x70\xe5\xb9\xd1\x2f\x34\x80\x11\xf1\x66\x77\xa3\xff\x7f\x24\xa0\x80\xa5\x69\xa6\x2a\xd7\x48\xdf\x0f\xd0\xcf\xb9\x60\x6b\x5e\xdc\x43\xa5\x61\x74\xcf\x43\xb9\x82\x97\x52\x8c\xeb\xe1\x85\x57\xb5\xf1\x50\x40\x07\x69\xb9\x59\x51\x29\xe5\x4a\xa7\x31\xab\x98\x06\x5f\xff\x68\xaa\xe6\x55\x6c\x7b\xd6\x82\x05\x61\x6b\x41\xc0\x00\x84\xad\x06\xa6\x12\x1a\x14\x37\x61\x5c\x63\xd6\x23\x6a\x30\x55\xcf\x1c\x36\xd8\xe7\xf0\xbb\x22\x1d\xcc\x39\xf3\xa2\x7c\x05\xf8\xdc\xb8\xba\x37\x2c\x07\x6e\x7b\x50\x72\xae\xa0\xc4\xf7\x88\xfd\x20\x1f\xc4\x88\xdd\xe4\xbc\xb8\x1f\xb1\x33\x0c\xff\xfd\x5e\xcf\xdb\x7c\x78\x5b\x84\x12\x07\xfb\xf1\x9e\xe6\xc6\xda\x45\xf3\xd2\xae\xfd\xff\xbc\x45\x0c\xc0\xba\x62\xdf\xff\x3d\x11\x79\x1d\x5c\x1f\xff\xd3\x3d\x11\x7b\xc2\xd4\x7f\x07\xaf\xfd\x8f\xb4\x8a\x77\xd3\x7c\xfc\x43\xfc\xbf\x4e\x7e\x39\x8d\x0b\x74\x4f\x92\x72\xad\xa8\xb4\xdf\x56\x62\xb3\x4c\x9b\x97\xf2\x76\x7e\x73\xbf\xa3\x40\xe9\xe3\xa9\x4f\x6d\x1f\x00\xba\xa7\x57\xdd\x7c\x9d\x66\xda\x54\xc5\xee\xc3\x7f\x5d\x1f\xb5\xeb\xbd\x85\xe8\x15\x36\xdb\x7a\x2e\x80\xb5\xa0\x2f\xfc\x04\x1f\x9b\xfd\x45\xcf\x67\x80\xb5\x3a\xec\x84\xb7\x35\xe7\xe9\xa3\x75\x52\x1b\x6a\xb8\x21\x6f\x72\x01\x7c\x57\x41\x15\x0d\x01\x81\xc6\x0e\xf3\xae\x91\xa9\x72\x75\x01\x30\x63\xb6\x28\x04\x50\x83\x17\x02\xca\x51\x32\x62\x38\xcc\x36\x91\x46\x14\x59\x3e\x01\x14\x13\x67\xb9\x41\xb2\x2a\xd9\x5b\x73\x21\x94\x9f\xed\x21\xaa\x04\xd0\x60\x37\x66\x9f\xd0\x6e\x8f\xc2\x95\x87\xe8\x28\x9d\xbb\xf5\x5e\x64\x0b\x82\xca\xbd\x14\x65\x24\xcd\x1b\xaa\x45\xed\x68\xd6\x22\x54\xbf\x29\xc4\x7f\x6b\x0c\xba\x41\xce\x55\x73\xa0\xf4\x8a\xe9\x3d\x87\xbf\xfc\x8a\x97\x2b\x34\x68\xd7\xba\x14\x28\x33\x91\x25\x08\xf7\x0b\x7a\x9d\xe7\x99\x9e\x43\x1d\xc8\x72\x07\x83\x64\x42\x47\xbb\xd7\xd4\x6d\x2f\x58\x1f\xc9\x60\xa5\x09\x64\xda\x16\xc2\x00\xe1\xca\x76\x94\xaa\x2f\x3e\x79\x98\xd1\xbd\x3d\x5c\x2b\xf4\xcf\xb6\x8c\xed\xed\xc2\x21\xf6\x58\x03\x58\xf5\xfc\x09\x19\x34\x5b\x65\x58
\x88\xaa\x9a\xc2\xc0\xc8\x56\xdb\xf8\x5e\x64\xcb\xd9\x00\x11\x1f\xfc\x12\x5d\x02\x3c\x54\x02\xf3\x78\x50\xaa\x2c\xed\xcf\x1f\xa6\xaf\xb2\x93\x18\x81\x48\x1e\x82\x51\xf0\x65\x82\x31\x30\x82\xac\x46\x55\xca\x42\x30\x05\x28\x84\xa9\x32\xd5\xfc\x28\x10\x93\x58\x2b\xee\x01\xc8\x74\x8c\xc8\x39\x98\x32\xc0\x57\x74\xd4\x72\x0d\xa3\x67\x32\x54\xf4\x71\xf4\x81\x3c\x23\xe1\x0f\xb9\x92\x98\x19\xef\xbf\xdd\xb7\x63\x8d\x35\xb0\xa2\x1d\x5c\x09\x2f\xbb\x5d\xf2\x02\x6a\x8e\x41\x06\xe6\x35\xa2\x28\x7e\xed\x0b\x3c\x8e\x86\xf6\xbd\xba\x21\x9e\x36\x55\xff\xec\xee\x86\x6e\x50\xf1\x80\x9d\x6e\x67\xc6\x5e\x51\x9d\x60\xe7\xda\xd8\x9c\x09\x19\x29\x81\xdd\x83\xda\xda\xf2\x6d\xad\x72\x87\x6b\x89\x0b\xcf\x68\x4a\x97\x85\x5f\x1f\xa4\x89\xc8\xde\xa1\xb7\x1b\x21\xd8\x9b\x42\x2c\xde\x7c\x2c\xc4\x62\xe6\x56\x7a\x0c\x1f\x34\xb6\x5f\xb4\x4d\xf9\xde\x73\x73\x98\x5c\xab\x76\xf2\xc3\x3d\xd4\xa8\x8d\x4f\xc2\x76\xa2\x6f\x92\x0b\x16\x6a\xf0\xda\xef\x01\x06\x08\x91\x36\xb9\xe8\xb7\x46\xf6\xc5\xaf\xb9\x2e\x24\x58\x0f\xa8\x55\x47\xa9\xd6\xff\xf9\xd7\x5b\x6d\xce\xfa\x5c\x6f\xb7\x75\xc8\x8c\x13\xf6\x5c\xf9\x0b\xaf\x1b\x17\xfa\x65\xd1\xe9\xb0\x80\x26\xe7\x8f\x8a\x78\x6c\x06\xb9\x9e\xfa\x5d\x6b\x0d\x00\x51\x74\xad\x6d\x61\xe0\xc2\x29\x53\xce\xd3\x27\x7d\xb5\xcf\x11\x0b\x16\x34\xcf\xb2\xb8\xa2\x46\x88\xb4\x4d\x55\xc8\x4b\xb5\x5a\x6b\x96\x39\x17\x5e\x4d\xdf\xf0\x65\x99\x4d\xc9\x4b\x31\x72\xa4\x2b\x44\x57\x48\xf1\xb0\xa3\x39\x87\x02\xdc\xbe\xd2\xdb\xbe\xd3\xfc\x5c\x46\xe4\x6f\x2c\x2f\x7a\x4f\xe4\x19\xbb\x9d\xdd\x8b\x2d\x38\xf3\xde\xb1\xb6\x47\x3a\x22\x4a\x09\x38\xcc\x4e\xca\x26\xbc\x28\x1c\xca\x9f\x7a\x65\x8e\xee\x3c\xb6\x4a\x3a\xc6\xb9\x12\xc9\x7d\xae\xa5\x1a\x2c\x8b\x6a\x14\x17\xb0\xd9\x4b\x16\x5a\xf3\xd6\x61\xaf\xcb\xb1\xa6\x4f\xe2\x87\x18\x80\x57\x38\x68\x68\x20\x63\xe3\xcc\xd7\xf3\xee\xde\x76\xcf\xed\xbf\x10\xe1\x6e\xf8\x0c\xbe\xd8\x96\xf8\x50\xe3\x56\xe1\x2d\x8e\x9d\x1a\x13\x28\xdf\xca\xfe\xea\x39\xd9\x9c\xd5\x28\x0c\x5b\xa7\x14\x5c\x90\x7f\xf7\x0c\xfd\xdd\x33\xf4\xdf\xdc\x33\xf4\x25\xdd\x42\x80\x8d\xf9\x9c\x3e
\xa1\x1d\x01\xf2\x03\x8e\xa3\xef\x75\x70\x8e\x63\xab\x76\x3c\x8a\x4a\x93\x47\x99\x8e\xdb\x40\x7f\x47\x84\x61\xe7\x67\xce\x93\x7b\xa1\x3a\x63\xf4\x8e\xbe\xa8\xb3\x4a\xec\xf3\x22\x58\xda\xd8\x97\xa2\xb7\x77\x43\x59\x02\xd4\x89\x48\x83\xdb\x08\x41\xec\x39\x01\xdd\xd3\x7e\xf8\x11\x80\xc6\x74\xe1\x89\xad\x0d\x65\xe1\x61\x30\x12\x69\x92\x10\x2c\xd5\xa0\x82\xee\x8b\x89\x73\x1d\xcf\x72\xad\xb3\x56\x68\xdc\xb3\x4e\xe0\x56\xa2\x4c\xdf\xc9\x9b\xa0\x32\x6a\x62\xc0\x98\x9b\xc5\x90\x74\x11\x52\x34\x30\x1f\x03\x2a\x71\xc0\x6e\x4a\x2b\xc8\xa5\x0c\xd3\x11\x95\x57\xe4\xde\xe1\x42\x18\xb1\xb9\x48\x38\x94\xa7\x75\xe0\xbd\x84\xfb\xec\x93\x98\x14\x69\x2b\x1d\xc4\x6c\xf7\xd3\x11\xb5\x84\x76\x67\xb2\xad\xec\xc6\xd0\xc3\xd5\xd0\x10\x1c\xb4\x1c\x47\xee\x90\x24\x8e\x76\x71\x5f\xd9\x65\xc7\x31\x3d\x83\xea\x8b\xfd\x6e\xb8\x56\xb9\x33\xa1\x86\x4e\xa1\x9d\xfe\x82\xf4\x07\x48\xc7\x59\xf7\x44\xee\x4c\xd5\x89\xaf\xc6\x1b\xb0\x5f\x1e\xb9\x87\xe1\x52\xc4\x2c\x6e\x2d\x0d\x72\x39\x06\xcb\x65\xc4\x4c\x95\xac\x80\xad\xb2\x2e\xa7\x62\xb9\xb5\x7d\x62\x47\x53\x65\x0d\x22\x70\xb5\xac\x39\xe4\xc5\x3f\x5a\x65\xd5\xc8\xbf\x0a\x0f\xcf\x22\xf2\xae\x18\x91\x85\x86\x93\x56\xad\xe8\x35\x47\x1c\x8a\x00\x8b\x80\x29\xa9\xf2\x94\x97\x62\x3c\x0d\x68\x1b\x89\x9e\x4e\x87\xf2\x20\x95\xd9\xc4\x1f\x16\xe3\x18\x1b\x92\x36\x93\x0b\x91\x6c\x92\xad\x2a\x44\xbb\x69\x22\xfe\x6e\xb6\xfd\xb6\xcc\x36\x64\xd9\xc5\x9c\xc1\x21\x53\x4b\x43\xbd\x0e\xaf\x1f\x36\xb9\x82\x45\x23\x31\x03\xe6\xf9\x0b\x9a\x9d\x2d\x3a\xf0\x30\x7d\xbe\xb7\x1d\xb4\xfb\x3a\x0b\x86\x6d\xb8\xac\x23\x0a\x84\x2d\xb5\x30\x0e\x2e\x96\xf1\xd6\xb1\x0a\x6d\xef\xb0\x7e\x37\xcb\xcc\x6f\x0a\x9c\xd4\xc7\x70\xb5\x1a\xb7\x87\x2b\x5d\x3a\x4d\x5b\x09\xbc\xef\x76\x68\xdc\x11\xab\x3b\x2f\x5f\x18\x3f\xeb\x75\x09\xe8\xb0\xff\x27\x6a\x73\x50\x02\xe6\x26\x17\xb3\xaa\xc8\x0e\x82\x1b\xdf\x5d\x5f\x1c\x7b\x6d\x03\x34\xe7\xce\xba\x47\x65\xa3\x34\xb4\xab\x49\x2c\x52\x82\x83\x26\x3a\x63\xf3\x6a\xb1\x80\xfa\x25\x04\x0c\x75\xc2\x08\xea\xe7\x57\xa6\x74\xf7\x09\x32\xcd\x70\x53\x4e\x95\x56\x82\x4d\xbf
\x3a\x9e\x7e\x65\xaf\xb2\x82\x27\xa5\x28\x90\x64\x20\xe3\xa6\x64\x46\x2c\x41\xd5\xa2\x4e\xef\xae\x2f\x20\x2b\xb1\x5c\x61\x73\xde\x64\xc5\x7c\x4f\xe4\x7c\x86\x5a\x3f\x40\x50\xad\xa2\x8a\x5b\x30\xf6\x97\xdc\x30\xa9\xa6\xea\xa3\x6d\xe2\x78\xa9\xf5\x32\x13\x63\xb7\x20\xe3\x33\x72\x3d\x7e\x7c\x85\x23\x80\xd7\x63\x58\xbf\xbd\x10\xb9\xd2\x4a\x26\x3c\x83\x84\x9c\xa9\x02\xad\x79\x64\x3f\x06\x5c\xa3\xd3\xaf\xc6\xd3\xaf\x18\x84\x4f\x4b\xc6\x93\x44\xe4\xa5\x48\xb1\xb4\xe9\x44\xb1\x1c\xf0\x8b\x89\x18\xb1\x52\xf0\xb5\x71\x94\xce\x2c\xb7\x36\x26\x98\x86\x4c\x2a\x42\x3a\xcd\xa5\xe2\xc5\x06\xc1\x4c\x58\xac\x9c\x92\x3f\x36\x53\x25\x3e\x01\xfd\xa7\x04\x06\xd0\xca\x78\x5a\x1a\x2a\x4c\x60\x3f\xf9\x44\x6d\xc6\xec\x07\x64\x68\x40\x0a\xd4\xbb\xeb\x0b\x47\x6f\x44\x39\xa0\x53\x65\x92\x95\x58\x0b\xf6\x71\x55\x96\xf9\xc7\x11\xfe\xaf\xf9\x08\x11\x47\xa5\x19\xfe\x3a\x62\x76\x89\xac\xa2\xea\xf0\xf2\xd9\x06\x6a\xc8\x56\x39\x15\x9c\x9f\x2a\xe0\x62\x2f\x62\x74\xaf\x9d\x6d\xe8\x31\x32\xc1\x6b\xb8\x70\x2b\xc5\xa1\xb8\xe3\x1b\x3b\x39\xff\x8b\x4d\x16\xa1\x4b\x3b\x81\xae\xb6\x98\x1f\x15\x28\x24\x06\x52\xb6\xc6\xf6\x85\x13\xc5\x7e\xb8\xbd\xbd\x62\xdf\x9f\xdf\x3a\x65\xe7\xee\xfa\x02\xf7\x05\xd0\xa9\x30\xce\xfe\xd4\x5c\xe2\xdb\x4d\x2e\xfe\xfc\xa7\x3f\x4f\x15\x73\x35\xca\x95\x9b\x69\x3c\xd1\x23\xa4\x84\x05\xbc\x13\x04\x66\x81\xca\x19\xfa\xc3\x92\x3b\x34\xfc\x02\xb5\xf3\x47\xf2\x16\xc0\x1d\x95\x69\x7d\x5f\xe5\xde\xcd\x1d\xeb\x61\xb6\xc3\xbb\xeb\x0b\x68\x1d\xe8\x94\xca\x15\xd4\x4f\x13\xde\xfb\x02\x0b\xcf\xdd\x60\xec\x7f\x3f\x68\x99\x32\xae\x36\xf6\x5d\x6c\x1a\xb6\x65\x21\x16\xba\x10\x23\xf7\xa4\x6d\x80\x97\x72\x2e\x33\x59\x6e\x40\x4a\xb9\xba\xf6\xb9\xe3\xc8\xb7\x0d\x58\x6b\x86\x00\xde\x76\x83\x61\x19\xdb\x97\x77\x26\x46\x80\xc3\xa2\xf9\xda\x88\x68\xe8\xd8\x77\xe7\x85\xe0\xf7\x76\x77\x53\x0b\xe3\x57\x54\x33\x56\xbc\xc1\x3b\x66\x51\xa9\x04\xb7\x86\x1d\x03\xed\x7e\xb2\x9c\xb2\x0d\xe3\x0f\x5c\x62\x4d\x59\x17\x2e\x5f\x2c\x64\x22\x79\x46\x92\x63\x5e\x2d\xa0\x6c\x0c\x37\x54\xb2\x08\xc1\x87\xb6\x11\xb0\x32\x5c\xc1\x7e
\xdc\x50\x73\xb1\x94\x08\x38\x7e\x94\xe5\x0a\xf3\x0a\xc6\xb8\xce\x3c\x97\x66\x9c\xe8\x35\x9c\xb7\x1b\xd8\x4a\x86\x8c\x5e\xc0\x81\x37\xf6\x39\x7b\xe9\xa0\x76\xeb\xbc\xdc\xd0\xde\x7b\xc5\xd6\x72\xb9\x2a\xa1\x90\x0b\xf4\x0e\x90\x08\xb9\xce\x33\x30\xfa\x28\xc2\xe8\xf0\xbe\x46\xac\xb9\x2a\x65\xd2\x15\x53\x6a\x2d\x09\xde\x0f\xe3\x39\xdf\x94\xbb\xfd\x78\xef\x89\x67\x9f\x23\x85\x7e\x24\x91\x59\x53\x20\x93\x0c\x84\xf2\x32\x11\x81\x7f\xb3\xe4\xec\x3e\x13\xea\xe3\x89\xda\x7c\x0c\x24\xa4\x5c\x45\xb5\xaf\x76\xf4\xee\xce\x3f\xcf\x34\xad\x1a\xe3\x53\x05\xa8\x4e\x2b\x30\xa8\x18\xed\xce\x3b\xc6\x5f\x29\x76\x65\xaf\xdc\xa6\xc9\xe4\x1c\xfa\x26\x59\x61\x98\xa9\x72\xc8\x27\x28\x35\xcb\x79\x72\x7f\x5c\x29\xfb\x3f\x56\x18\xe2\x71\x37\x31\x39\xd1\x54\xe9\x05\xab\x4a\x3c\x38\x6e\x0b\x83\x53\x24\x72\x05\x04\x03\x6d\x2d\xca\x95\x4e\x7d\x5e\x98\x6d\x13\xe6\xcf\x8e\xe8\x9c\xe8\xa5\x5f\xbf\x61\x57\xb6\x43\xbb\x89\xa9\x6f\xee\x3f\x5f\x2a\x76\xfa\xcf\xff\x0c\xcf\xdb\xc9\x7d\xa7\x35\x5b\x68\xcd\xbe\x63\xe3\xf1\xf8\xdf\xf1\x6f\xb6\x51\xae\x36\xf4\x2f\xae\x36\x63\xdb\xdc\xbb\x42\xaf\x5f\x2e\xb4\x7e\x45\x7f\x87\xa2\xcd\xf6\x3f\xe4\x82\xbd\xb4\x0f\xdd\x41\x57\xb7\xfa\xe5\xb4\xfa\xfa\xeb\x6f\xfe\xd5\x3e\xfa\x8a\xfd\x27\x3e\x13\x3d\xfe\xb7\x78\xa8\xdf\xec\x19\xea\xef\xf9\x03\xef\x33\x56\xf6\x1d\xdc\x35\xb6\x81\x9d\x63\x94\xe6\xe5\x3b\xad\xc7\x60\xfd\xc7\xa3\xc3\x66\xed\x13\x38\x8a\xe8\xa9\x7f\x8f\x86\xcd\xdc\xb8\x7f\xb7\x67\xdc\x88\xaa\xf7\x23\xc7\xe6\xdf\x69\xfd\x72\x3c\xb6\x72\x8b\xe6\x15\x47\xfd\xf2\x55\x7d\xa2\xe1\x03\xb6\xc7\x6f\x7f\x9e\xe0\xf0\xcf\xce\x6f\x4e\xaf\x27\x57\xb7\x1f\xae\x5f\xbd\x71\x5f\x10\x56\x20\x7a\x9f\xb9\xd2\xda\x7e\xe0\xff\x7b\xcf\xc0\xbf\xd7\x6e\xcc\x30\xe8\x37\xdf\x31\x5c\xcd\x7c\x3e\x7e\xa7\xf5\x7f\x8e\xc7\xe3\xbf\xd1\xcf\x5c\x6d\x46\xf6\x62\xb2\xcf\xe4\x28\xca\xdf\xf3\xc2\xac\x78\x66\xbf\x29\x1a\x83\xff\x88\xd6\x16\x5d\x73\x72\xd1\x68\xec\x4e\xad\x43\x73\xd0\x19\x2c\x2c\x3c\xf5\x8f\xdf\x31\x25\xb3\xb0\x7c\x51\x1f\xb0\x4e\xb7\x40\x2d\x91\xdc\xfb\xe3\xe2\x6b\x84\xce\x37\x2c\x6f
\x1e\x5c\xcc\x3b\xdb\xb8\x0a\x05\x56\xdc\x4f\xd5\x8b\x16\x89\x7e\x6c\x55\xbb\x31\xfc\x60\x2f\xa8\x17\xae\x7a\xbc\xbb\x16\x7c\x65\x2d\x9c\x59\x08\x44\xe3\x69\x55\x94\xa3\xd6\xa6\x1f\xfa\x0b\x2f\x22\xab\x02\xb5\xf3\xc5\xf1\x0b\x4a\x14\x0a\x5d\xd4\x89\xe4\xa7\x5f\x2d\xb4\x1e\xcf\x79\x01\xa3\xfb\x74\xbc\x19\xff\x75\xfa\x15\x7e\x0f\x2a\x1f\xa8\x18\x41\xe3\xd3\xaf\xe0\x57\xd8\x0e\x53\xf5\xfb\x9b\x0f\x97\x53\xf5\xdd\x77\xdf\x7d\x87\xb3\x65\xff\xdd\x12\x7b\xb1\xd7\x15\x88\x5b\xd4\x53\x2a\xe3\x4a\x4a\x8a\x65\x95\xf1\x62\xaa\xda\xc3\x35\xa9\x08\x42\x73\x14\x82\x37\xb4\xcf\x46\xae\xba\x05\x14\x29\x73\x32\x0e\x7d\x93\x1f\xff\x7f\x3b\xe4\x8f\xa4\x22\x7a\x21\x1f\x4f\xc1\xd8\x6d\xe6\x37\x6e\xab\xda\xc9\xb6\xfb\x37\xe8\x59\x0b\x99\x09\x3a\xb8\x6e\x73\x5f\x89\xc2\x68\x15\xf6\x0c\x19\x04\xc0\x6d\x06\x01\x00\xf6\x1d\x7b\xfd\xef\x8d\x5f\xed\x3a\xb8\x1f\xbf\xa9\x49\x02\xc6\x42\x53\xd3\xaf\x60\xd4\xd3\xaf\xde\xb0\xe9\x57\x6d\xfb\xa6\x3e\xb0\x31\x0e\x65\xfa\xd5\x28\x34\x00\xc3\xb8\xe4\x6b\x6c\xa4\xfa\xfa\xeb\xdf\x25\x38\x04\x4c\x5d\x8b\x9e\xb4\x43\xea\x7e\x30\x1a\xe2\xa4\x11\x3a\x73\x13\xe1\x52\x20\x1f\x45\x96\x1d\xdd\x2b\xfd\x88\x75\xc6\x21\x4e\x44\x59\xca\x0c\xb7\x47\x7d\x71\xa9\x36\x59\x63\xc5\x5d\xd2\xa6\xef\xc6\x97\xb7\x83\x05\x9d\xaa\x8f\xb0\x75\xdc\x8a\x12\x1d\x11\xd0\x81\xfa\x9e\xc0\xa8\xa1\x9d\xe0\x72\x2c\x68\x23\x4c\x15\x34\xe3\xd7\x9c\xbd\x04\xe0\x17\x7d\xca\x96\x66\xed\x8c\xa7\x3f\xff\xe9\xcf\xaf\xde\x1c\xb2\x4e\xf5\xe6\x6a\x4b\x05\xdf\x83\x6d\xbc\x1e\x7f\xf3\xfa\x1b\x33\xfd\x8a\x66\xbd\xdd\xc4\xbe\x90\xa6\xfc\xa9\xa1\x81\x3d\xa1\xd8\xb9\x55\x1c\x3e\x57\xf0\xc2\x0d\x15\x87\xd9\x37\x68\x71\x5d\x0f\x2b\xe8\x85\x73\xeb\x80\x71\xe6\xca\xc0\xdb\x71\x0f\x52\xef\xfc\x7c\xa1\xb1\xc5\x1e\x0b\x9e\xe7\xa2\x70\xbe\xf2\xad\x70\x06\xd4\x54\x87\x5e\x9c\xe8\x6f\x13\x66\x76\xdb\x34\x9a\x86\xc7\x60\xea\xc6\xed\x2b\x77\x59\x65\x59\xe7\xca\xed\x2f\x96\x7c\x79\x77\x71\x31\xfb\xe9\xe4\xe2\xee\xdc\x7d\x7e\x6b\xf1\xe1\xe8\xb1\xce\x39\xf1\x23\xa1\x39\x41\x5c\x55\x09\x58\xaa\x6a\x2d\x0a\xc7\x14\x16\xbe
\x1a\x71\x24\x55\x96\xd5\xcb\x62\x4f\xd5\x47\x6a\x07\xc4\x40\xa5\xa4\x53\x53\x76\x4e\x5c\xbd\x7f\x78\xec\xa3\x6d\xfc\x23\xbe\x7b\xc4\xc2\x47\xbc\x61\x97\xbe\xd7\x8e\x79\x25\xc2\x89\x03\x8e\x03\xe6\xdb\x76\x1d\x87\xe7\x2e\xfc\xff\xb4\xe3\x71\xa7\xa0\xe8\x97\x95\xbc\x58\xaf\xff\x59\x4e\x07\xce\xdd\xc7\x3a\x14\xdc\xbb\x4b\x53\x8c\x1a\x42\xbb\x23\x2c\xd7\x6e\x4a\xe2\x2c\xc6\x39\x9b\x2a\x14\xc4\x76\x4c\xa5\xee\x1e\x13\x9b\x50\x04\x29\xe3\x6a\x59\xf1\xa5\x30\x23\xe6\x3a\x9f\x2a\x67\x9d\x3a\x5b\xc7\x03\x73\x80\x91\xb5\xb1\x85\x1a\x29\xc0\x52\x4d\x15\x7d\x13\xdc\xb0\xd4\x3c\xa6\xa3\xfe\xfe\xc6\x7f\x0e\xe5\x7d\x63\x43\x54\x71\x5e\x4d\x15\x2e\x2e\xfa\xc6\x1c\xd8\x10\xd4\x8e\xed\xbb\x89\x03\x3c\x18\xed\xba\x94\x95\x7a\x09\xb0\xc7\xa9\xf2\x2c\x58\x08\xce\x70\xf6\x5a\xa8\x0d\x8a\x43\xda\x2f\x4f\xdc\x62\xb8\x33\x41\x63\x6b\xdf\xf5\x07\xdf\x01\xf6\xc0\xcd\x5a\x6d\xf9\xdd\xdb\x36\x88\xb1\x9e\x80\x1c\x1e\x09\x8e\x2e\x6a\x44\xa0\x3e\x6b\x1f\x8d\xfb\x2e\x7c\xa6\x33\x7b\x54\x57\xf3\x6c\xc0\x90\xf0\xf9\x9d\x83\x42\x91\xbc\x7b\x50\x3d\x3c\xd2\xd7\x8d\xa3\x65\xb7\xe9\xae\x6e\xe7\x5a\x77\xac\xcb\x33\x62\x76\x6b\x83\xa2\x17\xf6\x4d\x46\x95\x94\x4f\xd9\x2f\x3d\xf8\x80\x9a\x53\xe4\xa4\xcf\xae\x01\x65\xd2\x3c\x69\x38\x41\x7f\xea\x3d\x22\xaf\x21\xd0\x65\x37\x48\xc2\xd2\x3d\x57\x13\xb0\x1d\x62\xd2\x99\x29\x98\xde\x22\x24\x8a\x17\x7b\x78\x46\x70\x88\xec\xfe\x1f\xf9\x4d\x34\x0a\x2b\x37\x82\x41\x26\x55\x61\xac\xb8\x24\x79\x47\x52\x5b\x17\x8c\x4f\x95\x63\x83\x71\xe2\xf8\xc4\xf9\x83\x0b\xff\x57\xe4\x58\xca\xb1\x64\x1d\x04\x85\x4a\xf0\x92\x93\x34\x9c\xaa\x07\x5e\x48\xae\x00\xd3\x3c\x37\x50\x6f\x18\x4c\xba\x0d\xa3\x1f\x3c\x01\x87\x89\x9d\xcc\x7b\x64\x5e\x43\x0d\xa8\xdd\xf3\xff\x60\xff\xef\x6f\xff\xf0\x7f\x03\x00\x00\xff\xff\x9e\xd5\x65\xf4\xf7\xfd\x06\x00") func adminSwaggerJsonBytes() ([]byte, error) { return bindataRead( @@ -93,7 +93,7 @@ func adminSwaggerJson() (*asset, error) { return nil, err } - info := bindataFileInfo{name: "admin.swagger.json", size: 317765, mode: os.FileMode(420), modTime: 
time.Unix(1562572800, 0)} + info := bindataFileInfo{name: "admin.swagger.json", size: 458231, mode: os.FileMode(420), modTime: time.Unix(1562572800, 0)} a := &asset{bytes: bytes, info: info} return a, nil } diff --git a/flyteidl/gen/pb-go/flyteidl/service/signal.pb.go b/flyteidl/gen/pb-go/flyteidl/service/signal.pb.go index 02d6a8c7be..3ad109ae7d 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/signal.pb.go +++ b/flyteidl/gen/pb-go/flyteidl/service/signal.pb.go @@ -29,27 +29,30 @@ const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package func init() { proto.RegisterFile("flyteidl/service/signal.proto", fileDescriptor_ca211d25a1023377) } var fileDescriptor_ca211d25a1023377 = []byte{ - // 317 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xc1, 0x4a, 0xf4, 0x30, - 0x14, 0x85, 0xff, 0xf9, 0x05, 0xc1, 0x8a, 0xa8, 0x41, 0x5c, 0x54, 0x05, 0xa7, 0x2b, 0x11, 0x6c, - 0x50, 0x77, 0x82, 0x08, 0xba, 0x70, 0x23, 0x08, 0x76, 0x21, 0xb8, 0x19, 0x32, 0xed, 0x9d, 0x18, - 0x27, 0xcd, 0xad, 0x49, 0x66, 0x46, 0x91, 0xd9, 0xf8, 0x0a, 0xee, 0x7c, 0x1b, 0x9f, 0xc1, 0x57, - 0xf0, 0x41, 0xa4, 0x49, 0xa7, 0xe8, 0xd0, 0xba, 0x0b, 0xe7, 0x7c, 0x9c, 0xdc, 0x93, 0x9b, 0x60, - 0x67, 0x20, 0x9f, 0x2d, 0x88, 0x4c, 0x52, 0x03, 0x7a, 0x2c, 0x52, 0xa0, 0x46, 0x70, 0xc5, 0x64, - 0x5c, 0x68, 0xb4, 0x48, 0xd6, 0x66, 0x76, 0x5c, 0xd9, 0xe1, 0x36, 0x47, 0xe4, 0x12, 0x28, 0x2b, - 0x04, 0x65, 0x4a, 0xa1, 0x65, 0x56, 0xa0, 0x32, 0x9e, 0x0f, 0xb7, 0xea, 0x38, 0x96, 0xe5, 0x42, - 0xfd, 0x0a, 0x3b, 0x7a, 0x5f, 0x08, 0x56, 0x12, 0x27, 0x24, 0x3e, 0x8c, 0xdc, 0x06, 0xeb, 0x97, - 0x60, 0xaf, 0xf5, 0x85, 0x06, 0x66, 0xc1, 0x7b, 0x64, 0x2f, 0xae, 0x2f, 0x75, 0x21, 0xb1, 0xd7, - 0x7f, 0x80, 0x37, 0xf0, 0x38, 0x02, 0x63, 0xc3, 0xcd, 0x66, 0x32, 0xfa, 0x47, 0x3e, 0x3a, 0xc1, - 0xf2, 0x95, 0x30, 0xd6, 0x0b, 0x86, 0x74, 0x9b, 0xc9, 0x12, 0x99, 0x85, 0x85, 0xed, 0x48, 0x64, - 0x5e, 0x3f, 0xbf, 0xde, 0xfe, 0xe7, 0x64, 0xe8, 0x6a, 0x8f, 0x0f, 
0xab, 0x5e, 0x86, 0xbe, 0x4c, - 0x50, 0x0f, 0x07, 0x12, 0x27, 0x3d, 0x78, 0x82, 0x74, 0x54, 0x3e, 0x45, 0x4f, 0x64, 0x65, 0xdf, - 0x07, 0x48, 0xed, 0xb4, 0xcd, 0xcf, 0x30, 0x67, 0x42, 0xb5, 0xda, 0x8a, 0xe5, 0x30, 0x25, 0x32, - 0x58, 0x4a, 0xa0, 0x6a, 0x40, 0x76, 0x9b, 0xa7, 0x4b, 0xa0, 0x9e, 0xbf, 0xfb, 0x07, 0x61, 0x0a, - 0x54, 0x06, 0xa2, 0xd0, 0xd5, 0xd8, 0x88, 0x56, 0xe7, 0x6a, 0x9c, 0x74, 0xf6, 0xcf, 0xcf, 0xee, - 0x4e, 0xb9, 0xb0, 0xf7, 0xa3, 0x7e, 0x9c, 0x62, 0x4e, 0x5d, 0x14, 0x6a, 0xee, 0x0f, 0xb4, 0xde, - 0x2a, 0x07, 0x45, 0x8b, 0xfe, 0x01, 0x47, 0x3a, 0xff, 0x6f, 0xfa, 0x8b, 0x6e, 0xc9, 0xc7, 0xdf, - 0x01, 0x00, 0x00, 0xff, 0xff, 0xa0, 0x03, 0x65, 0xd6, 0x52, 0x02, 0x00, 0x00, + // 356 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x92, 0x41, 0x4b, 0xeb, 0x40, + 0x14, 0x85, 0x5f, 0xca, 0xe3, 0xc1, 0x8b, 0x88, 0x3a, 0x0b, 0x17, 0x51, 0xc1, 0x66, 0xa5, 0x05, + 0x33, 0xa8, 0xbb, 0x82, 0x08, 0x2a, 0x08, 0x22, 0x08, 0x66, 0x21, 0x74, 0x53, 0xa6, 0xc9, 0xed, + 0x38, 0x36, 0x99, 0x1b, 0x33, 0xd3, 0x56, 0x91, 0x6e, 0x04, 0x57, 0x82, 0x1b, 0xb7, 0xfe, 0x2b, + 0x5d, 0xbb, 0xf2, 0x77, 0x88, 0x64, 0xd2, 0x06, 0xad, 0x89, 0x3b, 0x77, 0xe1, 0x9e, 0x2f, 0xe7, + 0xce, 0x39, 0x5c, 0x7b, 0xa5, 0x1b, 0x5d, 0x6b, 0x10, 0x61, 0x44, 0x15, 0xa4, 0x03, 0x11, 0x00, + 0x55, 0x82, 0x4b, 0x16, 0x79, 0x49, 0x8a, 0x1a, 0xc9, 0xfc, 0x44, 0xf6, 0xc6, 0xb2, 0xb3, 0xcc, + 0x11, 0x79, 0x04, 0x94, 0x25, 0x82, 0x32, 0x29, 0x51, 0x33, 0x2d, 0x50, 0xaa, 0x9c, 0x77, 0x96, + 0x0a, 0x3b, 0x16, 0xc6, 0x42, 0x7e, 0x31, 0xdb, 0x7a, 0xfd, 0x6b, 0xcf, 0xfa, 0x66, 0xe0, 0xe7, + 0x66, 0xe4, 0xcc, 0x5e, 0x38, 0x04, 0x7d, 0x92, 0xee, 0xa7, 0xc0, 0x34, 0xe4, 0x1a, 0x59, 0xf3, + 0x8a, 0xa5, 0xc6, 0xc4, 0xcb, 0xe7, 0x9f, 0xc0, 0x53, 0xb8, 0xec, 0x83, 0xd2, 0xce, 0x62, 0x39, + 0xe9, 0xfe, 0x21, 0x2f, 0x35, 0x7b, 0xe6, 0x58, 0x28, 0x9d, 0x0f, 0x14, 0xa9, 0x97, 0x93, 0x19, + 0x32, 0x31, 0x73, 0xaa, 0x11, 0xf7, 0xae, 0x76, 0xfb, 0xfc, 0xf6, 0x58, 
0x7b, 0xb7, 0x48, 0xcf, + 0x04, 0x1f, 0x6c, 0x8e, 0x93, 0x29, 0x7a, 0x33, 0xc4, 0xb4, 0xd7, 0x8d, 0x70, 0xd8, 0x86, 0x2b, + 0x08, 0xfa, 0x59, 0x19, 0x6d, 0x11, 0x66, 0x89, 0x2f, 0x20, 0xd0, 0xa3, 0x2a, 0x3d, 0xc4, 0x98, + 0x09, 0x59, 0x29, 0x4b, 0x16, 0xc3, 0xa8, 0xf5, 0x60, 0x91, 0x7b, 0x6b, 0x7a, 0x21, 0xa6, 0xbc, + 0xea, 0x2f, 0x4c, 0xf9, 0xe8, 0x37, 0x1f, 0x44, 0x9e, 0x2c, 0xfb, 0xbf, 0x0f, 0xe3, 0x56, 0xc9, + 0x6a, 0x79, 0x63, 0x3e, 0x14, 0x9d, 0xd6, 0x7f, 0x20, 0x54, 0x82, 0x52, 0x81, 0x7b, 0x64, 0x9a, + 0x3d, 0x70, 0xe7, 0xa6, 0x72, 0x36, 0xad, 0x46, 0xcb, 0x73, 0xd7, 0x4b, 0xd3, 0x8b, 0xd0, 0xfb, + 0x96, 0xbb, 0x69, 0x35, 0xf6, 0x76, 0x5b, 0x3b, 0x5c, 0xe8, 0xf3, 0x7e, 0xc7, 0x0b, 0x30, 0xa6, + 0x66, 0x75, 0xc6, 0x9b, 0x0f, 0x5a, 0x5c, 0x26, 0x07, 0x49, 0x93, 0xce, 0x06, 0x47, 0x3a, 0x7d, + 0xfb, 0x9d, 0x7f, 0xe6, 0x50, 0xb7, 0x3f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x52, 0xf5, 0x8e, 0x13, + 0x16, 0x03, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. diff --git a/flyteidl/gen/pb-go/flyteidl/service/signal.pb.gw.go b/flyteidl/gen/pb-go/flyteidl/service/signal.pb.gw.go index 43397bb36e..e0ba6b6ef4 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/signal.pb.gw.go +++ b/flyteidl/gen/pb-go/flyteidl/service/signal.pb.gw.go @@ -89,6 +89,77 @@ func request_SignalService_ListSignals_0(ctx context.Context, marshaler runtime. 
} +var ( + filter_SignalService_ListSignals_1 = &utilities.DoubleArray{Encoding: map[string]int{"workflow_execution_id": 0, "org": 1, "project": 2, "domain": 3, "name": 4}, Base: []int{1, 1, 1, 2, 3, 4, 0, 0, 0, 0}, Check: []int{0, 1, 2, 2, 2, 2, 3, 4, 5, 6}} +) + +func request_SignalService_ListSignals_1(ctx context.Context, marshaler runtime.Marshaler, client SignalServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.SignalListRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["workflow_execution_id.org"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.org", err) + } + + val, ok = pathParams["workflow_execution_id.project"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.project") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.project", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.project", err) + } + + val, ok = pathParams["workflow_execution_id.domain"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.domain") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.domain", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.domain", err) + } + + val, ok = pathParams["workflow_execution_id.name"] + if 
!ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "workflow_execution_id.name") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "workflow_execution_id.name", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "workflow_execution_id.name", err) + } + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_SignalService_ListSignals_1); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListSignals(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + func request_SignalService_SetSignal_0(ctx context.Context, marshaler runtime.Marshaler, client SignalServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { var protoReq admin.SignalSetRequest var metadata runtime.ServerMetadata @@ -106,6 +177,41 @@ func request_SignalService_SetSignal_0(ctx context.Context, marshaler runtime.Ma } +func request_SignalService_SetSignal_1(ctx context.Context, marshaler runtime.Marshaler, client SignalServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq admin.SignalSetRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id.execution_id.org"] + if !ok { + return nil, metadata, 
status.Errorf(codes.InvalidArgument, "missing parameter %s", "id.execution_id.org") + } + + err = runtime.PopulateFieldFromPath(&protoReq, "id.execution_id.org", val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id.execution_id.org", err) + } + + msg, err := client.SetSignal(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + // RegisterSignalServiceHandlerFromEndpoint is same as RegisterSignalServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterSignalServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { @@ -164,6 +270,26 @@ func RegisterSignalServiceHandlerClient(ctx context.Context, mux *runtime.ServeM }) + mux.Handle("GET", pattern_SignalService_ListSignals_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_SignalService_ListSignals_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_SignalService_ListSignals_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + mux.Handle("POST", pattern_SignalService_SetSignal_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { ctx, cancel := context.WithCancel(req.Context()) defer cancel() @@ -184,17 +310,45 @@ func RegisterSignalServiceHandlerClient(ctx context.Context, mux *runtime.ServeM }) + mux.Handle("POST", pattern_SignalService_SetSignal_1, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_SignalService_SetSignal_1(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_SignalService_SetSignal_1(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + return nil } var ( pattern_SignalService_ListSignals_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, []string{"api", "v1", "signals", "workflow_execution_id.project", "workflow_execution_id.domain", "workflow_execution_id.name"}, "")) + pattern_SignalService_ListSignals_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5, 1, 0, 4, 1, 5, 6, 1, 0, 4, 1, 5, 7}, []string{"api", "v1", "signals", "org", "workflow_execution_id.org", "workflow_execution_id.project", "workflow_execution_id.domain", "workflow_execution_id.name"}, "")) + pattern_SignalService_SetSignal_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"api", "v1", "signals"}, "")) + + pattern_SignalService_SetSignal_1 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"api", "v1", "signals", "org", "id.execution_id.org"}, "")) ) var ( forward_SignalService_ListSignals_0 = runtime.ForwardResponseMessage + forward_SignalService_ListSignals_1 = runtime.ForwardResponseMessage + forward_SignalService_SetSignal_0 = runtime.ForwardResponseMessage + + forward_SignalService_SetSignal_1 = runtime.ForwardResponseMessage ) diff --git a/flyteidl/gen/pb-go/flyteidl/service/signal.swagger.json b/flyteidl/gen/pb-go/flyteidl/service/signal.swagger.json index 72ffbd861b..39b3f3e777 100644 --- a/flyteidl/gen/pb-go/flyteidl/service/signal.swagger.json +++ b/flyteidl/gen/pb-go/flyteidl/service/signal.swagger.json @@ -42,6 +42,128 @@ ] } }, + "/api/v1/signals/org/{id.execution_id.org}": { + "post": { + "summary": "Sets the value on a :ref:`ref_flyteidl.admin.Signal` definition", + "operationId": "SetSignal2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminSignalSetResponse" + } + } + }, + "parameters": [ + { + "name": "id.execution_id.org", + 
"description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/adminSignalSetRequest" + } + } + ], + "tags": [ + "SignalService" + ] + } + }, + "/api/v1/signals/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}": { + "get": { + "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Signal` definitions.", + "operationId": "ListSignals2", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/adminSignalList" + } + } + }, + "parameters": [ + { + "name": "workflow_execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "workflow_execution_id.project", + "description": "Name of the project the resource belongs to.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "workflow_execution_id.domain", + "description": "Name of the domain the resource belongs to.\nA domain can be considered as a subset within a specific project.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "workflow_execution_id.name", + "description": "User or system provided value for the resource.", + "in": "path", + "required": true, + "type": "string" + }, + { + "name": "limit", + "description": "Indicates the number of resources to be returned.\n+required.", + "in": "query", + "required": false, + "type": "integer", + "format": "int64" + }, + { + "name": "token", + "description": "In the case of multiple pages of results, the, server-provided token can be used to fetch the next page\nin a query.\n+optional.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filters", + "description": "Indicates a list of filters passed as string.\n+optional.", + 
"in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.key", + "description": "Indicates an attribute to sort the response values.\n+required.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by.direction", + "description": "Indicates the direction to apply sort key for response values.\n+optional.\n\n - DESCENDING: By default, fields are sorted in descending order.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "DESCENDING", + "ASCENDING" + ], + "default": "DESCENDING" + } + ], + "tags": [ + "SignalService" + ] + } + }, "/api/v1/signals/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}": { "get": { "summary": "Fetch a list of :ref:`ref_flyteidl.admin.Signal` definitions.", @@ -76,6 +198,13 @@ "required": true, "type": "string" }, + { + "name": "workflow_execution_id.org", + "description": "Optional, org key applied to the resource.", + "in": "query", + "required": false, + "type": "string" + }, { "name": "limit", "description": "Indicates the number of resources to be returned.\n+required.", @@ -655,6 +784,10 @@ "name": { "type": "string", "description": "User or system provided value for the resource." + }, + "org": { + "type": "string", + "description": "Optional, org key applied to the resource." } }, "title": "Encapsulation of fields that uniquely identifies a Flyte workflow execution" diff --git a/flyteidl/gen/pb-java/datacatalog/Datacatalog.java b/flyteidl/gen/pb-java/datacatalog/Datacatalog.java index 9ad108fc95..4d0c9894e9 100644 --- a/flyteidl/gen/pb-java/datacatalog/Datacatalog.java +++ b/flyteidl/gen/pb-java/datacatalog/Datacatalog.java @@ -19954,6 +19954,24 @@ public interface DatasetIDOrBuilder extends */ com.google.protobuf.ByteString getUUIDBytes(); + + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -19977,6 +19995,7 @@ private DatasetID() {
       domain_ = "";
       version_ = "";
       uUID_ = "";
+      org_ = "";
     }
 
     @java.lang.Override
@@ -20033,6 +20052,12 @@ private DatasetID(
               uUID_ = s;
               break;
             }
+            case 50: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -20275,6 +20300,48 @@ public java.lang.String getUUID() {
       }
     }
 
+    public static final int ORG_FIELD_NUMBER = 6;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -20304,6 +20371,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getUUIDBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, uUID_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, org_); + } unknownFields.writeTo(output); } @@ -20328,6 +20398,9 @@ public int getSerializedSize() { if (!getUUIDBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, uUID_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -20353,6 +20426,8 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getVersion())) return false; if (!getUUID() .equals(other.getUUID())) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -20374,6 +20449,8 @@ public int hashCode() { hash = (53 * hash) + getVersion().hashCode(); hash = (37 * hash) + UUID_FIELD_NUMBER; hash = (53 * hash) + getUUID().hashCode(); + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -20521,6 +20598,8 @@ public Builder clear() { uUID_ = ""; + org_ = ""; + return this; } @@ -20552,6 +20631,7 @@ public datacatalog.Datacatalog.DatasetID buildPartial() { 
result.domain_ = domain_; result.version_ = version_; result.uUID_ = uUID_; + result.org_ = org_; onBuilt(); return result; } @@ -20620,6 +20700,10 @@ public Builder mergeFrom(datacatalog.Datacatalog.DatasetID other) { uUID_ = other.uUID_; onChanged(); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -21093,6 +21177,95 @@ public Builder setUUIDBytes( onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -31392,6 +31565,24 @@ public interface DatasetPropertyFilterOrBuilder extends com.google.protobuf.ByteString getVersionBytes(); + /** + *
+     * Optional, org key applied to the dataset.
+     * 
+ * + * string org = 5; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the dataset.
+     * 
+ * + * string org = 5; + */ + com.google.protobuf.ByteString + getOrgBytes(); + public datacatalog.Datacatalog.DatasetPropertyFilter.PropertyCase getPropertyCase(); } /** @@ -31461,6 +31652,12 @@ private DatasetPropertyFilter( property_ = s; break; } + case 42: { + java.lang.String s = input.readStringRequireUtf8(); + propertyCase_ = 5; + property_ = s; + break; + } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { @@ -31501,6 +31698,7 @@ public enum PropertyCase NAME(2), DOMAIN(3), VERSION(4), + ORG(5), PROPERTY_NOT_SET(0); private final int value; private PropertyCase(int value) { @@ -31520,6 +31718,7 @@ public static PropertyCase forNumber(int value) { case 2: return NAME; case 3: return DOMAIN; case 4: return VERSION; + case 5: return ORG; case 0: return PROPERTY_NOT_SET; default: return null; } @@ -31707,6 +31906,57 @@ public java.lang.String getVersion() { } } + public static final int ORG_FIELD_NUMBER = 5; + /** + *
+     * Optional, org key applied to the dataset.
+     * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = ""; + if (propertyCase_ == 5) { + ref = property_; + } + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (propertyCase_ == 5) { + property_ = s; + } + return s; + } + } + /** + *
+     * Optional, org key applied to the dataset.
+     * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = ""; + if (propertyCase_ == 5) { + ref = property_; + } + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + if (propertyCase_ == 5) { + property_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -31733,6 +31983,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (propertyCase_ == 4) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, property_); } + if (propertyCase_ == 5) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, property_); + } unknownFields.writeTo(output); } @@ -31754,6 +32007,9 @@ public int getSerializedSize() { if (propertyCase_ == 4) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, property_); } + if (propertyCase_ == 5) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, property_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -31787,6 +32043,10 @@ public boolean equals(final java.lang.Object obj) { if (!getVersion() .equals(other.getVersion())) return false; break; + case 5: + if (!getOrg() + .equals(other.getOrg())) return false; + break; case 0: default: } @@ -31818,6 +32078,10 @@ public int hashCode() { hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); break; + case 5: + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); + break; case 0: default: } @@ -31998,6 +32262,9 @@ public datacatalog.Datacatalog.DatasetPropertyFilter buildPartial() { if (propertyCase_ == 4) { result.property_ = property_; } + if (propertyCase_ == 5) { + result.property_ = property_; + } result.propertyCase_ = propertyCase_; 
onBuilt(); return result; @@ -32072,6 +32339,12 @@ public Builder mergeFrom(datacatalog.Datacatalog.DatasetPropertyFilter other) { onChanged(); break; } + case ORG: { + propertyCase_ = 5; + property_ = other.property_; + onChanged(); + break; + } case PROPERTY_NOT_SET: { break; } @@ -32439,6 +32712,106 @@ public Builder setVersionBytes( onChanged(); return this; } + + /** + *
+       * Optional, org key applied to the dataset.
+       * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = ""; + if (propertyCase_ == 5) { + ref = property_; + } + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (propertyCase_ == 5) { + property_ = s; + } + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the dataset.
+       * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = ""; + if (propertyCase_ == 5) { + ref = property_; + } + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + if (propertyCase_ == 5) { + property_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the dataset.
+       * 
+ * + * string org = 5; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + propertyCase_ = 5; + property_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the dataset.
+       * 
+ * + * string org = 5; + */ + public Builder clearOrg() { + if (propertyCase_ == 5) { + propertyCase_ = 0; + property_ = null; + onChanged(); + } + return this; + } + /** + *
+       * Optional, org key applied to the dataset.
+       * 
+ * + * string org = 5; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + propertyCase_ = 5; + property_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -33869,73 +34242,74 @@ public datacatalog.Datacatalog.PaginationOptions getDefaultInstanceForType() { "datacatalog.DatasetID\022\'\n\010metadata\030\002 \001(\0132" + "\025.datacatalog.Metadata\022\025\n\rpartitionKeys\030" + "\003 \003(\t\"\'\n\tPartition\022\013\n\003key\030\001 \001(\t\022\r\n\005value" + - "\030\002 \001(\t\"Y\n\tDatasetID\022\017\n\007project\030\001 \001(\t\022\014\n\004" + + "\030\002 \001(\t\"f\n\tDatasetID\022\017\n\007project\030\001 \001(\t\022\014\n\004" + "name\030\002 \001(\t\022\016\n\006domain\030\003 \001(\t\022\017\n\007version\030\004 " + - "\001(\t\022\014\n\004UUID\030\005 \001(\t\"\215\002\n\010Artifact\022\n\n\002id\030\001 \001" + - "(\t\022\'\n\007dataset\030\002 \001(\0132\026.datacatalog.Datase" + - "tID\022\'\n\004data\030\003 \003(\0132\031.datacatalog.Artifact" + - "Data\022\'\n\010metadata\030\004 \001(\0132\025.datacatalog.Met" + - "adata\022*\n\npartitions\030\005 \003(\0132\026.datacatalog." + - "Partition\022\036\n\004tags\030\006 \003(\0132\020.datacatalog.Ta" + - "g\022.\n\ncreated_at\030\007 \001(\0132\032.google.protobuf." 
+ - "Timestamp\"C\n\014ArtifactData\022\014\n\004name\030\001 \001(\t\022" + - "%\n\005value\030\002 \001(\0132\026.flyteidl.core.Literal\"Q" + - "\n\003Tag\022\014\n\004name\030\001 \001(\t\022\023\n\013artifact_id\030\002 \001(\t" + - "\022\'\n\007dataset\030\003 \001(\0132\026.datacatalog.DatasetI" + - "D\"m\n\010Metadata\0222\n\007key_map\030\001 \003(\0132!.datacat" + - "alog.Metadata.KeyMapEntry\032-\n\013KeyMapEntry" + - "\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"F\n\020Filt" + - "erExpression\0222\n\007filters\030\001 \003(\0132!.datacata" + - "log.SinglePropertyFilter\"\211\003\n\024SinglePrope" + - "rtyFilter\0224\n\ntag_filter\030\001 \001(\0132\036.datacata" + - "log.TagPropertyFilterH\000\022@\n\020partition_fil" + - "ter\030\002 \001(\0132$.datacatalog.PartitionPropert" + - "yFilterH\000\022>\n\017artifact_filter\030\003 \001(\0132#.dat" + - "acatalog.ArtifactPropertyFilterH\000\022<\n\016dat" + - "aset_filter\030\004 \001(\0132\".datacatalog.DatasetP" + - "ropertyFilterH\000\022F\n\010operator\030\n \001(\01624.data" + - "catalog.SinglePropertyFilter.ComparisonO" + - "perator\" \n\022ComparisonOperator\022\n\n\006EQUALS\020" + - "\000B\021\n\017property_filter\";\n\026ArtifactProperty" + - "Filter\022\025\n\013artifact_id\030\001 \001(\tH\000B\n\n\010propert" + - "y\"3\n\021TagPropertyFilter\022\022\n\010tag_name\030\001 \001(\t" + - "H\000B\n\n\010property\"S\n\027PartitionPropertyFilte" + - "r\022,\n\007key_val\030\001 \001(\0132\031.datacatalog.KeyValu" + - "ePairH\000B\n\n\010property\"*\n\014KeyValuePair\022\013\n\003k" + - "ey\030\001 \001(\t\022\r\n\005value\030\002 \001(\t\"k\n\025DatasetProper" + - "tyFilter\022\021\n\007project\030\001 \001(\tH\000\022\016\n\004name\030\002 \001(" + - "\tH\000\022\020\n\006domain\030\003 \001(\tH\000\022\021\n\007version\030\004 \001(\tH\000" + - "B\n\n\010property\"\361\001\n\021PaginationOptions\022\r\n\005li" + - "mit\030\001 \001(\r\022\r\n\005token\030\002 
\001(\t\0227\n\007sortKey\030\003 \001(" + - "\0162&.datacatalog.PaginationOptions.SortKe" + - "y\022;\n\tsortOrder\030\004 \001(\0162(.datacatalog.Pagin" + - "ationOptions.SortOrder\"*\n\tSortOrder\022\016\n\nD" + - "ESCENDING\020\000\022\r\n\tASCENDING\020\001\"\034\n\007SortKey\022\021\n" + - "\rCREATION_TIME\020\0002\206\007\n\013DataCatalog\022V\n\rCrea" + - "teDataset\022!.datacatalog.CreateDatasetReq" + - "uest\032\".datacatalog.CreateDatasetResponse" + - "\022M\n\nGetDataset\022\036.datacatalog.GetDatasetR" + - "equest\032\037.datacatalog.GetDatasetResponse\022" + - "Y\n\016CreateArtifact\022\".datacatalog.CreateAr" + - "tifactRequest\032#.datacatalog.CreateArtifa" + - "ctResponse\022P\n\013GetArtifact\022\037.datacatalog." + - "GetArtifactRequest\032 .datacatalog.GetArti" + - "factResponse\022A\n\006AddTag\022\032.datacatalog.Add" + - "TagRequest\032\033.datacatalog.AddTagResponse\022" + - "V\n\rListArtifacts\022!.datacatalog.ListArtif" + - "actsRequest\032\".datacatalog.ListArtifactsR" + - "esponse\022S\n\014ListDatasets\022 .datacatalog.Li" + - "stDatasetsRequest\032!.datacatalog.ListData" + - "setsResponse\022Y\n\016UpdateArtifact\022\".datacat" + - "alog.UpdateArtifactRequest\032#.datacatalog" + - ".UpdateArtifactResponse\022q\n\026GetOrExtendRe" + - "servation\022*.datacatalog.GetOrExtendReser" + - "vationRequest\032+.datacatalog.GetOrExtendR" + - "eservationResponse\022e\n\022ReleaseReservation" + - "\022&.datacatalog.ReleaseReservationRequest" + - "\032\'.datacatalog.ReleaseReservationRespons" + - "eBCZAgithub.com/flyteorg/flyte/flyteidl/" + - "gen/pb-go/flyteidl/datacatalogb\006proto3" + "\001(\t\022\014\n\004UUID\030\005 \001(\t\022\013\n\003org\030\006 \001(\t\"\215\002\n\010Artif" + + "act\022\n\n\002id\030\001 \001(\t\022\'\n\007dataset\030\002 \001(\0132\026.datac" + + "atalog.DatasetID\022\'\n\004data\030\003 \003(\0132\031.datacat" + + "alog.ArtifactData\022\'\n\010metadata\030\004 \001(\0132\025.da" + + 
"tacatalog.Metadata\022*\n\npartitions\030\005 \003(\0132\026" + + ".datacatalog.Partition\022\036\n\004tags\030\006 \003(\0132\020.d" + + "atacatalog.Tag\022.\n\ncreated_at\030\007 \001(\0132\032.goo" + + "gle.protobuf.Timestamp\"C\n\014ArtifactData\022\014" + + "\n\004name\030\001 \001(\t\022%\n\005value\030\002 \001(\0132\026.flyteidl.c" + + "ore.Literal\"Q\n\003Tag\022\014\n\004name\030\001 \001(\t\022\023\n\013arti" + + "fact_id\030\002 \001(\t\022\'\n\007dataset\030\003 \001(\0132\026.datacat" + + "alog.DatasetID\"m\n\010Metadata\0222\n\007key_map\030\001 " + + "\003(\0132!.datacatalog.Metadata.KeyMapEntry\032-" + + "\n\013KeyMapEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(" + + "\t:\0028\001\"F\n\020FilterExpression\0222\n\007filters\030\001 \003" + + "(\0132!.datacatalog.SinglePropertyFilter\"\211\003" + + "\n\024SinglePropertyFilter\0224\n\ntag_filter\030\001 \001" + + "(\0132\036.datacatalog.TagPropertyFilterH\000\022@\n\020" + + "partition_filter\030\002 \001(\0132$.datacatalog.Par" + + "titionPropertyFilterH\000\022>\n\017artifact_filte" + + "r\030\003 \001(\0132#.datacatalog.ArtifactPropertyFi" + + "lterH\000\022<\n\016dataset_filter\030\004 \001(\0132\".datacat" + + "alog.DatasetPropertyFilterH\000\022F\n\010operator" + + "\030\n \001(\01624.datacatalog.SinglePropertyFilte" + + "r.ComparisonOperator\" \n\022ComparisonOperat" + + "or\022\n\n\006EQUALS\020\000B\021\n\017property_filter\";\n\026Art" + + "ifactPropertyFilter\022\025\n\013artifact_id\030\001 \001(\t" + + "H\000B\n\n\010property\"3\n\021TagPropertyFilter\022\022\n\010t" + + "ag_name\030\001 \001(\tH\000B\n\n\010property\"S\n\027Partition" + + "PropertyFilter\022,\n\007key_val\030\001 \001(\0132\031.dataca" + + "talog.KeyValuePairH\000B\n\n\010property\"*\n\014KeyV" + + "aluePair\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t\"z\n\025" + + "DatasetPropertyFilter\022\021\n\007project\030\001 \001(\tH\000" + + "\022\016\n\004name\030\002 
\001(\tH\000\022\020\n\006domain\030\003 \001(\tH\000\022\021\n\007ve" + + "rsion\030\004 \001(\tH\000\022\r\n\003org\030\005 \001(\tH\000B\n\n\010property" + + "\"\361\001\n\021PaginationOptions\022\r\n\005limit\030\001 \001(\r\022\r\n" + + "\005token\030\002 \001(\t\0227\n\007sortKey\030\003 \001(\0162&.datacata" + + "log.PaginationOptions.SortKey\022;\n\tsortOrd" + + "er\030\004 \001(\0162(.datacatalog.PaginationOptions" + + ".SortOrder\"*\n\tSortOrder\022\016\n\nDESCENDING\020\000\022" + + "\r\n\tASCENDING\020\001\"\034\n\007SortKey\022\021\n\rCREATION_TI" + + "ME\020\0002\206\007\n\013DataCatalog\022V\n\rCreateDataset\022!." + + "datacatalog.CreateDatasetRequest\032\".datac" + + "atalog.CreateDatasetResponse\022M\n\nGetDatas" + + "et\022\036.datacatalog.GetDatasetRequest\032\037.dat" + + "acatalog.GetDatasetResponse\022Y\n\016CreateArt" + + "ifact\022\".datacatalog.CreateArtifactReques" + + "t\032#.datacatalog.CreateArtifactResponse\022P" + + "\n\013GetArtifact\022\037.datacatalog.GetArtifactR" + + "equest\032 .datacatalog.GetArtifactResponse" + + "\022A\n\006AddTag\022\032.datacatalog.AddTagRequest\032\033" + + ".datacatalog.AddTagResponse\022V\n\rListArtif" + + "acts\022!.datacatalog.ListArtifactsRequest\032" + + "\".datacatalog.ListArtifactsResponse\022S\n\014L" + + "istDatasets\022 .datacatalog.ListDatasetsRe" + + "quest\032!.datacatalog.ListDatasetsResponse" + + "\022Y\n\016UpdateArtifact\022\".datacatalog.UpdateA" + + "rtifactRequest\032#.datacatalog.UpdateArtif" + + "actResponse\022q\n\026GetOrExtendReservation\022*." + + "datacatalog.GetOrExtendReservationReques" + + "t\032+.datacatalog.GetOrExtendReservationRe" + + "sponse\022e\n\022ReleaseReservation\022&.datacatal" + + "og.ReleaseReservationRequest\032\'.datacatal" + + "og.ReleaseReservationResponseBCZAgithub." 
+ + "com/flyteorg/flyte/flyteidl/gen/pb-go/fl" + + "yteidl/datacatalogb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { @@ -34101,7 +34475,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_datacatalog_DatasetID_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_datacatalog_DatasetID_descriptor, - new java.lang.String[] { "Project", "Name", "Domain", "Version", "UUID", }); + new java.lang.String[] { "Project", "Name", "Domain", "Version", "UUID", "Org", }); internal_static_datacatalog_Artifact_descriptor = getDescriptor().getMessageTypes().get(25); internal_static_datacatalog_Artifact_fieldAccessorTable = new @@ -34173,7 +34547,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_datacatalog_DatasetPropertyFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_datacatalog_DatasetPropertyFilter_descriptor, - new java.lang.String[] { "Project", "Name", "Domain", "Version", "Property", }); + new java.lang.String[] { "Project", "Name", "Domain", "Version", "Org", "Property", }); internal_static_datacatalog_PaginationOptions_descriptor = getDescriptor().getMessageTypes().get(36); internal_static_datacatalog_PaginationOptions_fieldAccessorTable = new diff --git a/flyteidl/gen/pb-java/flyteidl/admin/AgentOuterClass.java b/flyteidl/gen/pb-java/flyteidl/admin/AgentOuterClass.java index f71d1743ff..e5422d0656 100644 --- a/flyteidl/gen/pb-java/flyteidl/admin/AgentOuterClass.java +++ b/flyteidl/gen/pb-java/flyteidl/admin/AgentOuterClass.java @@ -6178,20 +6178,20 @@ public interface ResourceOrBuilder extends /** *
-     * The state of the execution is used to control its visibility in the UI/CLI.
+     * DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI.
      * 
* - * .flyteidl.admin.State state = 1; + * .flyteidl.admin.State state = 1 [deprecated = true]; */ - int getStateValue(); + @java.lang.Deprecated int getStateValue(); /** *
-     * The state of the execution is used to control its visibility in the UI/CLI.
+     * DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI.
      * 
* - * .flyteidl.admin.State state = 1; + * .flyteidl.admin.State state = 1 [deprecated = true]; */ - flyteidl.admin.AgentOuterClass.State getState(); + @java.lang.Deprecated flyteidl.admin.AgentOuterClass.State getState(); /** *
@@ -6285,6 +6285,23 @@ public interface ResourceOrBuilder extends
      */
     flyteidl.core.Execution.TaskLogOrBuilder getLogLinksOrBuilder(
         int index);
+
+    /**
+     * 
+     * The phase of the execution is used to determine the phase of the plugin's execution.
+     * 
+ * + * .flyteidl.core.TaskExecution.Phase phase = 5; + */ + int getPhaseValue(); + /** + *
+     * The phase of the execution is used to determine the phase of the plugin's execution.
+     * 
+ * + * .flyteidl.core.TaskExecution.Phase phase = 5; + */ + flyteidl.core.Execution.TaskExecution.Phase getPhase(); } /** * Protobuf type {@code flyteidl.admin.Resource} @@ -6302,6 +6319,7 @@ private Resource() { state_ = 0; message_ = ""; logLinks_ = java.util.Collections.emptyList(); + phase_ = 0; } @java.lang.Override @@ -6362,6 +6380,12 @@ private Resource( input.readMessage(flyteidl.core.Execution.TaskLog.parser(), extensionRegistry)); break; } + case 40: { + int rawValue = input.readEnum(); + + phase_ = rawValue; + break; + } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { @@ -6402,22 +6426,22 @@ private Resource( private int state_; /** *
-     * The state of the execution is used to control its visibility in the UI/CLI.
+     * DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI.
      * 
* - * .flyteidl.admin.State state = 1; + * .flyteidl.admin.State state = 1 [deprecated = true]; */ - public int getStateValue() { + @java.lang.Deprecated public int getStateValue() { return state_; } /** *
-     * The state of the execution is used to control its visibility in the UI/CLI.
+     * DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI.
      * 
* - * .flyteidl.admin.State state = 1; + * .flyteidl.admin.State state = 1 [deprecated = true]; */ - public flyteidl.admin.AgentOuterClass.State getState() { + @java.lang.Deprecated public flyteidl.admin.AgentOuterClass.State getState() { @SuppressWarnings("deprecation") flyteidl.admin.AgentOuterClass.State result = flyteidl.admin.AgentOuterClass.State.valueOf(state_); return result == null ? flyteidl.admin.AgentOuterClass.State.UNRECOGNIZED : result; @@ -6559,6 +6583,31 @@ public flyteidl.core.Execution.TaskLogOrBuilder getLogLinksOrBuilder( return logLinks_.get(index); } + public static final int PHASE_FIELD_NUMBER = 5; + private int phase_; + /** + *
+     * The phase of the execution is used to determine the phase of the plugin's execution.
+     * 
+ * + * .flyteidl.core.TaskExecution.Phase phase = 5; + */ + public int getPhaseValue() { + return phase_; + } + /** + *
+     * The phase of the execution is used to determine the phase of the plugin's execution.
+     * 
+ * + * .flyteidl.core.TaskExecution.Phase phase = 5; + */ + public flyteidl.core.Execution.TaskExecution.Phase getPhase() { + @SuppressWarnings("deprecation") + flyteidl.core.Execution.TaskExecution.Phase result = flyteidl.core.Execution.TaskExecution.Phase.valueOf(phase_); + return result == null ? flyteidl.core.Execution.TaskExecution.Phase.UNRECOGNIZED : result; + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -6585,6 +6634,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) for (int i = 0; i < logLinks_.size(); i++) { output.writeMessage(4, logLinks_.get(i)); } + if (phase_ != flyteidl.core.Execution.TaskExecution.Phase.UNDEFINED.getNumber()) { + output.writeEnum(5, phase_); + } unknownFields.writeTo(output); } @@ -6609,6 +6661,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, logLinks_.get(i)); } + if (phase_ != flyteidl.core.Execution.TaskExecution.Phase.UNDEFINED.getNumber()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(5, phase_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -6634,6 +6690,7 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getMessage())) return false; if (!getLogLinksList() .equals(other.getLogLinksList())) return false; + if (phase_ != other.phase_) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -6657,6 +6714,8 @@ public int hashCode() { hash = (37 * hash) + LOG_LINKS_FIELD_NUMBER; hash = (53 * hash) + getLogLinksList().hashCode(); } + hash = (37 * hash) + PHASE_FIELD_NUMBER; + hash = (53 * hash) + phase_; hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -6807,6 +6866,8 @@ public Builder clear() { } else { logLinksBuilder_.clear(); } + phase_ = 0; + return this; } @@ -6851,6 +6912,7 @@ public flyteidl.admin.AgentOuterClass.Resource 
buildPartial() { } else { result.logLinks_ = logLinksBuilder_.build(); } + result.phase_ = phase_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -6936,6 +6998,9 @@ public Builder mergeFrom(flyteidl.admin.AgentOuterClass.Resource other) { } } } + if (other.phase_ != 0) { + setPhaseValue(other.getPhaseValue()); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -6969,46 +7034,46 @@ public Builder mergeFrom( private int state_ = 0; /** *
-       * The state of the execution is used to control its visibility in the UI/CLI.
+       * DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI.
        * 
* - * .flyteidl.admin.State state = 1; + * .flyteidl.admin.State state = 1 [deprecated = true]; */ - public int getStateValue() { + @java.lang.Deprecated public int getStateValue() { return state_; } /** *
-       * The state of the execution is used to control its visibility in the UI/CLI.
+       * DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI.
        * 
* - * .flyteidl.admin.State state = 1; + * .flyteidl.admin.State state = 1 [deprecated = true]; */ - public Builder setStateValue(int value) { + @java.lang.Deprecated public Builder setStateValue(int value) { state_ = value; onChanged(); return this; } /** *
-       * The state of the execution is used to control its visibility in the UI/CLI.
+       * DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI.
        * 
* - * .flyteidl.admin.State state = 1; + * .flyteidl.admin.State state = 1 [deprecated = true]; */ - public flyteidl.admin.AgentOuterClass.State getState() { + @java.lang.Deprecated public flyteidl.admin.AgentOuterClass.State getState() { @SuppressWarnings("deprecation") flyteidl.admin.AgentOuterClass.State result = flyteidl.admin.AgentOuterClass.State.valueOf(state_); return result == null ? flyteidl.admin.AgentOuterClass.State.UNRECOGNIZED : result; } /** *
-       * The state of the execution is used to control its visibility in the UI/CLI.
+       * DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI.
        * 
* - * .flyteidl.admin.State state = 1; + * .flyteidl.admin.State state = 1 [deprecated = true]; */ - public Builder setState(flyteidl.admin.AgentOuterClass.State value) { + @java.lang.Deprecated public Builder setState(flyteidl.admin.AgentOuterClass.State value) { if (value == null) { throw new NullPointerException(); } @@ -7019,12 +7084,12 @@ public Builder setState(flyteidl.admin.AgentOuterClass.State value) { } /** *
-       * The state of the execution is used to control its visibility in the UI/CLI.
+       * DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI.
        * 
* - * .flyteidl.admin.State state = 1; + * .flyteidl.admin.State state = 1 [deprecated = true]; */ - public Builder clearState() { + @java.lang.Deprecated public Builder clearState() { state_ = 0; onChanged(); @@ -7602,6 +7667,71 @@ public flyteidl.core.Execution.TaskLog.Builder addLogLinksBuilder( } return logLinksBuilder_; } + + private int phase_ = 0; + /** + *
+       * The phase of the execution is used to determine the phase of the plugin's execution.
+       * 
+ * + * .flyteidl.core.TaskExecution.Phase phase = 5; + */ + public int getPhaseValue() { + return phase_; + } + /** + *
+       * The phase of the execution is used to determine the phase of the plugin's execution.
+       * 
+ * + * .flyteidl.core.TaskExecution.Phase phase = 5; + */ + public Builder setPhaseValue(int value) { + phase_ = value; + onChanged(); + return this; + } + /** + *
+       * The phase of the execution is used to determine the phase of the plugin's execution.
+       * 
+ * + * .flyteidl.core.TaskExecution.Phase phase = 5; + */ + public flyteidl.core.Execution.TaskExecution.Phase getPhase() { + @SuppressWarnings("deprecation") + flyteidl.core.Execution.TaskExecution.Phase result = flyteidl.core.Execution.TaskExecution.Phase.valueOf(phase_); + return result == null ? flyteidl.core.Execution.TaskExecution.Phase.UNRECOGNIZED : result; + } + /** + *
+       * The phase of the execution is used to determine the phase of the plugin's execution.
+       * 
+ * + * .flyteidl.core.TaskExecution.Phase phase = 5; + */ + public Builder setPhase(flyteidl.core.Execution.TaskExecution.Phase value) { + if (value == null) { + throw new NullPointerException(); + } + + phase_ = value.getNumber(); + onChanged(); + return this; + } + /** + *
+       * The phase of the execution is used to determine the phase of the plugin's execution.
+       * 
+ * + * .flyteidl.core.TaskExecution.Phase phase = 5; + */ + public Builder clearPhase() { + + phase_ = 0; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -12023,232 +12153,4697 @@ public flyteidl.admin.AgentOuterClass.ListAgentsResponse getDefaultInstanceForTy } - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_TaskExecutionMetadata_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_TaskExecutionMetadata_LabelsEntry_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_TaskExecutionMetadata_LabelsEntry_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_TaskExecutionMetadata_AnnotationsEntry_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_TaskExecutionMetadata_AnnotationsEntry_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_TaskExecutionMetadata_EnvironmentVariablesEntry_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_TaskExecutionMetadata_EnvironmentVariablesEntry_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_CreateTaskRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_CreateTaskRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - 
internal_static_flyteidl_admin_CreateTaskResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_CreateTaskResponse_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_GetTaskRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_GetTaskRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_GetTaskResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_GetTaskResponse_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_Resource_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_Resource_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_DeleteTaskRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_DeleteTaskRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_DeleteTaskResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_DeleteTaskResponse_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_Agent_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_Agent_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_GetAgentRequest_descriptor; - 
private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_GetAgentRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_GetAgentResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_GetAgentResponse_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_ListAgentsRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_ListAgentsRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_admin_ListAgentsResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_admin_ListAgentsResponse_fieldAccessorTable; + public interface GetTaskMetricsRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.admin.GetTaskMetricsRequest) + com.google.protobuf.MessageOrBuilder { - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\032flyteidl/admin/agent.proto\022\016flyteidl.a" + - "dmin\032\034flyteidl/core/literals.proto\032\031flyt" + - "eidl/core/tasks.proto\032\035flyteidl/core/int" + - "erface.proto\032\036flyteidl/core/identifier.p" + - "roto\032\035flyteidl/core/execution.proto\"\232\004\n\025" + - "TaskExecutionMetadata\022A\n\021task_execution_" + - "id\030\001 \001(\0132&.flyteidl.core.TaskExecutionId" + - "entifier\022\021\n\tnamespace\030\002 \001(\t\022A\n\006labels\030\003 " + - "\003(\01321.flyteidl.admin.TaskExecutionMetada" + - "ta.LabelsEntry\022K\n\013annotations\030\004 
\003(\01326.fl" + - "yteidl.admin.TaskExecutionMetadata.Annot" + - "ationsEntry\022\033\n\023k8s_service_account\030\005 \001(\t" + - "\022^\n\025environment_variables\030\006 \003(\0132?.flytei" + - "dl.admin.TaskExecutionMetadata.Environme" + - "ntVariablesEntry\032-\n\013LabelsEntry\022\013\n\003key\030\001" + - " \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\0322\n\020AnnotationsEn" + - "try\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\032;\n\031E" + - "nvironmentVariablesEntry\022\013\n\003key\030\001 \001(\t\022\r\n" + - "\005value\030\002 \001(\t:\0028\001\"\314\001\n\021CreateTaskRequest\022)" + - "\n\006inputs\030\001 \001(\0132\031.flyteidl.core.LiteralMa" + - "p\022-\n\010template\030\002 \001(\0132\033.flyteidl.core.Task" + - "Template\022\025\n\routput_prefix\030\003 \001(\t\022F\n\027task_" + - "execution_metadata\030\004 \001(\0132%.flyteidl.admi" + - "n.TaskExecutionMetadata\"b\n\022CreateTaskRes" + - "ponse\022\027\n\rresource_meta\030\001 \001(\014H\000\022,\n\010resour" + - "ce\030\002 \001(\0132\030.flyteidl.admin.ResourceH\000B\005\n\003" + - "res\":\n\016GetTaskRequest\022\021\n\ttask_type\030\001 \001(\t" + - "\022\025\n\rresource_meta\030\002 \001(\014\"h\n\017GetTaskRespon" + - "se\022*\n\010resource\030\001 \001(\0132\030.flyteidl.admin.Re" + - "source\022)\n\tlog_links\030\002 \003(\0132\026.flyteidl.cor" + - "e.TaskLog\"\230\001\n\010Resource\022$\n\005state\030\001 \001(\0162\025." + - "flyteidl.admin.State\022*\n\007outputs\030\002 \001(\0132\031." 
+ - "flyteidl.core.LiteralMap\022\017\n\007message\030\003 \001(" + - "\t\022)\n\tlog_links\030\004 \003(\0132\026.flyteidl.core.Tas" + - "kLog\"=\n\021DeleteTaskRequest\022\021\n\ttask_type\030\001" + - " \001(\t\022\025\n\rresource_meta\030\002 \001(\014\"\024\n\022DeleteTas" + - "kResponse\"3\n\005Agent\022\014\n\004name\030\001 \001(\t\022\034\n\024supp" + - "orted_task_types\030\002 \003(\t\"\037\n\017GetAgentReques" + - "t\022\014\n\004name\030\001 \001(\t\"8\n\020GetAgentResponse\022$\n\005a" + - "gent\030\001 \001(\0132\025.flyteidl.admin.Agent\"\023\n\021Lis" + - "tAgentsRequest\";\n\022ListAgentsResponse\022%\n\006" + - "agents\030\001 \003(\0132\025.flyteidl.admin.Agent*^\n\005S" + - "tate\022\025\n\021RETRYABLE_FAILURE\020\000\022\025\n\021PERMANENT" + - "_FAILURE\020\001\022\013\n\007PENDING\020\002\022\013\n\007RUNNING\020\003\022\r\n\t" + - "SUCCEEDED\020\004B=Z;github.com/flyteorg/flyte" + - "/flyteidl/gen/pb-go/flyteidl/adminb\006prot" + - "o3" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - flyteidl.core.Literals.getDescriptor(), - flyteidl.core.Tasks.getDescriptor(), - flyteidl.core.Interface.getDescriptor(), - flyteidl.core.IdentifierOuterClass.getDescriptor(), - flyteidl.core.Execution.getDescriptor(), - }, assigner); - internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_flyteidl_admin_TaskExecutionMetadata_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor, - new java.lang.String[] { "TaskExecutionId", "Namespace", "Labels", "Annotations", "K8SServiceAccount", "EnvironmentVariables", }); - internal_static_flyteidl_admin_TaskExecutionMetadata_LabelsEntry_descriptor = - internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor.getNestedTypes().get(0); - internal_static_flyteidl_admin_TaskExecutionMetadata_LabelsEntry_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_TaskExecutionMetadata_LabelsEntry_descriptor, - new java.lang.String[] { "Key", "Value", }); - internal_static_flyteidl_admin_TaskExecutionMetadata_AnnotationsEntry_descriptor = - internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor.getNestedTypes().get(1); - internal_static_flyteidl_admin_TaskExecutionMetadata_AnnotationsEntry_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_TaskExecutionMetadata_AnnotationsEntry_descriptor, - new java.lang.String[] { "Key", "Value", }); - 
internal_static_flyteidl_admin_TaskExecutionMetadata_EnvironmentVariablesEntry_descriptor = - internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor.getNestedTypes().get(2); - internal_static_flyteidl_admin_TaskExecutionMetadata_EnvironmentVariablesEntry_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_TaskExecutionMetadata_EnvironmentVariablesEntry_descriptor, - new java.lang.String[] { "Key", "Value", }); - internal_static_flyteidl_admin_CreateTaskRequest_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_flyteidl_admin_CreateTaskRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_CreateTaskRequest_descriptor, - new java.lang.String[] { "Inputs", "Template", "OutputPrefix", "TaskExecutionMetadata", }); - internal_static_flyteidl_admin_CreateTaskResponse_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_flyteidl_admin_CreateTaskResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_CreateTaskResponse_descriptor, - new java.lang.String[] { "ResourceMeta", "Resource", "Res", }); - internal_static_flyteidl_admin_GetTaskRequest_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_flyteidl_admin_GetTaskRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_GetTaskRequest_descriptor, - new java.lang.String[] { "TaskType", "ResourceMeta", }); - internal_static_flyteidl_admin_GetTaskResponse_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_flyteidl_admin_GetTaskResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_GetTaskResponse_descriptor, - new java.lang.String[] { "Resource", "LogLinks", }); - 
internal_static_flyteidl_admin_Resource_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_flyteidl_admin_Resource_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_Resource_descriptor, - new java.lang.String[] { "State", "Outputs", "Message", "LogLinks", }); - internal_static_flyteidl_admin_DeleteTaskRequest_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_flyteidl_admin_DeleteTaskRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_DeleteTaskRequest_descriptor, - new java.lang.String[] { "TaskType", "ResourceMeta", }); - internal_static_flyteidl_admin_DeleteTaskResponse_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_flyteidl_admin_DeleteTaskResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_DeleteTaskResponse_descriptor, - new java.lang.String[] { }); - internal_static_flyteidl_admin_Agent_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_flyteidl_admin_Agent_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_admin_Agent_descriptor, - new java.lang.String[] { "Name", "SupportedTaskTypes", }); + /** + *
+     * A predefined yet extensible Task type identifier.
+     * 
+ * + * string task_type = 1; + */ + java.lang.String getTaskType(); + /** + *
+     * A predefined yet extensible Task type identifier.
+     * 
+ * + * string task_type = 1; + */ + com.google.protobuf.ByteString + getTaskTypeBytes(); + + /** + *
+     * Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata).
+     * 
+ * + * bytes resource_meta = 2; + */ + com.google.protobuf.ByteString getResourceMeta(); + + /** + *
+     * The metrics to query. If empty, will return a default set of metrics.
+     * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+     * 
+ * + * repeated string queries = 3; + */ + java.util.List + getQueriesList(); + /** + *
+     * The metrics to query. If empty, will return a default set of metrics.
+     * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+     * 
+ * + * repeated string queries = 3; + */ + int getQueriesCount(); + /** + *
+     * The metrics to query. If empty, will return a default set of metrics.
+     * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+     * 
+ * + * repeated string queries = 3; + */ + java.lang.String getQueries(int index); + /** + *
+     * The metrics to query. If empty, will return a default set of metrics.
+     * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+     * 
+ * + * repeated string queries = 3; + */ + com.google.protobuf.ByteString + getQueriesBytes(int index); + + /** + *
+     * Start timestamp, inclusive.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + boolean hasStartTime(); + /** + *
+     * Start timestamp, inclusive.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + com.google.protobuf.Timestamp getStartTime(); + /** + *
+     * Start timestamp, inclusive.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder(); + + /** + *
+     * End timestamp, inclusive..
+     * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + boolean hasEndTime(); + /** + *
+     * End timestamp, inclusive..
+     * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + com.google.protobuf.Timestamp getEndTime(); + /** + *
+     * End timestamp, inclusive..
+     * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder(); + + /** + *
+     * Query resolution step width in duration format or float number of seconds.
+     * 
+ * + * .google.protobuf.Duration step = 6; + */ + boolean hasStep(); + /** + *
+     * Query resolution step width in duration format or float number of seconds.
+     * 
+ * + * .google.protobuf.Duration step = 6; + */ + com.google.protobuf.Duration getStep(); + /** + *
+     * Query resolution step width in duration format or float number of seconds.
+     * 
+ * + * .google.protobuf.Duration step = 6; + */ + com.google.protobuf.DurationOrBuilder getStepOrBuilder(); + } + /** + *
+   * A request to get the metrics from a task execution.
+   * 
+ * + * Protobuf type {@code flyteidl.admin.GetTaskMetricsRequest} + */ + public static final class GetTaskMetricsRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:flyteidl.admin.GetTaskMetricsRequest) + GetTaskMetricsRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetTaskMetricsRequest.newBuilder() to construct. + private GetTaskMetricsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GetTaskMetricsRequest() { + taskType_ = ""; + resourceMeta_ = com.google.protobuf.ByteString.EMPTY; + queries_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetTaskMetricsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + taskType_ = s; + break; + } + case 18: { + + resourceMeta_ = input.readBytes(); + break; + } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000004) != 0)) { + queries_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000004; + } + queries_.add(s); + break; + } + case 34: { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (startTime_ != null) { + subBuilder = startTime_.toBuilder(); + } + startTime_ = 
input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(startTime_); + startTime_ = subBuilder.buildPartial(); + } + + break; + } + case 42: { + com.google.protobuf.Timestamp.Builder subBuilder = null; + if (endTime_ != null) { + subBuilder = endTime_.toBuilder(); + } + endTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(endTime_); + endTime_ = subBuilder.buildPartial(); + } + + break; + } + case 50: { + com.google.protobuf.Duration.Builder subBuilder = null; + if (step_ != null) { + subBuilder = step_.toBuilder(); + } + step_ = input.readMessage(com.google.protobuf.Duration.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(step_); + step_ = subBuilder.buildPartial(); + } + + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000004) != 0)) { + queries_ = queries_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskMetricsRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskMetricsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest.class, 
flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest.Builder.class); + } + + private int bitField0_; + public static final int TASK_TYPE_FIELD_NUMBER = 1; + private volatile java.lang.Object taskType_; + /** + *
+     * A predefined yet extensible Task type identifier.
+     * 
+ * + * string task_type = 1; + */ + public java.lang.String getTaskType() { + java.lang.Object ref = taskType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + taskType_ = s; + return s; + } + } + /** + *
+     * A predefined yet extensible Task type identifier.
+     * 
+ * + * string task_type = 1; + */ + public com.google.protobuf.ByteString + getTaskTypeBytes() { + java.lang.Object ref = taskType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + taskType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int RESOURCE_META_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString resourceMeta_; + /** + *
+     * Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata).
+     * 
+ * + * bytes resource_meta = 2; + */ + public com.google.protobuf.ByteString getResourceMeta() { + return resourceMeta_; + } + + public static final int QUERIES_FIELD_NUMBER = 3; + private com.google.protobuf.LazyStringList queries_; + /** + *
+     * The metrics to query. If empty, will return a default set of metrics.
+     * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+     * 
+ * + * repeated string queries = 3; + */ + public com.google.protobuf.ProtocolStringList + getQueriesList() { + return queries_; + } + /** + *
+     * The metrics to query. If empty, will return a default set of metrics.
+     * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+     * 
+ * + * repeated string queries = 3; + */ + public int getQueriesCount() { + return queries_.size(); + } + /** + *
+     * The metrics to query. If empty, will return a default set of metrics.
+     * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+     * 
+ * + * repeated string queries = 3; + */ + public java.lang.String getQueries(int index) { + return queries_.get(index); + } + /** + *
+     * The metrics to query. If empty, will return a default set of metrics.
+     * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+     * 
+ * + * repeated string queries = 3; + */ + public com.google.protobuf.ByteString + getQueriesBytes(int index) { + return queries_.getByteString(index); + } + + public static final int START_TIME_FIELD_NUMBER = 4; + private com.google.protobuf.Timestamp startTime_; + /** + *
+     * Start timestamp, inclusive.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + public boolean hasStartTime() { + return startTime_ != null; + } + /** + *
+     * Start timestamp, inclusive.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + public com.google.protobuf.Timestamp getStartTime() { + return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } + /** + *
+     * Start timestamp, inclusive.
+     * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { + return getStartTime(); + } + + public static final int END_TIME_FIELD_NUMBER = 5; + private com.google.protobuf.Timestamp endTime_; + /** + *
+     * End timestamp, inclusive..
+     * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + public boolean hasEndTime() { + return endTime_ != null; + } + /** + *
+     * End timestamp, inclusive..
+     * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + public com.google.protobuf.Timestamp getEndTime() { + return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } + /** + *
+     * End timestamp, inclusive..
+     * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { + return getEndTime(); + } + + public static final int STEP_FIELD_NUMBER = 6; + private com.google.protobuf.Duration step_; + /** + *
+     * Query resolution step width in duration format or float number of seconds.
+     * 
+ * + * .google.protobuf.Duration step = 6; + */ + public boolean hasStep() { + return step_ != null; + } + /** + *
+     * Query resolution step width in duration format or float number of seconds.
+     * 
+ * + * .google.protobuf.Duration step = 6; + */ + public com.google.protobuf.Duration getStep() { + return step_ == null ? com.google.protobuf.Duration.getDefaultInstance() : step_; + } + /** + *
+     * Query resolution step width in duration format or float number of seconds.
+     * 
+ * + * .google.protobuf.Duration step = 6; + */ + public com.google.protobuf.DurationOrBuilder getStepOrBuilder() { + return getStep(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getTaskTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, taskType_); + } + if (!resourceMeta_.isEmpty()) { + output.writeBytes(2, resourceMeta_); + } + for (int i = 0; i < queries_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, queries_.getRaw(i)); + } + if (startTime_ != null) { + output.writeMessage(4, getStartTime()); + } + if (endTime_ != null) { + output.writeMessage(5, getEndTime()); + } + if (step_ != null) { + output.writeMessage(6, getStep()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getTaskTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, taskType_); + } + if (!resourceMeta_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, resourceMeta_); + } + { + int dataSize = 0; + for (int i = 0; i < queries_.size(); i++) { + dataSize += computeStringSizeNoTag(queries_.getRaw(i)); + } + size += dataSize; + size += 1 * getQueriesList().size(); + } + if (startTime_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getStartTime()); + } + if (endTime_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, getEndTime()); + } + if (step_ != null) { + size += 
com.google.protobuf.CodedOutputStream + .computeMessageSize(6, getStep()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest)) { + return super.equals(obj); + } + flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest other = (flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest) obj; + + if (!getTaskType() + .equals(other.getTaskType())) return false; + if (!getResourceMeta() + .equals(other.getResourceMeta())) return false; + if (!getQueriesList() + .equals(other.getQueriesList())) return false; + if (hasStartTime() != other.hasStartTime()) return false; + if (hasStartTime()) { + if (!getStartTime() + .equals(other.getStartTime())) return false; + } + if (hasEndTime() != other.hasEndTime()) return false; + if (hasEndTime()) { + if (!getEndTime() + .equals(other.getEndTime())) return false; + } + if (hasStep() != other.hasStep()) return false; + if (hasStep()) { + if (!getStep() + .equals(other.getStep())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TASK_TYPE_FIELD_NUMBER; + hash = (53 * hash) + getTaskType().hashCode(); + hash = (37 * hash) + RESOURCE_META_FIELD_NUMBER; + hash = (53 * hash) + getResourceMeta().hashCode(); + if (getQueriesCount() > 0) { + hash = (37 * hash) + QUERIES_FIELD_NUMBER; + hash = (53 * hash) + getQueriesList().hashCode(); + } + if (hasStartTime()) { + hash = (37 * hash) + START_TIME_FIELD_NUMBER; + hash = (53 * hash) + getStartTime().hashCode(); + } + if (hasEndTime()) { + hash = (37 * hash) + END_TIME_FIELD_NUMBER; + hash = (53 * hash) + 
getEndTime().hashCode(); + } + if (hasStep()) { + hash = (37 * hash) + STEP_FIELD_NUMBER; + hash = (53 * hash) + getStep().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * A request to get the metrics from a task execution.
+     * 
+ * + * Protobuf type {@code flyteidl.admin.GetTaskMetricsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:flyteidl.admin.GetTaskMetricsRequest) + flyteidl.admin.AgentOuterClass.GetTaskMetricsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskMetricsRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskMetricsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest.class, flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest.Builder.class); + } + + // Construct using flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + taskType_ = ""; + + resourceMeta_ = com.google.protobuf.ByteString.EMPTY; + + queries_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + if (startTimeBuilder_ == null) { + startTime_ = null; + } else { + startTime_ = null; + startTimeBuilder_ = null; + } + if (endTimeBuilder_ == null) { + endTime_ = null; + } else { + endTime_ = null; + endTimeBuilder_ = null; + } + if (stepBuilder_ == null) { + step_ = null; + } else { + step_ = null; + stepBuilder_ = null; + } + return this; + } + + @java.lang.Override + public 
com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskMetricsRequest_descriptor; + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest getDefaultInstanceForType() { + return flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest.getDefaultInstance(); + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest build() { + flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest buildPartial() { + flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest result = new flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + result.taskType_ = taskType_; + result.resourceMeta_ = resourceMeta_; + if (((bitField0_ & 0x00000004) != 0)) { + queries_ = queries_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.queries_ = queries_; + if (startTimeBuilder_ == null) { + result.startTime_ = startTime_; + } else { + result.startTime_ = startTimeBuilder_.build(); + } + if (endTimeBuilder_ == null) { + result.endTime_ = endTime_; + } else { + result.endTime_ = endTimeBuilder_.build(); + } + if (stepBuilder_ == null) { + result.step_ = step_; + } else { + result.step_ = stepBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + 
return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest) { + return mergeFrom((flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest other) { + if (other == flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest.getDefaultInstance()) return this; + if (!other.getTaskType().isEmpty()) { + taskType_ = other.taskType_; + onChanged(); + } + if (other.getResourceMeta() != com.google.protobuf.ByteString.EMPTY) { + setResourceMeta(other.getResourceMeta()); + } + if (!other.queries_.isEmpty()) { + if (queries_.isEmpty()) { + queries_ = other.queries_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureQueriesIsMutable(); + queries_.addAll(other.queries_); + } + onChanged(); + } + if (other.hasStartTime()) { + mergeStartTime(other.getStartTime()); + } + if (other.hasEndTime()) { + mergeEndTime(other.getEndTime()); + } + if (other.hasStep()) { + mergeStep(other.getStep()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.lang.Object taskType_ = ""; + /** + *
+       * A predefined yet extensible Task type identifier.
+       * 
+ * + * string task_type = 1; + */ + public java.lang.String getTaskType() { + java.lang.Object ref = taskType_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + taskType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * A predefined yet extensible Task type identifier.
+       * 
+ * + * string task_type = 1; + */ + public com.google.protobuf.ByteString + getTaskTypeBytes() { + java.lang.Object ref = taskType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + taskType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * A predefined yet extensible Task type identifier.
+       * 
+ * + * string task_type = 1; + */ + public Builder setTaskType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + taskType_ = value; + onChanged(); + return this; + } + /** + *
+       * A predefined yet extensible Task type identifier.
+       * 
+ * + * string task_type = 1; + */ + public Builder clearTaskType() { + + taskType_ = getDefaultInstance().getTaskType(); + onChanged(); + return this; + } + /** + *
+       * A predefined yet extensible Task type identifier.
+       * 
+ * + * string task_type = 1; + */ + public Builder setTaskTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + taskType_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString resourceMeta_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata).
+       * 
+ * + * bytes resource_meta = 2; + */ + public com.google.protobuf.ByteString getResourceMeta() { + return resourceMeta_; + } + /** + *
+       * Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata).
+       * 
+ * + * bytes resource_meta = 2; + */ + public Builder setResourceMeta(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + + resourceMeta_ = value; + onChanged(); + return this; + } + /** + *
+       * Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata).
+       * 
+ * + * bytes resource_meta = 2; + */ + public Builder clearResourceMeta() { + + resourceMeta_ = getDefaultInstance().getResourceMeta(); + onChanged(); + return this; + } + + private com.google.protobuf.LazyStringList queries_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureQueriesIsMutable() { + if (!((bitField0_ & 0x00000004) != 0)) { + queries_ = new com.google.protobuf.LazyStringArrayList(queries_); + bitField0_ |= 0x00000004; + } + } + /** + *
+       * The metrics to query. If empty, will return a default set of metrics.
+       * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+       * 
+ * + * repeated string queries = 3; + */ + public com.google.protobuf.ProtocolStringList + getQueriesList() { + return queries_.getUnmodifiableView(); + } + /** + *
+       * The metrics to query. If empty, will return a default set of metrics.
+       * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+       * 
+ * + * repeated string queries = 3; + */ + public int getQueriesCount() { + return queries_.size(); + } + /** + *
+       * The metrics to query. If empty, will return a default set of metrics.
+       * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+       * 
+ * + * repeated string queries = 3; + */ + public java.lang.String getQueries(int index) { + return queries_.get(index); + } + /** + *
+       * The metrics to query. If empty, will return a default set of metrics.
+       * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+       * 
+ * + * repeated string queries = 3; + */ + public com.google.protobuf.ByteString + getQueriesBytes(int index) { + return queries_.getByteString(index); + } + /** + *
+       * The metrics to query. If empty, will return a default set of metrics.
+       * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+       * 
+ * + * repeated string queries = 3; + */ + public Builder setQueries( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureQueriesIsMutable(); + queries_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * The metrics to query. If empty, will return a default set of metrics.
+       * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+       * 
+ * + * repeated string queries = 3; + */ + public Builder addQueries( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureQueriesIsMutable(); + queries_.add(value); + onChanged(); + return this; + } + /** + *
+       * The metrics to query. If empty, will return a default set of metrics.
+       * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+       * 
+ * + * repeated string queries = 3; + */ + public Builder addAllQueries( + java.lang.Iterable values) { + ensureQueriesIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, queries_); + onChanged(); + return this; + } + /** + *
+       * The metrics to query. If empty, will return a default set of metrics.
+       * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+       * 
+ * + * repeated string queries = 3; + */ + public Builder clearQueries() { + queries_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + return this; + } + /** + *
+       * The metrics to query. If empty, will return a default set of metrics.
+       * e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG
+       * 
+ * + * repeated string queries = 3; + */ + public Builder addQueriesBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureQueriesIsMutable(); + queries_.add(value); + onChanged(); + return this; + } + + private com.google.protobuf.Timestamp startTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> startTimeBuilder_; + /** + *
+       * Start timestamp, inclusive.
+       * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + public boolean hasStartTime() { + return startTimeBuilder_ != null || startTime_ != null; + } + /** + *
+       * Start timestamp, inclusive.
+       * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + public com.google.protobuf.Timestamp getStartTime() { + if (startTimeBuilder_ == null) { + return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } else { + return startTimeBuilder_.getMessage(); + } + } + /** + *
+       * Start timestamp, inclusive.
+       * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + public Builder setStartTime(com.google.protobuf.Timestamp value) { + if (startTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + startTime_ = value; + onChanged(); + } else { + startTimeBuilder_.setMessage(value); + } + + return this; + } + /** + *
+       * Start timestamp, inclusive.
+       * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + public Builder setStartTime( + com.google.protobuf.Timestamp.Builder builderForValue) { + if (startTimeBuilder_ == null) { + startTime_ = builderForValue.build(); + onChanged(); + } else { + startTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+       * Start timestamp, inclusive.
+       * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + public Builder mergeStartTime(com.google.protobuf.Timestamp value) { + if (startTimeBuilder_ == null) { + if (startTime_ != null) { + startTime_ = + com.google.protobuf.Timestamp.newBuilder(startTime_).mergeFrom(value).buildPartial(); + } else { + startTime_ = value; + } + onChanged(); + } else { + startTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+       * Start timestamp, inclusive.
+       * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + public Builder clearStartTime() { + if (startTimeBuilder_ == null) { + startTime_ = null; + onChanged(); + } else { + startTime_ = null; + startTimeBuilder_ = null; + } + + return this; + } + /** + *
+       * Start timestamp, inclusive.
+       * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() { + + onChanged(); + return getStartTimeFieldBuilder().getBuilder(); + } + /** + *
+       * Start timestamp, inclusive.
+       * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { + if (startTimeBuilder_ != null) { + return startTimeBuilder_.getMessageOrBuilder(); + } else { + return startTime_ == null ? + com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; + } + } + /** + *
+       * Start timestamp, inclusive.
+       * 
+ * + * .google.protobuf.Timestamp start_time = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> + getStartTimeFieldBuilder() { + if (startTimeBuilder_ == null) { + startTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( + getStartTime(), + getParentForChildren(), + isClean()); + startTime_ = null; + } + return startTimeBuilder_; + } + + private com.google.protobuf.Timestamp endTime_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> endTimeBuilder_; + /** + *
+       * End timestamp, inclusive..
+       * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + public boolean hasEndTime() { + return endTimeBuilder_ != null || endTime_ != null; + } + /** + *
+       * End timestamp, inclusive..
+       * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + public com.google.protobuf.Timestamp getEndTime() { + if (endTimeBuilder_ == null) { + return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } else { + return endTimeBuilder_.getMessage(); + } + } + /** + *
+       * End timestamp, inclusive..
+       * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + public Builder setEndTime(com.google.protobuf.Timestamp value) { + if (endTimeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + endTime_ = value; + onChanged(); + } else { + endTimeBuilder_.setMessage(value); + } + + return this; + } + /** + *
+       * End timestamp, inclusive..
+       * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + public Builder setEndTime( + com.google.protobuf.Timestamp.Builder builderForValue) { + if (endTimeBuilder_ == null) { + endTime_ = builderForValue.build(); + onChanged(); + } else { + endTimeBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+       * End timestamp, inclusive..
+       * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + public Builder mergeEndTime(com.google.protobuf.Timestamp value) { + if (endTimeBuilder_ == null) { + if (endTime_ != null) { + endTime_ = + com.google.protobuf.Timestamp.newBuilder(endTime_).mergeFrom(value).buildPartial(); + } else { + endTime_ = value; + } + onChanged(); + } else { + endTimeBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+       * End timestamp, inclusive..
+       * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + public Builder clearEndTime() { + if (endTimeBuilder_ == null) { + endTime_ = null; + onChanged(); + } else { + endTime_ = null; + endTimeBuilder_ = null; + } + + return this; + } + /** + *
+       * End timestamp, inclusive..
+       * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + public com.google.protobuf.Timestamp.Builder getEndTimeBuilder() { + + onChanged(); + return getEndTimeFieldBuilder().getBuilder(); + } + /** + *
+       * End timestamp, inclusive..
+       * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() { + if (endTimeBuilder_ != null) { + return endTimeBuilder_.getMessageOrBuilder(); + } else { + return endTime_ == null ? + com.google.protobuf.Timestamp.getDefaultInstance() : endTime_; + } + } + /** + *
+       * End timestamp, inclusive..
+       * 
+ * + * .google.protobuf.Timestamp end_time = 5; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> + getEndTimeFieldBuilder() { + if (endTimeBuilder_ == null) { + endTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( + getEndTime(), + getParentForChildren(), + isClean()); + endTime_ = null; + } + return endTimeBuilder_; + } + + private com.google.protobuf.Duration step_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> stepBuilder_; + /** + *
+       * Query resolution step width in duration format or float number of seconds.
+       * 
+ * + * .google.protobuf.Duration step = 6; + */ + public boolean hasStep() { + return stepBuilder_ != null || step_ != null; + } + /** + *
+       * Query resolution step width in duration format or float number of seconds.
+       * 
+ * + * .google.protobuf.Duration step = 6; + */ + public com.google.protobuf.Duration getStep() { + if (stepBuilder_ == null) { + return step_ == null ? com.google.protobuf.Duration.getDefaultInstance() : step_; + } else { + return stepBuilder_.getMessage(); + } + } + /** + *
+       * Query resolution step width in duration format or float number of seconds.
+       * 
+ * + * .google.protobuf.Duration step = 6; + */ + public Builder setStep(com.google.protobuf.Duration value) { + if (stepBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + step_ = value; + onChanged(); + } else { + stepBuilder_.setMessage(value); + } + + return this; + } + /** + *
+       * Query resolution step width in duration format or float number of seconds.
+       * 
+ * + * .google.protobuf.Duration step = 6; + */ + public Builder setStep( + com.google.protobuf.Duration.Builder builderForValue) { + if (stepBuilder_ == null) { + step_ = builderForValue.build(); + onChanged(); + } else { + stepBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+       * Query resolution step width in duration format or float number of seconds.
+       * 
+ * + * .google.protobuf.Duration step = 6; + */ + public Builder mergeStep(com.google.protobuf.Duration value) { + if (stepBuilder_ == null) { + if (step_ != null) { + step_ = + com.google.protobuf.Duration.newBuilder(step_).mergeFrom(value).buildPartial(); + } else { + step_ = value; + } + onChanged(); + } else { + stepBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+       * Query resolution step width in duration format or float number of seconds.
+       * 
+ * + * .google.protobuf.Duration step = 6; + */ + public Builder clearStep() { + if (stepBuilder_ == null) { + step_ = null; + onChanged(); + } else { + step_ = null; + stepBuilder_ = null; + } + + return this; + } + /** + *
+       * Query resolution step width in duration format or float number of seconds.
+       * 
+ * + * .google.protobuf.Duration step = 6; + */ + public com.google.protobuf.Duration.Builder getStepBuilder() { + + onChanged(); + return getStepFieldBuilder().getBuilder(); + } + /** + *
+       * Query resolution step width in duration format or float number of seconds.
+       * 
+ * + * .google.protobuf.Duration step = 6; + */ + public com.google.protobuf.DurationOrBuilder getStepOrBuilder() { + if (stepBuilder_ != null) { + return stepBuilder_.getMessageOrBuilder(); + } else { + return step_ == null ? + com.google.protobuf.Duration.getDefaultInstance() : step_; + } + } + /** + *
+       * Query resolution step width in duration format or float number of seconds.
+       * 
+ * + * .google.protobuf.Duration step = 6; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> + getStepFieldBuilder() { + if (stepBuilder_ == null) { + stepBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( + getStep(), + getParentForChildren(), + isClean()); + step_ = null; + } + return stepBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:flyteidl.admin.GetTaskMetricsRequest) + } + + // @@protoc_insertion_point(class_scope:flyteidl.admin.GetTaskMetricsRequest) + private static final flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest(); + } + + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetTaskMetricsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetTaskMetricsRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + 
@java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskMetricsRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface GetTaskMetricsResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.admin.GetTaskMetricsResponse) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The execution metric results.
+     * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + java.util.List + getResultsList(); + /** + *
+     * The execution metric results.
+     * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + flyteidl.core.Metrics.ExecutionMetricResult getResults(int index); + /** + *
+     * The execution metric results.
+     * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + int getResultsCount(); + /** + *
+     * The execution metric results.
+     * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + java.util.List + getResultsOrBuilderList(); + /** + *
+     * The execution metric results.
+     * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + flyteidl.core.Metrics.ExecutionMetricResultOrBuilder getResultsOrBuilder( + int index); + } + /** + *
+   * A response containing a list of metrics for a task execution.
+   * 
+ * + * Protobuf type {@code flyteidl.admin.GetTaskMetricsResponse} + */ + public static final class GetTaskMetricsResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:flyteidl.admin.GetTaskMetricsResponse) + GetTaskMetricsResponseOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetTaskMetricsResponse.newBuilder() to construct. + private GetTaskMetricsResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GetTaskMetricsResponse() { + results_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetTaskMetricsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + results_ = new java.util.ArrayList(); + mutable_bitField0_ |= 0x00000001; + } + results_.add( + input.readMessage(flyteidl.core.Metrics.ExecutionMetricResult.parser(), extensionRegistry)); + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if 
(((mutable_bitField0_ & 0x00000001) != 0)) { + results_ = java.util.Collections.unmodifiableList(results_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskMetricsResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskMetricsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse.class, flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse.Builder.class); + } + + public static final int RESULTS_FIELD_NUMBER = 1; + private java.util.List results_; + /** + *
+     * The execution metric results.
+     * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public java.util.List getResultsList() { + return results_; + } + /** + *
+     * The execution metric results.
+     * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public java.util.List + getResultsOrBuilderList() { + return results_; + } + /** + *
+     * The execution metric results.
+     * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public int getResultsCount() { + return results_.size(); + } + /** + *
+     * The execution metric results.
+     * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public flyteidl.core.Metrics.ExecutionMetricResult getResults(int index) { + return results_.get(index); + } + /** + *
+     * The execution metric results.
+     * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public flyteidl.core.Metrics.ExecutionMetricResultOrBuilder getResultsOrBuilder( + int index) { + return results_.get(index); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < results_.size(); i++) { + output.writeMessage(1, results_.get(i)); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < results_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, results_.get(i)); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse)) { + return super.equals(obj); + } + flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse other = (flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse) obj; + + if (!getResultsList() + .equals(other.getResultsList())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getResultsCount() > 0) { + hash = (37 * hash) + RESULTS_FIELD_NUMBER; + hash = (53 * hash) + getResultsList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = 
hash; + return hash; + } + + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * A response containing a list of metrics for a task execution.
+     * 
+ * + * Protobuf type {@code flyteidl.admin.GetTaskMetricsResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:flyteidl.admin.GetTaskMetricsResponse) + flyteidl.admin.AgentOuterClass.GetTaskMetricsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskMetricsResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskMetricsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse.class, flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse.Builder.class); + } + + // Construct using flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getResultsFieldBuilder(); + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + if (resultsBuilder_ == null) { + results_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resultsBuilder_.clear(); + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskMetricsResponse_descriptor; + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse getDefaultInstanceForType() { + return 
flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse.getDefaultInstance(); + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse build() { + flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse buildPartial() { + flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse result = new flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse(this); + int from_bitField0_ = bitField0_; + if (resultsBuilder_ == null) { + if (((bitField0_ & 0x00000001) != 0)) { + results_ = java.util.Collections.unmodifiableList(results_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.results_ = results_; + } else { + result.results_ = resultsBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other 
instanceof flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse) { + return mergeFrom((flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse other) { + if (other == flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse.getDefaultInstance()) return this; + if (resultsBuilder_ == null) { + if (!other.results_.isEmpty()) { + if (results_.isEmpty()) { + results_ = other.results_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultsIsMutable(); + results_.addAll(other.results_); + } + onChanged(); + } + } else { + if (!other.results_.isEmpty()) { + if (resultsBuilder_.isEmpty()) { + resultsBuilder_.dispose(); + resultsBuilder_ = null; + results_ = other.results_; + bitField0_ = (bitField0_ & ~0x00000001); + resultsBuilder_ = + com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? + getResultsFieldBuilder() : null; + } else { + resultsBuilder_.addAllMessages(other.results_); + } + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private java.util.List results_ = + java.util.Collections.emptyList(); + private void 
ensureResultsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + results_ = new java.util.ArrayList(results_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilderV3< + flyteidl.core.Metrics.ExecutionMetricResult, flyteidl.core.Metrics.ExecutionMetricResult.Builder, flyteidl.core.Metrics.ExecutionMetricResultOrBuilder> resultsBuilder_; + + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public java.util.List getResultsList() { + if (resultsBuilder_ == null) { + return java.util.Collections.unmodifiableList(results_); + } else { + return resultsBuilder_.getMessageList(); + } + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public int getResultsCount() { + if (resultsBuilder_ == null) { + return results_.size(); + } else { + return resultsBuilder_.getCount(); + } + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public flyteidl.core.Metrics.ExecutionMetricResult getResults(int index) { + if (resultsBuilder_ == null) { + return results_.get(index); + } else { + return resultsBuilder_.getMessage(index); + } + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public Builder setResults( + int index, flyteidl.core.Metrics.ExecutionMetricResult value) { + if (resultsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultsIsMutable(); + results_.set(index, value); + onChanged(); + } else { + resultsBuilder_.setMessage(index, value); + } + return this; + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public Builder setResults( + int index, flyteidl.core.Metrics.ExecutionMetricResult.Builder builderForValue) { + if (resultsBuilder_ == null) { + ensureResultsIsMutable(); + results_.set(index, builderForValue.build()); + onChanged(); + } else { + resultsBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public Builder addResults(flyteidl.core.Metrics.ExecutionMetricResult value) { + if (resultsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultsIsMutable(); + results_.add(value); + onChanged(); + } else { + resultsBuilder_.addMessage(value); + } + return this; + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public Builder addResults( + int index, flyteidl.core.Metrics.ExecutionMetricResult value) { + if (resultsBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultsIsMutable(); + results_.add(index, value); + onChanged(); + } else { + resultsBuilder_.addMessage(index, value); + } + return this; + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public Builder addResults( + flyteidl.core.Metrics.ExecutionMetricResult.Builder builderForValue) { + if (resultsBuilder_ == null) { + ensureResultsIsMutable(); + results_.add(builderForValue.build()); + onChanged(); + } else { + resultsBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public Builder addResults( + int index, flyteidl.core.Metrics.ExecutionMetricResult.Builder builderForValue) { + if (resultsBuilder_ == null) { + ensureResultsIsMutable(); + results_.add(index, builderForValue.build()); + onChanged(); + } else { + resultsBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public Builder addAllResults( + java.lang.Iterable values) { + if (resultsBuilder_ == null) { + ensureResultsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, results_); + onChanged(); + } else { + resultsBuilder_.addAllMessages(values); + } + return this; + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public Builder clearResults() { + if (resultsBuilder_ == null) { + results_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resultsBuilder_.clear(); + } + return this; + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public Builder removeResults(int index) { + if (resultsBuilder_ == null) { + ensureResultsIsMutable(); + results_.remove(index); + onChanged(); + } else { + resultsBuilder_.remove(index); + } + return this; + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public flyteidl.core.Metrics.ExecutionMetricResult.Builder getResultsBuilder( + int index) { + return getResultsFieldBuilder().getBuilder(index); + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public flyteidl.core.Metrics.ExecutionMetricResultOrBuilder getResultsOrBuilder( + int index) { + if (resultsBuilder_ == null) { + return results_.get(index); } else { + return resultsBuilder_.getMessageOrBuilder(index); + } + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public java.util.List + getResultsOrBuilderList() { + if (resultsBuilder_ != null) { + return resultsBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(results_); + } + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public flyteidl.core.Metrics.ExecutionMetricResult.Builder addResultsBuilder() { + return getResultsFieldBuilder().addBuilder( + flyteidl.core.Metrics.ExecutionMetricResult.getDefaultInstance()); + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public flyteidl.core.Metrics.ExecutionMetricResult.Builder addResultsBuilder( + int index) { + return getResultsFieldBuilder().addBuilder( + index, flyteidl.core.Metrics.ExecutionMetricResult.getDefaultInstance()); + } + /** + *
+       * The execution metric results.
+       * 
+ * + * repeated .flyteidl.core.ExecutionMetricResult results = 1; + */ + public java.util.List + getResultsBuilderList() { + return getResultsFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilderV3< + flyteidl.core.Metrics.ExecutionMetricResult, flyteidl.core.Metrics.ExecutionMetricResult.Builder, flyteidl.core.Metrics.ExecutionMetricResultOrBuilder> + getResultsFieldBuilder() { + if (resultsBuilder_ == null) { + resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< + flyteidl.core.Metrics.ExecutionMetricResult, flyteidl.core.Metrics.ExecutionMetricResult.Builder, flyteidl.core.Metrics.ExecutionMetricResultOrBuilder>( + results_, + ((bitField0_ & 0x00000001) != 0), + getParentForChildren(), + isClean()); + results_ = null; + } + return resultsBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:flyteidl.admin.GetTaskMetricsResponse) + } + + // @@protoc_insertion_point(class_scope:flyteidl.admin.GetTaskMetricsResponse) + private static final flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse(); + } + + public static flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetTaskMetricsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return new GetTaskMetricsResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskMetricsResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface GetTaskLogsRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.admin.GetTaskLogsRequest) + com.google.protobuf.MessageOrBuilder { + + /** + *
+     * A predefined yet extensible Task type identifier.
+     * 
+ * + * string task_type = 1; + */ + java.lang.String getTaskType(); + /** + *
+     * A predefined yet extensible Task type identifier.
+     * 
+ * + * string task_type = 1; + */ + com.google.protobuf.ByteString + getTaskTypeBytes(); + + /** + *
+     * Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata).
+     * 
+ * + * bytes resource_meta = 2; + */ + com.google.protobuf.ByteString getResourceMeta(); + + /** + *
+     * Number of lines to return.
+     * 
+ * + * uint64 lines = 3; + */ + long getLines(); + + /** + *
+     * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+     * in a query. If there are no more results, this value will be empty.
+     * 
+ * + * string token = 4; + */ + java.lang.String getToken(); + /** + *
+     * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+     * in a query. If there are no more results, this value will be empty.
+     * 
+ * + * string token = 4; + */ + com.google.protobuf.ByteString + getTokenBytes(); + } + /** + *
+   * A request to get the log from a task execution.
+   * 
+ * + * Protobuf type {@code flyteidl.admin.GetTaskLogsRequest} + */ + public static final class GetTaskLogsRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:flyteidl.admin.GetTaskLogsRequest) + GetTaskLogsRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetTaskLogsRequest.newBuilder() to construct. + private GetTaskLogsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GetTaskLogsRequest() { + taskType_ = ""; + resourceMeta_ = com.google.protobuf.ByteString.EMPTY; + token_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetTaskLogsRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + taskType_ = s; + break; + } + case 18: { + + resourceMeta_ = input.readBytes(); + break; + } + case 24: { + + lines_ = input.readUInt64(); + break; + } + case 34: { + java.lang.String s = input.readStringRequireUtf8(); + + token_ = s; + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + 
e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskLogsRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskLogsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.AgentOuterClass.GetTaskLogsRequest.class, flyteidl.admin.AgentOuterClass.GetTaskLogsRequest.Builder.class); + } + + public static final int TASK_TYPE_FIELD_NUMBER = 1; + private volatile java.lang.Object taskType_; + /** + *
+     * A predefined yet extensible Task type identifier.
+     * 
+ * + * string task_type = 1; + */ + public java.lang.String getTaskType() { + java.lang.Object ref = taskType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + taskType_ = s; + return s; + } + } + /** + *
+     * A predefined yet extensible Task type identifier.
+     * 
+ * + * string task_type = 1; + */ + public com.google.protobuf.ByteString + getTaskTypeBytes() { + java.lang.Object ref = taskType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + taskType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int RESOURCE_META_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString resourceMeta_; + /** + *
+     * Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata).
+     * 
+ * + * bytes resource_meta = 2; + */ + public com.google.protobuf.ByteString getResourceMeta() { + return resourceMeta_; + } + + public static final int LINES_FIELD_NUMBER = 3; + private long lines_; + /** + *
+     * Number of lines to return.
+     * 
+ * + * uint64 lines = 3; + */ + public long getLines() { + return lines_; + } + + public static final int TOKEN_FIELD_NUMBER = 4; + private volatile java.lang.Object token_; + /** + *
+     * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+     * in a query. If there are no more results, this value will be empty.
+     * 
+ * + * string token = 4; + */ + public java.lang.String getToken() { + java.lang.Object ref = token_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + token_ = s; + return s; + } + } + /** + *
+     * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+     * in a query. If there are no more results, this value will be empty.
+     * 
+ * + * string token = 4; + */ + public com.google.protobuf.ByteString + getTokenBytes() { + java.lang.Object ref = token_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + token_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getTaskTypeBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, taskType_); + } + if (!resourceMeta_.isEmpty()) { + output.writeBytes(2, resourceMeta_); + } + if (lines_ != 0L) { + output.writeUInt64(3, lines_); + } + if (!getTokenBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, token_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getTaskTypeBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, taskType_); + } + if (!resourceMeta_.isEmpty()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, resourceMeta_); + } + if (lines_ != 0L) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, lines_); + } + if (!getTokenBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, token_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if 
(!(obj instanceof flyteidl.admin.AgentOuterClass.GetTaskLogsRequest)) { + return super.equals(obj); + } + flyteidl.admin.AgentOuterClass.GetTaskLogsRequest other = (flyteidl.admin.AgentOuterClass.GetTaskLogsRequest) obj; + + if (!getTaskType() + .equals(other.getTaskType())) return false; + if (!getResourceMeta() + .equals(other.getResourceMeta())) return false; + if (getLines() + != other.getLines()) return false; + if (!getToken() + .equals(other.getToken())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + TASK_TYPE_FIELD_NUMBER; + hash = (53 * hash) + getTaskType().hashCode(); + hash = (37 * hash) + RESOURCE_META_FIELD_NUMBER; + hash = (53 * hash) + getResourceMeta().hashCode(); + hash = (37 * hash) + LINES_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getLines()); + hash = (37 * hash) + TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getToken().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static 
flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws 
java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(flyteidl.admin.AgentOuterClass.GetTaskLogsRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * A request to get the log from a task execution.
+     * 
+ * + * Protobuf type {@code flyteidl.admin.GetTaskLogsRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:flyteidl.admin.GetTaskLogsRequest) + flyteidl.admin.AgentOuterClass.GetTaskLogsRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskLogsRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskLogsRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.AgentOuterClass.GetTaskLogsRequest.class, flyteidl.admin.AgentOuterClass.GetTaskLogsRequest.Builder.class); + } + + // Construct using flyteidl.admin.AgentOuterClass.GetTaskLogsRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + taskType_ = ""; + + resourceMeta_ = com.google.protobuf.ByteString.EMPTY; + + lines_ = 0L; + + token_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskLogsRequest_descriptor; + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskLogsRequest getDefaultInstanceForType() { + return flyteidl.admin.AgentOuterClass.GetTaskLogsRequest.getDefaultInstance(); + } + + @java.lang.Override + public 
flyteidl.admin.AgentOuterClass.GetTaskLogsRequest build() { + flyteidl.admin.AgentOuterClass.GetTaskLogsRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskLogsRequest buildPartial() { + flyteidl.admin.AgentOuterClass.GetTaskLogsRequest result = new flyteidl.admin.AgentOuterClass.GetTaskLogsRequest(this); + result.taskType_ = taskType_; + result.resourceMeta_ = resourceMeta_; + result.lines_ = lines_; + result.token_ = token_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof flyteidl.admin.AgentOuterClass.GetTaskLogsRequest) { + return mergeFrom((flyteidl.admin.AgentOuterClass.GetTaskLogsRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(flyteidl.admin.AgentOuterClass.GetTaskLogsRequest other) { + if (other == 
flyteidl.admin.AgentOuterClass.GetTaskLogsRequest.getDefaultInstance()) return this; + if (!other.getTaskType().isEmpty()) { + taskType_ = other.taskType_; + onChanged(); + } + if (other.getResourceMeta() != com.google.protobuf.ByteString.EMPTY) { + setResourceMeta(other.getResourceMeta()); + } + if (other.getLines() != 0L) { + setLines(other.getLines()); + } + if (!other.getToken().isEmpty()) { + token_ = other.token_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + flyteidl.admin.AgentOuterClass.GetTaskLogsRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (flyteidl.admin.AgentOuterClass.GetTaskLogsRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object taskType_ = ""; + /** + *
+       * A predefined yet extensible Task type identifier.
+       * 
+ * + * string task_type = 1; + */ + public java.lang.String getTaskType() { + java.lang.Object ref = taskType_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + taskType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * A predefined yet extensible Task type identifier.
+       * 
+ * + * string task_type = 1; + */ + public com.google.protobuf.ByteString + getTaskTypeBytes() { + java.lang.Object ref = taskType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + taskType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * A predefined yet extensible Task type identifier.
+       * 
+ * + * string task_type = 1; + */ + public Builder setTaskType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + taskType_ = value; + onChanged(); + return this; + } + /** + *
+       * A predefined yet extensible Task type identifier.
+       * 
+ * + * string task_type = 1; + */ + public Builder clearTaskType() { + + taskType_ = getDefaultInstance().getTaskType(); + onChanged(); + return this; + } + /** + *
+       * A predefined yet extensible Task type identifier.
+       * 
+ * + * string task_type = 1; + */ + public Builder setTaskTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + taskType_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.ByteString resourceMeta_ = com.google.protobuf.ByteString.EMPTY; + /** + *
+       * Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata).
+       * 
+ * + * bytes resource_meta = 2; + */ + public com.google.protobuf.ByteString getResourceMeta() { + return resourceMeta_; + } + /** + *
+       * Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata).
+       * 
+ * + * bytes resource_meta = 2; + */ + public Builder setResourceMeta(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + + resourceMeta_ = value; + onChanged(); + return this; + } + /** + *
+       * Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata).
+       * 
+ * + * bytes resource_meta = 2; + */ + public Builder clearResourceMeta() { + + resourceMeta_ = getDefaultInstance().getResourceMeta(); + onChanged(); + return this; + } + + private long lines_ ; + /** + *
+       * Number of lines to return.
+       * 
+ * + * uint64 lines = 3; + */ + public long getLines() { + return lines_; + } + /** + *
+       * Number of lines to return.
+       * 
+ * + * uint64 lines = 3; + */ + public Builder setLines(long value) { + + lines_ = value; + onChanged(); + return this; + } + /** + *
+       * Number of lines to return.
+       * 
+ * + * uint64 lines = 3; + */ + public Builder clearLines() { + + lines_ = 0L; + onChanged(); + return this; + } + + private java.lang.Object token_ = ""; + /** + *
+       * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+       * in a query. If there are no more results, this value will be empty.
+       * 
+ * + * string token = 4; + */ + public java.lang.String getToken() { + java.lang.Object ref = token_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + token_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+       * in a query. If there are no more results, this value will be empty.
+       * 
+ * + * string token = 4; + */ + public com.google.protobuf.ByteString + getTokenBytes() { + java.lang.Object ref = token_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + token_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+       * in a query. If there are no more results, this value will be empty.
+       * 
+ * + * string token = 4; + */ + public Builder setToken( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + token_ = value; + onChanged(); + return this; + } + /** + *
+       * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+       * in a query. If there are no more results, this value will be empty.
+       * 
+ * + * string token = 4; + */ + public Builder clearToken() { + + token_ = getDefaultInstance().getToken(); + onChanged(); + return this; + } + /** + *
+       * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+       * in a query. If there are no more results, this value will be empty.
+       * 
+ * + * string token = 4; + */ + public Builder setTokenBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + token_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:flyteidl.admin.GetTaskLogsRequest) + } + + // @@protoc_insertion_point(class_scope:flyteidl.admin.GetTaskLogsRequest) + private static final flyteidl.admin.AgentOuterClass.GetTaskLogsRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new flyteidl.admin.AgentOuterClass.GetTaskLogsRequest(); + } + + public static flyteidl.admin.AgentOuterClass.GetTaskLogsRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetTaskLogsRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetTaskLogsRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskLogsRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface GetTaskLogsResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.admin.GetTaskLogsResponse) + 
com.google.protobuf.MessageOrBuilder { + + /** + *
+     * The execution log results.
+     * 
+ * + * repeated string results = 1; + */ + java.util.List + getResultsList(); + /** + *
+     * The execution log results.
+     * 
+ * + * repeated string results = 1; + */ + int getResultsCount(); + /** + *
+     * The execution log results.
+     * 
+ * + * repeated string results = 1; + */ + java.lang.String getResults(int index); + /** + *
+     * The execution log results.
+     * 
+ * + * repeated string results = 1; + */ + com.google.protobuf.ByteString + getResultsBytes(int index); + + /** + *
+     * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+     * in a query. If there are no more results, this value will be empty.
+     * 
+ * + * string token = 2; + */ + java.lang.String getToken(); + /** + *
+     * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+     * in a query. If there are no more results, this value will be empty.
+     * 
+ * + * string token = 2; + */ + com.google.protobuf.ByteString + getTokenBytes(); + } + /** + *
+   * A response containing the logs for a task execution.
+   * 
+ * + * Protobuf type {@code flyteidl.admin.GetTaskLogsResponse} + */ + public static final class GetTaskLogsResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:flyteidl.admin.GetTaskLogsResponse) + GetTaskLogsResponseOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetTaskLogsResponse.newBuilder() to construct. + private GetTaskLogsResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GetTaskLogsResponse() { + results_ = com.google.protobuf.LazyStringArrayList.EMPTY; + token_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetTaskLogsResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + if (!((mutable_bitField0_ & 0x00000001) != 0)) { + results_ = new com.google.protobuf.LazyStringArrayList(); + mutable_bitField0_ |= 0x00000001; + } + results_.add(s); + break; + } + case 18: { + java.lang.String s = input.readStringRequireUtf8(); + + token_ = s; + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000001) != 0)) { + results_ = results_.getUnmodifiableView(); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskLogsResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskLogsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.AgentOuterClass.GetTaskLogsResponse.class, flyteidl.admin.AgentOuterClass.GetTaskLogsResponse.Builder.class); + } + + private int bitField0_; + public static final int RESULTS_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList results_; + /** + *
+     * The execution log results.
+     * 
+ * + * repeated string results = 1; + */ + public com.google.protobuf.ProtocolStringList + getResultsList() { + return results_; + } + /** + *
+     * The execution log results.
+     * 
+ * + * repeated string results = 1; + */ + public int getResultsCount() { + return results_.size(); + } + /** + *
+     * The execution log results.
+     * 
+ * + * repeated string results = 1; + */ + public java.lang.String getResults(int index) { + return results_.get(index); + } + /** + *
+     * The execution log results.
+     * 
+ * + * repeated string results = 1; + */ + public com.google.protobuf.ByteString + getResultsBytes(int index) { + return results_.getByteString(index); + } + + public static final int TOKEN_FIELD_NUMBER = 2; + private volatile java.lang.Object token_; + /** + *
+     * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+     * in a query. If there are no more results, this value will be empty.
+     * 
+ * + * string token = 2; + */ + public java.lang.String getToken() { + java.lang.Object ref = token_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + token_ = s; + return s; + } + } + /** + *
+     * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+     * in a query. If there are no more results, this value will be empty.
+     * 
+ * + * string token = 2; + */ + public com.google.protobuf.ByteString + getTokenBytes() { + java.lang.Object ref = token_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + token_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + for (int i = 0; i < results_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, results_.getRaw(i)); + } + if (!getTokenBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, token_); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < results_.size(); i++) { + dataSize += computeStringSizeNoTag(results_.getRaw(i)); + } + size += dataSize; + size += 1 * getResultsList().size(); + } + if (!getTokenBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, token_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof flyteidl.admin.AgentOuterClass.GetTaskLogsResponse)) { + return super.equals(obj); + } + flyteidl.admin.AgentOuterClass.GetTaskLogsResponse other = (flyteidl.admin.AgentOuterClass.GetTaskLogsResponse) obj; + + if (!getResultsList() + .equals(other.getResultsList())) 
return false; + if (!getToken() + .equals(other.getToken())) return false; + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (getResultsCount() > 0) { + hash = (37 * hash) + RESULTS_FIELD_NUMBER; + hash = (53 * hash) + getResultsList().hashCode(); + } + hash = (37 * hash) + TOKEN_FIELD_NUMBER; + hash = (53 * hash) + getToken().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + 
public static Builder newBuilder(flyteidl.admin.AgentOuterClass.GetTaskLogsResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * A response containing the logs for a task execution.
+     * 
+ * + * Protobuf type {@code flyteidl.admin.GetTaskLogsResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:flyteidl.admin.GetTaskLogsResponse) + flyteidl.admin.AgentOuterClass.GetTaskLogsResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskLogsResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskLogsResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.AgentOuterClass.GetTaskLogsResponse.class, flyteidl.admin.AgentOuterClass.GetTaskLogsResponse.Builder.class); + } + + // Construct using flyteidl.admin.AgentOuterClass.GetTaskLogsResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + results_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + token_ = ""; + + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return flyteidl.admin.AgentOuterClass.internal_static_flyteidl_admin_GetTaskLogsResponse_descriptor; + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskLogsResponse getDefaultInstanceForType() { + return flyteidl.admin.AgentOuterClass.GetTaskLogsResponse.getDefaultInstance(); + } + + @java.lang.Override + public 
flyteidl.admin.AgentOuterClass.GetTaskLogsResponse build() { + flyteidl.admin.AgentOuterClass.GetTaskLogsResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskLogsResponse buildPartial() { + flyteidl.admin.AgentOuterClass.GetTaskLogsResponse result = new flyteidl.admin.AgentOuterClass.GetTaskLogsResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((bitField0_ & 0x00000001) != 0)) { + results_ = results_.getUnmodifiableView(); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.results_ = results_; + result.token_ = token_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof flyteidl.admin.AgentOuterClass.GetTaskLogsResponse) { + return mergeFrom((flyteidl.admin.AgentOuterClass.GetTaskLogsResponse)other); + } 
else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(flyteidl.admin.AgentOuterClass.GetTaskLogsResponse other) { + if (other == flyteidl.admin.AgentOuterClass.GetTaskLogsResponse.getDefaultInstance()) return this; + if (!other.results_.isEmpty()) { + if (results_.isEmpty()) { + results_ = other.results_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultsIsMutable(); + results_.addAll(other.results_); + } + onChanged(); + } + if (!other.getToken().isEmpty()) { + token_ = other.token_; + onChanged(); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + flyteidl.admin.AgentOuterClass.GetTaskLogsResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (flyteidl.admin.AgentOuterClass.GetTaskLogsResponse) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private com.google.protobuf.LazyStringList results_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureResultsIsMutable() { + if (!((bitField0_ & 0x00000001) != 0)) { + results_ = new com.google.protobuf.LazyStringArrayList(results_); + bitField0_ |= 0x00000001; + } + } + /** + *
+       * The execution log results.
+       * 
+ * + * repeated string results = 1; + */ + public com.google.protobuf.ProtocolStringList + getResultsList() { + return results_.getUnmodifiableView(); + } + /** + *
+       * The execution log results.
+       * 
+ * + * repeated string results = 1; + */ + public int getResultsCount() { + return results_.size(); + } + /** + *
+       * The execution log results.
+       * 
+ * + * repeated string results = 1; + */ + public java.lang.String getResults(int index) { + return results_.get(index); + } + /** + *
+       * The execution log results.
+       * 
+ * + * repeated string results = 1; + */ + public com.google.protobuf.ByteString + getResultsBytes(int index) { + return results_.getByteString(index); + } + /** + *
+       * The execution log results.
+       * 
+ * + * repeated string results = 1; + */ + public Builder setResults( + int index, java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultsIsMutable(); + results_.set(index, value); + onChanged(); + return this; + } + /** + *
+       * The execution log results.
+       * 
+ * + * repeated string results = 1; + */ + public Builder addResults( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultsIsMutable(); + results_.add(value); + onChanged(); + return this; + } + /** + *
+       * The execution log results.
+       * 
+ * + * repeated string results = 1; + */ + public Builder addAllResults( + java.lang.Iterable values) { + ensureResultsIsMutable(); + com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, results_); + onChanged(); + return this; + } + /** + *
+       * The execution log results.
+       * 
+ * + * repeated string results = 1; + */ + public Builder clearResults() { + results_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + /** + *
+       * The execution log results.
+       * 
+ * + * repeated string results = 1; + */ + public Builder addResultsBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + ensureResultsIsMutable(); + results_.add(value); + onChanged(); + return this; + } + + private java.lang.Object token_ = ""; + /** + *
+       * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+       * in a query. If there are no more results, this value will be empty.
+       * 
+ * + * string token = 2; + */ + public java.lang.String getToken() { + java.lang.Object ref = token_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + token_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+       * in a query. If there are no more results, this value will be empty.
+       * 
+ * + * string token = 2; + */ + public com.google.protobuf.ByteString + getTokenBytes() { + java.lang.Object ref = token_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + token_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+       * in a query. If there are no more results, this value will be empty.
+       * 
+ * + * string token = 2; + */ + public Builder setToken( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + token_ = value; + onChanged(); + return this; + } + /** + *
+       * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+       * in a query. If there are no more results, this value will be empty.
+       * 
+ * + * string token = 2; + */ + public Builder clearToken() { + + token_ = getDefaultInstance().getToken(); + onChanged(); + return this; + } + /** + *
+       * In the case of multiple pages of results, the server-provided token can be used to fetch the next page
+       * in a query. If there are no more results, this value will be empty.
+       * 
+ * + * string token = 2; + */ + public Builder setTokenBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + token_ = value; + onChanged(); + return this; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:flyteidl.admin.GetTaskLogsResponse) + } + + // @@protoc_insertion_point(class_scope:flyteidl.admin.GetTaskLogsResponse) + private static final flyteidl.admin.AgentOuterClass.GetTaskLogsResponse DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new flyteidl.admin.AgentOuterClass.GetTaskLogsResponse(); + } + + public static flyteidl.admin.AgentOuterClass.GetTaskLogsResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetTaskLogsResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetTaskLogsResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public flyteidl.admin.AgentOuterClass.GetTaskLogsResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor; + private static final + 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_TaskExecutionMetadata_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_TaskExecutionMetadata_LabelsEntry_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_TaskExecutionMetadata_LabelsEntry_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_TaskExecutionMetadata_AnnotationsEntry_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_TaskExecutionMetadata_AnnotationsEntry_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_TaskExecutionMetadata_EnvironmentVariablesEntry_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_TaskExecutionMetadata_EnvironmentVariablesEntry_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_CreateTaskRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_CreateTaskRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_CreateTaskResponse_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_CreateTaskResponse_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_GetTaskRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_GetTaskRequest_fieldAccessorTable; + private static final 
com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_GetTaskResponse_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_GetTaskResponse_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_Resource_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_Resource_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_DeleteTaskRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_DeleteTaskRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_DeleteTaskResponse_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_DeleteTaskResponse_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_Agent_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_Agent_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_GetAgentRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_GetAgentRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_GetAgentResponse_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_GetAgentResponse_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + 
internal_static_flyteidl_admin_ListAgentsRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_ListAgentsRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_ListAgentsResponse_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_ListAgentsResponse_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_GetTaskMetricsRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_GetTaskMetricsRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_GetTaskMetricsResponse_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_GetTaskMetricsResponse_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_GetTaskLogsRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_GetTaskLogsRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_GetTaskLogsResponse_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_GetTaskLogsResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\032flyteidl/admin/agent.proto\022\016flyteidl.a" + + 
"dmin\032\034flyteidl/core/literals.proto\032\031flyt" + + "eidl/core/tasks.proto\032\036flyteidl/core/ide" + + "ntifier.proto\032\035flyteidl/core/execution.p" + + "roto\032\033flyteidl/core/metrics.proto\032\036googl" + + "e/protobuf/duration.proto\032\037google/protob" + + "uf/timestamp.proto\"\232\004\n\025TaskExecutionMeta" + + "data\022A\n\021task_execution_id\030\001 \001(\0132&.flytei" + + "dl.core.TaskExecutionIdentifier\022\021\n\tnames" + + "pace\030\002 \001(\t\022A\n\006labels\030\003 \003(\01321.flyteidl.ad" + + "min.TaskExecutionMetadata.LabelsEntry\022K\n" + + "\013annotations\030\004 \003(\01326.flyteidl.admin.Task" + + "ExecutionMetadata.AnnotationsEntry\022\033\n\023k8" + + "s_service_account\030\005 \001(\t\022^\n\025environment_v" + + "ariables\030\006 \003(\0132?.flyteidl.admin.TaskExec" + + "utionMetadata.EnvironmentVariablesEntry\032" + + "-\n\013LabelsEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001" + + "(\t:\0028\001\0322\n\020AnnotationsEntry\022\013\n\003key\030\001 \001(\t\022" + + "\r\n\005value\030\002 \001(\t:\0028\001\032;\n\031EnvironmentVariabl" + + "esEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"" + + "\314\001\n\021CreateTaskRequest\022)\n\006inputs\030\001 \001(\0132\031." 
+ + "flyteidl.core.LiteralMap\022-\n\010template\030\002 \001" + + "(\0132\033.flyteidl.core.TaskTemplate\022\025\n\routpu" + + "t_prefix\030\003 \001(\t\022F\n\027task_execution_metadat" + + "a\030\004 \001(\0132%.flyteidl.admin.TaskExecutionMe" + + "tadata\"b\n\022CreateTaskResponse\022\027\n\rresource" + + "_meta\030\001 \001(\014H\000\022,\n\010resource\030\002 \001(\0132\030.flytei" + + "dl.admin.ResourceH\000B\005\n\003res\":\n\016GetTaskReq" + + "uest\022\021\n\ttask_type\030\001 \001(\t\022\025\n\rresource_meta" + + "\030\002 \001(\014\"h\n\017GetTaskResponse\022*\n\010resource\030\001 " + + "\001(\0132\030.flyteidl.admin.Resource\022)\n\tlog_lin" + + "ks\030\002 \003(\0132\026.flyteidl.core.TaskLog\"\317\001\n\010Res" + + "ource\022(\n\005state\030\001 \001(\0162\025.flyteidl.admin.St" + + "ateB\002\030\001\022*\n\007outputs\030\002 \001(\0132\031.flyteidl.core" + + ".LiteralMap\022\017\n\007message\030\003 \001(\t\022)\n\tlog_link" + + "s\030\004 \003(\0132\026.flyteidl.core.TaskLog\0221\n\005phase" + + "\030\005 \001(\0162\".flyteidl.core.TaskExecution.Pha" + + "se\"=\n\021DeleteTaskRequest\022\021\n\ttask_type\030\001 \001" + + "(\t\022\025\n\rresource_meta\030\002 \001(\014\"\024\n\022DeleteTaskR" + + "esponse\"3\n\005Agent\022\014\n\004name\030\001 \001(\t\022\034\n\024suppor" + + "ted_task_types\030\002 \003(\t\"\037\n\017GetAgentRequest\022" + + "\014\n\004name\030\001 \001(\t\"8\n\020GetAgentResponse\022$\n\005age" + + "nt\030\001 \001(\0132\025.flyteidl.admin.Agent\"\023\n\021ListA" + + "gentsRequest\";\n\022ListAgentsResponse\022%\n\006ag" + + "ents\030\001 \003(\0132\025.flyteidl.admin.Agent\"\331\001\n\025Ge" + + "tTaskMetricsRequest\022\021\n\ttask_type\030\001 \001(\t\022\025" + + "\n\rresource_meta\030\002 \001(\014\022\017\n\007queries\030\003 \003(\t\022." 
+ + "\n\nstart_time\030\004 \001(\0132\032.google.protobuf.Tim" + + "estamp\022,\n\010end_time\030\005 \001(\0132\032.google.protob" + + "uf.Timestamp\022\'\n\004step\030\006 \001(\0132\031.google.prot" + + "obuf.Duration\"O\n\026GetTaskMetricsResponse\022" + + "5\n\007results\030\001 \003(\0132$.flyteidl.core.Executi" + + "onMetricResult\"\\\n\022GetTaskLogsRequest\022\021\n\t" + + "task_type\030\001 \001(\t\022\025\n\rresource_meta\030\002 \001(\014\022\r" + + "\n\005lines\030\003 \001(\004\022\r\n\005token\030\004 \001(\t\"5\n\023GetTaskL" + + "ogsResponse\022\017\n\007results\030\001 \003(\t\022\r\n\005token\030\002 " + + "\001(\t*^\n\005State\022\025\n\021RETRYABLE_FAILURE\020\000\022\025\n\021P" + + "ERMANENT_FAILURE\020\001\022\013\n\007PENDING\020\002\022\013\n\007RUNNI" + + "NG\020\003\022\r\n\tSUCCEEDED\020\004B=Z;github.com/flyteo" + + "rg/flyte/flyteidl/gen/pb-go/flyteidl/adm" + + "inb\006proto3" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + flyteidl.core.Literals.getDescriptor(), + flyteidl.core.Tasks.getDescriptor(), + flyteidl.core.IdentifierOuterClass.getDescriptor(), + flyteidl.core.Execution.getDescriptor(), + flyteidl.core.Metrics.getDescriptor(), + com.google.protobuf.DurationProto.getDescriptor(), + com.google.protobuf.TimestampProto.getDescriptor(), + }, assigner); + internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_flyteidl_admin_TaskExecutionMetadata_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor, + new java.lang.String[] { "TaskExecutionId", "Namespace", "Labels", "Annotations", "K8SServiceAccount", "EnvironmentVariables", }); + internal_static_flyteidl_admin_TaskExecutionMetadata_LabelsEntry_descriptor = + internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor.getNestedTypes().get(0); + internal_static_flyteidl_admin_TaskExecutionMetadata_LabelsEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_TaskExecutionMetadata_LabelsEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_flyteidl_admin_TaskExecutionMetadata_AnnotationsEntry_descriptor = + internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor.getNestedTypes().get(1); + internal_static_flyteidl_admin_TaskExecutionMetadata_AnnotationsEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + 
internal_static_flyteidl_admin_TaskExecutionMetadata_AnnotationsEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_flyteidl_admin_TaskExecutionMetadata_EnvironmentVariablesEntry_descriptor = + internal_static_flyteidl_admin_TaskExecutionMetadata_descriptor.getNestedTypes().get(2); + internal_static_flyteidl_admin_TaskExecutionMetadata_EnvironmentVariablesEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_TaskExecutionMetadata_EnvironmentVariablesEntry_descriptor, + new java.lang.String[] { "Key", "Value", }); + internal_static_flyteidl_admin_CreateTaskRequest_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_flyteidl_admin_CreateTaskRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_CreateTaskRequest_descriptor, + new java.lang.String[] { "Inputs", "Template", "OutputPrefix", "TaskExecutionMetadata", }); + internal_static_flyteidl_admin_CreateTaskResponse_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_flyteidl_admin_CreateTaskResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_CreateTaskResponse_descriptor, + new java.lang.String[] { "ResourceMeta", "Resource", "Res", }); + internal_static_flyteidl_admin_GetTaskRequest_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_flyteidl_admin_GetTaskRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_GetTaskRequest_descriptor, + new java.lang.String[] { "TaskType", "ResourceMeta", }); + internal_static_flyteidl_admin_GetTaskResponse_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_flyteidl_admin_GetTaskResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
+ internal_static_flyteidl_admin_GetTaskResponse_descriptor, + new java.lang.String[] { "Resource", "LogLinks", }); + internal_static_flyteidl_admin_Resource_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_flyteidl_admin_Resource_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_Resource_descriptor, + new java.lang.String[] { "State", "Outputs", "Message", "LogLinks", "Phase", }); + internal_static_flyteidl_admin_DeleteTaskRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_flyteidl_admin_DeleteTaskRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_DeleteTaskRequest_descriptor, + new java.lang.String[] { "TaskType", "ResourceMeta", }); + internal_static_flyteidl_admin_DeleteTaskResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_flyteidl_admin_DeleteTaskResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_DeleteTaskResponse_descriptor, + new java.lang.String[] { }); + internal_static_flyteidl_admin_Agent_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_flyteidl_admin_Agent_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_Agent_descriptor, + new java.lang.String[] { "Name", "SupportedTaskTypes", }); internal_static_flyteidl_admin_GetAgentRequest_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_flyteidl_admin_GetAgentRequest_fieldAccessorTable = new @@ -12273,11 +16868,37 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_ListAgentsResponse_descriptor, new java.lang.String[] { "Agents", }); + 
internal_static_flyteidl_admin_GetTaskMetricsRequest_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_flyteidl_admin_GetTaskMetricsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_GetTaskMetricsRequest_descriptor, + new java.lang.String[] { "TaskType", "ResourceMeta", "Queries", "StartTime", "EndTime", "Step", }); + internal_static_flyteidl_admin_GetTaskMetricsResponse_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_flyteidl_admin_GetTaskMetricsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_GetTaskMetricsResponse_descriptor, + new java.lang.String[] { "Results", }); + internal_static_flyteidl_admin_GetTaskLogsRequest_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_flyteidl_admin_GetTaskLogsRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_GetTaskLogsRequest_descriptor, + new java.lang.String[] { "TaskType", "ResourceMeta", "Lines", "Token", }); + internal_static_flyteidl_admin_GetTaskLogsResponse_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_flyteidl_admin_GetTaskLogsResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_GetTaskLogsResponse_descriptor, + new java.lang.String[] { "Results", "Token", }); flyteidl.core.Literals.getDescriptor(); flyteidl.core.Tasks.getDescriptor(); - flyteidl.core.Interface.getDescriptor(); flyteidl.core.IdentifierOuterClass.getDescriptor(); flyteidl.core.Execution.getDescriptor(); + flyteidl.core.Metrics.getDescriptor(); + com.google.protobuf.DurationProto.getDescriptor(); + com.google.protobuf.TimestampProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git 
a/flyteidl/gen/pb-java/flyteidl/admin/Common.java b/flyteidl/gen/pb-java/flyteidl/admin/Common.java index 4120abc852..bc18ef3c1e 100644 --- a/flyteidl/gen/pb-java/flyteidl/admin/Common.java +++ b/flyteidl/gen/pb-java/flyteidl/admin/Common.java @@ -212,6 +212,24 @@ public interface NamedEntityIdentifierOrBuilder extends */ com.google.protobuf.ByteString getNameBytes(); + + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 4; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 4; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -236,6 +254,7 @@ private NamedEntityIdentifier() {
       project_ = "";
       domain_ = "";
       name_ = "";
+      org_ = "";
     }
 
     @java.lang.Override
@@ -280,6 +299,12 @@ private NamedEntityIdentifier(
               name_ = s;
               break;
             }
+            case 34: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -444,6 +469,48 @@ public java.lang.String getName() {
       }
     }
 
+    public static final int ORG_FIELD_NUMBER = 4;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 4; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 4; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -467,6 +534,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getNameBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, name_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, org_); + } unknownFields.writeTo(output); } @@ -485,6 +555,9 @@ public int getSerializedSize() { if (!getNameBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, name_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -506,6 +579,8 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getDomain())) return false; if (!getName() .equals(other.getName())) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -523,6 +598,8 @@ public int hashCode() { hash = (53 * hash) + getDomain().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -669,6 +746,8 @@ public Builder clear() { name_ = ""; + org_ = ""; + return this; } @@ -698,6 +777,7 @@ public flyteidl.admin.Common.NamedEntityIdentifier buildPartial() { result.project_ = project_; 
result.domain_ = domain_; result.name_ = name_; + result.org_ = org_; onBuilt(); return result; } @@ -758,6 +838,10 @@ public Builder mergeFrom(flyteidl.admin.Common.NamedEntityIdentifier other) { name_ = other.name_; onChanged(); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -1068,6 +1152,95 @@ public Builder setNameBytes( onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 4; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 4; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 4; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 4; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 4; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -3834,6 +4007,24 @@ public interface NamedEntityIdentifierListRequestOrBuilder extends */ com.google.protobuf.ByteString getFiltersBytes(); + + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 7; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 7; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -3856,6 +4047,7 @@ private NamedEntityIdentifierListRequest() {
       domain_ = "";
       token_ = "";
       filters_ = "";
+      org_ = "";
     }
 
     @java.lang.Override
@@ -3924,6 +4116,12 @@ private NamedEntityIdentifierListRequest(
               filters_ = s;
               break;
             }
+            case 58: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -4184,6 +4382,48 @@ public java.lang.String getFilters() {
       }
     }
 
+    public static final int ORG_FIELD_NUMBER = 7;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 7; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 7; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -4216,6 +4456,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getFiltersBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, filters_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 7, org_); + } unknownFields.writeTo(output); } @@ -4245,6 +4488,9 @@ public int getSerializedSize() { if (!getFiltersBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, filters_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -4275,6 +4521,8 @@ public boolean equals(final java.lang.Object obj) { } if (!getFilters() .equals(other.getFilters())) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -4300,6 +4548,8 @@ public int hashCode() { } hash = (37 * hash) + FILTERS_FIELD_NUMBER; hash = (53 * hash) + getFilters().hashCode(); + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -4453,6 +4703,8 @@ public Builder clear() { } filters_ = ""; + org_ = ""; + return this; } @@ -4489,6 +4741,7 @@ public flyteidl.admin.Common.NamedEntityIdentifierListRequest buildPartial() { result.sortBy_ = sortByBuilder_.build(); } result.filters_ 
= filters_; + result.org_ = org_; onBuilt(); return result; } @@ -4559,6 +4812,10 @@ public Builder mergeFrom(flyteidl.admin.Common.NamedEntityIdentifierListRequest filters_ = other.filters_; onChanged(); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -5171,6 +5428,95 @@ public Builder setFiltersBytes( onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 7; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 7; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 7; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 7; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 7; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -5363,6 +5709,24 @@ public interface NamedEntityListRequestOrBuilder extends */ com.google.protobuf.ByteString getFiltersBytes(); + + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 8; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 8; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -5386,6 +5750,7 @@ private NamedEntityListRequest() {
       domain_ = "";
       token_ = "";
       filters_ = "";
+      org_ = "";
     }
 
     @java.lang.Override
@@ -5460,6 +5825,12 @@ private NamedEntityListRequest(
               filters_ = s;
               break;
             }
+            case 66: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -5744,6 +6115,48 @@ public java.lang.String getFilters() {
       }
     }
 
+    public static final int ORG_FIELD_NUMBER = 8;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 8; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 8; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -5779,6 +6192,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getFiltersBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 7, filters_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 8, org_); + } unknownFields.writeTo(output); } @@ -5812,6 +6228,9 @@ public int getSerializedSize() { if (!getFiltersBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, filters_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -5843,6 +6262,8 @@ public boolean equals(final java.lang.Object obj) { } if (!getFilters() .equals(other.getFilters())) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -5870,6 +6291,8 @@ public int hashCode() { } hash = (37 * hash) + FILTERS_FIELD_NUMBER; hash = (53 * hash) + getFilters().hashCode(); + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -6025,6 +6448,8 @@ public Builder clear() { } filters_ = ""; + org_ = ""; + return this; } @@ -6062,6 +6487,7 @@ public flyteidl.admin.Common.NamedEntityListRequest buildPartial() { result.sortBy_ = sortByBuilder_.build(); } result.filters_ = 
filters_; + result.org_ = org_; onBuilt(); return result; } @@ -6135,6 +6561,10 @@ public Builder mergeFrom(flyteidl.admin.Common.NamedEntityListRequest other) { filters_ = other.filters_; onChanged(); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -6809,6 +7239,95 @@ public Builder setFiltersBytes( onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 8; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 8; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 8; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 8; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 8; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -22622,73 +23141,73 @@ public flyteidl.admin.Common.FlyteURLs getDefaultInstanceForType() { "admin\032\035flyteidl/core/execution.proto\032\036fl" + "yteidl/core/identifier.proto\032\034flyteidl/c" + "ore/literals.proto\032\037google/protobuf/time" + - "stamp.proto\"F\n\025NamedEntityIdentifier\022\017\n\007" + + "stamp.proto\"S\n\025NamedEntityIdentifier\022\017\n\007" + "project\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\022\014\n\004name\030\003 " + - "\001(\t\"[\n\023NamedEntityMetadata\022\023\n\013descriptio" + - "n\030\001 \001(\t\022/\n\005state\030\002 \001(\0162 .flyteidl.admin." + - "NamedEntityState\"\253\001\n\013NamedEntity\0222\n\rreso" + - "urce_type\030\001 \001(\0162\033.flyteidl.core.Resource" + - "Type\0221\n\002id\030\002 \001(\0132%.flyteidl.admin.NamedE" + - "ntityIdentifier\0225\n\010metadata\030\003 \001(\0132#.flyt" + - "eidl.admin.NamedEntityMetadata\"r\n\004Sort\022\013" + - "\n\003key\030\001 \001(\t\0221\n\tdirection\030\002 \001(\0162\036.flyteid" + - "l.admin.Sort.Direction\"*\n\tDirection\022\016\n\nD" + - "ESCENDING\020\000\022\r\n\tASCENDING\020\001\"\231\001\n NamedEnti" + - "tyIdentifierListRequest\022\017\n\007project\030\001 \001(\t" + - "\022\016\n\006domain\030\002 \001(\t\022\r\n\005limit\030\003 \001(\r\022\r\n\005token" + + "\001(\t\022\013\n\003org\030\004 \001(\t\"[\n\023NamedEntityMetadata\022" + + "\023\n\013description\030\001 \001(\t\022/\n\005state\030\002 \001(\0162 .fl" + + "yteidl.admin.NamedEntityState\"\253\001\n\013NamedE" + + "ntity\0222\n\rresource_type\030\001 \001(\0162\033.flyteidl." 
+ + "core.ResourceType\0221\n\002id\030\002 \001(\0132%.flyteidl" + + ".admin.NamedEntityIdentifier\0225\n\010metadata" + + "\030\003 \001(\0132#.flyteidl.admin.NamedEntityMetad" + + "ata\"r\n\004Sort\022\013\n\003key\030\001 \001(\t\0221\n\tdirection\030\002 " + + "\001(\0162\036.flyteidl.admin.Sort.Direction\"*\n\tD" + + "irection\022\016\n\nDESCENDING\020\000\022\r\n\tASCENDING\020\001\"" + + "\246\001\n NamedEntityIdentifierListRequest\022\017\n\007" + + "project\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\022\r\n\005limit\030\003" + + " \001(\r\022\r\n\005token\030\004 \001(\t\022%\n\007sort_by\030\005 \001(\0132\024.f" + + "lyteidl.admin.Sort\022\017\n\007filters\030\006 \001(\t\022\013\n\003o" + + "rg\030\007 \001(\t\"\320\001\n\026NamedEntityListRequest\0222\n\rr" + + "esource_type\030\001 \001(\0162\033.flyteidl.core.Resou" + + "rceType\022\017\n\007project\030\002 \001(\t\022\016\n\006domain\030\003 \001(\t" + + "\022\r\n\005limit\030\004 \001(\r\022\r\n\005token\030\005 \001(\t\022%\n\007sort_b" + + "y\030\006 \001(\0132\024.flyteidl.admin.Sort\022\017\n\007filters" + + "\030\007 \001(\t\022\013\n\003org\030\010 \001(\t\"c\n\031NamedEntityIdenti" + + "fierList\0227\n\010entities\030\001 \003(\0132%.flyteidl.ad" + + "min.NamedEntityIdentifier\022\r\n\005token\030\002 \001(\t" + + "\"O\n\017NamedEntityList\022-\n\010entities\030\001 \003(\0132\033." + + "flyteidl.admin.NamedEntity\022\r\n\005token\030\002 \001(" + + "\t\"~\n\025NamedEntityGetRequest\0222\n\rresource_t" + + "ype\030\001 \001(\0162\033.flyteidl.core.ResourceType\0221" + + "\n\002id\030\002 \001(\0132%.flyteidl.admin.NamedEntityI" + + "dentifier\"\270\001\n\030NamedEntityUpdateRequest\0222" + + "\n\rresource_type\030\001 \001(\0162\033.flyteidl.core.Re" + + "sourceType\0221\n\002id\030\002 \001(\0132%.flyteidl.admin." 
+ + "NamedEntityIdentifier\0225\n\010metadata\030\003 \001(\0132" + + "#.flyteidl.admin.NamedEntityMetadata\"\033\n\031" + + "NamedEntityUpdateResponse\"9\n\020ObjectGetRe" + + "quest\022%\n\002id\030\001 \001(\0132\031.flyteidl.core.Identi" + + "fier\"\236\001\n\023ResourceListRequest\0221\n\002id\030\001 \001(\013" + + "2%.flyteidl.admin.NamedEntityIdentifier\022" + + "\r\n\005limit\030\002 \001(\r\022\r\n\005token\030\003 \001(\t\022\017\n\007filters" + "\030\004 \001(\t\022%\n\007sort_by\030\005 \001(\0132\024.flyteidl.admin" + - ".Sort\022\017\n\007filters\030\006 \001(\t\"\303\001\n\026NamedEntityLi" + - "stRequest\0222\n\rresource_type\030\001 \001(\0162\033.flyte" + - "idl.core.ResourceType\022\017\n\007project\030\002 \001(\t\022\016" + - "\n\006domain\030\003 \001(\t\022\r\n\005limit\030\004 \001(\r\022\r\n\005token\030\005" + - " \001(\t\022%\n\007sort_by\030\006 \001(\0132\024.flyteidl.admin.S" + - "ort\022\017\n\007filters\030\007 \001(\t\"c\n\031NamedEntityIdent" + - "ifierList\0227\n\010entities\030\001 \003(\0132%.flyteidl.a" + - "dmin.NamedEntityIdentifier\022\r\n\005token\030\002 \001(" + - "\t\"O\n\017NamedEntityList\022-\n\010entities\030\001 \003(\0132\033" + - ".flyteidl.admin.NamedEntity\022\r\n\005token\030\002 \001" + - "(\t\"~\n\025NamedEntityGetRequest\0222\n\rresource_" + - "type\030\001 \001(\0162\033.flyteidl.core.ResourceType\022" + - "1\n\002id\030\002 \001(\0132%.flyteidl.admin.NamedEntity" + - "Identifier\"\270\001\n\030NamedEntityUpdateRequest\022" + - "2\n\rresource_type\030\001 \001(\0162\033.flyteidl.core.R" + - "esourceType\0221\n\002id\030\002 \001(\0132%.flyteidl.admin" + - ".NamedEntityIdentifier\0225\n\010metadata\030\003 \001(\013" + - "2#.flyteidl.admin.NamedEntityMetadata\"\033\n" + - "\031NamedEntityUpdateResponse\"9\n\020ObjectGetR" + - "equest\022%\n\002id\030\001 \001(\0132\031.flyteidl.core.Ident" + - "ifier\"\236\001\n\023ResourceListRequest\0221\n\002id\030\001 \001(" + - 
"\0132%.flyteidl.admin.NamedEntityIdentifier" + - "\022\r\n\005limit\030\002 \001(\r\022\r\n\005token\030\003 \001(\t\022\017\n\007filter" + - "s\030\004 \001(\t\022%\n\007sort_by\030\005 \001(\0132\024.flyteidl.admi" + - "n.Sort\"-\n\021EmailNotification\022\030\n\020recipient" + - "s_email\030\001 \003(\t\"1\n\025PagerDutyNotification\022\030" + - "\n\020recipients_email\030\001 \003(\t\"-\n\021SlackNotific" + - "ation\022\030\n\020recipients_email\030\001 \003(\t\"\363\001\n\014Noti" + - "fication\0226\n\006phases\030\001 \003(\0162&.flyteidl.core" + - ".WorkflowExecution.Phase\0222\n\005email\030\002 \001(\0132" + - "!.flyteidl.admin.EmailNotificationH\000\022;\n\n" + - "pager_duty\030\003 \001(\0132%.flyteidl.admin.PagerD" + - "utyNotificationH\000\0222\n\005slack\030\004 \001(\0132!.flyte" + - "idl.admin.SlackNotificationH\000B\006\n\004type\")\n" + - "\007UrlBlob\022\013\n\003url\030\001 \001(\t\022\r\n\005bytes\030\002 \001(\003:\002\030\001" + - "\"k\n\006Labels\0222\n\006values\030\001 \003(\0132\".flyteidl.ad" + - "min.Labels.ValuesEntry\032-\n\013ValuesEntry\022\013\n" + - "\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"u\n\013Annotat" + - "ions\0227\n\006values\030\001 \003(\0132\'.flyteidl.admin.An" + - "notations.ValuesEntry\032-\n\013ValuesEntry\022\013\n\003" + - "key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"3\n\004Envs\022+\n\006" + - "values\030\001 \003(\0132\033.flyteidl.core.KeyValuePai" + - "r\"N\n\010AuthRole\022\032\n\022assumable_iam_role\030\001 \001(" + - "\t\022\"\n\032kubernetes_service_account\030\002 \001(\t:\002\030" + - "\001\"5\n\023RawOutputDataConfig\022\036\n\026output_locat" + - "ion_prefix\030\001 \001(\t\":\n\tFlyteURLs\022\016\n\006inputs\030" + - "\001 \001(\t\022\017\n\007outputs\030\002 \001(\t\022\014\n\004deck\030\003 \001(\t*\\\n\020" + - "NamedEntityState\022\027\n\023NAMED_ENTITY_ACTIVE\020" + - "\000\022\031\n\025NAMED_ENTITY_ARCHIVED\020\001\022\024\n\020SYSTEM_G" + - 
"ENERATED\020\002B=Z;github.com/flyteorg/flyte/" + - "flyteidl/gen/pb-go/flyteidl/adminb\006proto" + - "3" + ".Sort\"-\n\021EmailNotification\022\030\n\020recipients" + + "_email\030\001 \003(\t\"1\n\025PagerDutyNotification\022\030\n" + + "\020recipients_email\030\001 \003(\t\"-\n\021SlackNotifica" + + "tion\022\030\n\020recipients_email\030\001 \003(\t\"\363\001\n\014Notif" + + "ication\0226\n\006phases\030\001 \003(\0162&.flyteidl.core." + + "WorkflowExecution.Phase\0222\n\005email\030\002 \001(\0132!" + + ".flyteidl.admin.EmailNotificationH\000\022;\n\np" + + "ager_duty\030\003 \001(\0132%.flyteidl.admin.PagerDu" + + "tyNotificationH\000\0222\n\005slack\030\004 \001(\0132!.flytei" + + "dl.admin.SlackNotificationH\000B\006\n\004type\")\n\007" + + "UrlBlob\022\013\n\003url\030\001 \001(\t\022\r\n\005bytes\030\002 \001(\003:\002\030\001\"" + + "k\n\006Labels\0222\n\006values\030\001 \003(\0132\".flyteidl.adm" + + "in.Labels.ValuesEntry\032-\n\013ValuesEntry\022\013\n\003" + + "key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"u\n\013Annotati" + + "ons\0227\n\006values\030\001 \003(\0132\'.flyteidl.admin.Ann" + + "otations.ValuesEntry\032-\n\013ValuesEntry\022\013\n\003k" + + "ey\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"3\n\004Envs\022+\n\006v" + + "alues\030\001 \003(\0132\033.flyteidl.core.KeyValuePair" + + "\"N\n\010AuthRole\022\032\n\022assumable_iam_role\030\001 \001(\t" + + "\022\"\n\032kubernetes_service_account\030\002 \001(\t:\002\030\001" + + "\"5\n\023RawOutputDataConfig\022\036\n\026output_locati" + + "on_prefix\030\001 \001(\t\":\n\tFlyteURLs\022\016\n\006inputs\030\001" + + " \001(\t\022\017\n\007outputs\030\002 \001(\t\022\014\n\004deck\030\003 \001(\t*\\\n\020N" + + "amedEntityState\022\027\n\023NAMED_ENTITY_ACTIVE\020\000" + + "\022\031\n\025NAMED_ENTITY_ARCHIVED\020\001\022\024\n\020SYSTEM_GE" + + "NERATED\020\002B=Z;github.com/flyteorg/flyte/f" + + "lyteidl/gen/pb-go/flyteidl/adminb\006proto3" }; 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { @@ -22711,7 +23230,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_NamedEntityIdentifier_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_NamedEntityIdentifier_descriptor, - new java.lang.String[] { "Project", "Domain", "Name", }); + new java.lang.String[] { "Project", "Domain", "Name", "Org", }); internal_static_flyteidl_admin_NamedEntityMetadata_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_flyteidl_admin_NamedEntityMetadata_fieldAccessorTable = new @@ -22735,13 +23254,13 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_NamedEntityIdentifierListRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_NamedEntityIdentifierListRequest_descriptor, - new java.lang.String[] { "Project", "Domain", "Limit", "Token", "SortBy", "Filters", }); + new java.lang.String[] { "Project", "Domain", "Limit", "Token", "SortBy", "Filters", "Org", }); internal_static_flyteidl_admin_NamedEntityListRequest_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_flyteidl_admin_NamedEntityListRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_NamedEntityListRequest_descriptor, - new java.lang.String[] { "ResourceType", "Project", "Domain", "Limit", "Token", "SortBy", "Filters", }); + new java.lang.String[] { "ResourceType", "Project", "Domain", "Limit", "Token", "SortBy", "Filters", "Org", }); internal_static_flyteidl_admin_NamedEntityIdentifierList_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_flyteidl_admin_NamedEntityIdentifierList_fieldAccessorTable = new 
diff --git a/flyteidl/gen/pb-java/flyteidl/admin/ExecutionOuterClass.java b/flyteidl/gen/pb-java/flyteidl/admin/ExecutionOuterClass.java index 102ff472f0..57ad2a4f24 100644 --- a/flyteidl/gen/pb-java/flyteidl/admin/ExecutionOuterClass.java +++ b/flyteidl/gen/pb-java/flyteidl/admin/ExecutionOuterClass.java @@ -258,6 +258,24 @@ public interface ExecutionCreateRequestOrBuilder extends * .flyteidl.core.LiteralMap inputs = 5; */ flyteidl.core.Literals.LiteralMapOrBuilder getInputsOrBuilder(); + + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -279,6 +297,7 @@ private ExecutionCreateRequest() {
       project_ = "";
       domain_ = "";
       name_ = "";
+      org_ = "";
     }
 
     @java.lang.Override
@@ -349,6 +368,12 @@ private ExecutionCreateRequest(
 
               break;
             }
+            case 50: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -592,6 +617,48 @@ public flyteidl.core.Literals.LiteralMapOrBuilder getInputsOrBuilder() {
       return getInputs();
     }
 
+    public static final int ORG_FIELD_NUMBER = 6;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -621,6 +688,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (inputs_ != null) { output.writeMessage(5, getInputs()); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, org_); + } unknownFields.writeTo(output); } @@ -647,6 +717,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, getInputs()); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -678,6 +751,8 @@ public boolean equals(final java.lang.Object obj) { if (!getInputs() .equals(other.getInputs())) return false; } + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -703,6 +778,8 @@ public int hashCode() { hash = (37 * hash) + INPUTS_FIELD_NUMBER; hash = (53 * hash) + getInputs().hashCode(); } + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -858,6 +935,8 @@ public Builder clear() { inputs_ = null; inputsBuilder_ = null; } + org_ = ""; + return this; } @@ -897,6 +976,7 @@ public flyteidl.admin.ExecutionOuterClass.ExecutionCreateRequest buildPartial() } else { result.inputs_ = inputsBuilder_.build(); } + result.org_ = org_; onBuilt(); return result; } @@ -963,6 +1043,10 @@ public 
Builder mergeFrom(flyteidl.admin.ExecutionOuterClass.ExecutionCreateReque if (other.hasInputs()) { mergeInputs(other.getInputs()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -1616,6 +1700,95 @@ public flyteidl.core.Literals.LiteralMapOrBuilder getInputsOrBuilder() { } return inputsBuilder_; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -28956,114 +29129,114 @@ public flyteidl.admin.ExecutionOuterClass.WorkflowExecutionGetMetricsResponse ge "idl/core/security.proto\032\036google/protobuf" + "/duration.proto\032\037google/protobuf/timesta" + "mp.proto\032\036google/protobuf/wrappers.proto" + - "\"\237\001\n\026ExecutionCreateRequest\022\017\n\007project\030\001" + + "\"\254\001\n\026ExecutionCreateRequest\022\017\n\007project\030\001" + " \001(\t\022\016\n\006domain\030\002 \001(\t\022\014\n\004name\030\003 \001(\t\022+\n\004sp" + "ec\030\004 \001(\0132\035.flyteidl.admin.ExecutionSpec\022" + ")\n\006inputs\030\005 \001(\0132\031.flyteidl.core.LiteralM" + - "ap\"\177\n\030ExecutionRelaunchRequest\0226\n\002id\030\001 \001" + - "(\0132*.flyteidl.core.WorkflowExecutionIden" + - "tifier\022\014\n\004name\030\003 \001(\t\022\027\n\017overwrite_cache\030" + - "\004 \001(\010J\004\010\002\020\003\"\224\001\n\027ExecutionRecoverRequest\022" + + "ap\022\013\n\003org\030\006 \001(\t\"\177\n\030ExecutionRelaunchRequ" + + "est\0226\n\002id\030\001 \001(\0132*.flyteidl.core.Workflow" + + "ExecutionIdentifier\022\014\n\004name\030\003 \001(\t\022\027\n\017ove" + + "rwrite_cache\030\004 \001(\010J\004\010\002\020\003\"\224\001\n\027ExecutionRe" + + "coverRequest\0226\n\002id\030\001 \001(\0132*.flyteidl.core" + + ".WorkflowExecutionIdentifier\022\014\n\004name\030\002 \001" + + "(\t\0223\n\010metadata\030\003 \001(\0132!.flyteidl.admin.Ex" + + "ecutionMetadata\"Q\n\027ExecutionCreateRespon" + + "se\0226\n\002id\030\001 \001(\0132*.flyteidl.core.WorkflowE" + + "xecutionIdentifier\"U\n\033WorkflowExecutionG" + + "etRequest\0226\n\002id\030\001 
\001(\0132*.flyteidl.core.Wo" + + "rkflowExecutionIdentifier\"\243\001\n\tExecution\022" + + "6\n\002id\030\001 \001(\0132*.flyteidl.core.WorkflowExec" + + "utionIdentifier\022+\n\004spec\030\002 \001(\0132\035.flyteidl" + + ".admin.ExecutionSpec\0221\n\007closure\030\003 \001(\0132 ." + + "flyteidl.admin.ExecutionClosure\"M\n\rExecu" + + "tionList\022-\n\nexecutions\030\001 \003(\0132\031.flyteidl." + + "admin.Execution\022\r\n\005token\030\002 \001(\t\"X\n\016Litera" + + "lMapBlob\022/\n\006values\030\001 \001(\0132\031.flyteidl.core" + + ".LiteralMapB\002\030\001H\000\022\r\n\003uri\030\002 \001(\tH\000B\006\n\004data" + + "\"1\n\rAbortMetadata\022\r\n\005cause\030\001 \001(\t\022\021\n\tprin" + + "cipal\030\002 \001(\t\"\360\005\n\020ExecutionClosure\0225\n\007outp" + + "uts\030\001 \001(\0132\036.flyteidl.admin.LiteralMapBlo" + + "bB\002\030\001H\000\022.\n\005error\030\002 \001(\0132\035.flyteidl.core.E" + + "xecutionErrorH\000\022\031\n\013abort_cause\030\n \001(\tB\002\030\001" + + "H\000\0227\n\016abort_metadata\030\014 \001(\0132\035.flyteidl.ad" + + "min.AbortMetadataH\000\0224\n\013output_data\030\r \001(\013" + + "2\031.flyteidl.core.LiteralMapB\002\030\001H\000\0226\n\017com" + + "puted_inputs\030\003 \001(\0132\031.flyteidl.core.Liter" + + "alMapB\002\030\001\0225\n\005phase\030\004 \001(\0162&.flyteidl.core" + + ".WorkflowExecution.Phase\022.\n\nstarted_at\030\005" + + " \001(\0132\032.google.protobuf.Timestamp\022+\n\010dura" + + "tion\030\006 \001(\0132\031.google.protobuf.Duration\022.\n" + + "\ncreated_at\030\007 \001(\0132\032.google.protobuf.Time" + + "stamp\022.\n\nupdated_at\030\010 \001(\0132\032.google.proto" + + "buf.Timestamp\0223\n\rnotifications\030\t \003(\0132\034.f" + + "lyteidl.admin.Notification\022.\n\013workflow_i" + + "d\030\013 \001(\0132\031.flyteidl.core.Identifier\022I\n\024st" + + "ate_change_details\030\016 \001(\0132+.flyteidl.admi" + + "n.ExecutionStateChangeDetailsB\017\n\routput_" + + 
"result\">\n\016SystemMetadata\022\031\n\021execution_cl" + + "uster\030\001 \001(\t\022\021\n\tnamespace\030\002 \001(\t\"\213\004\n\021Execu" + + "tionMetadata\022=\n\004mode\030\001 \001(\0162/.flyteidl.ad" + + "min.ExecutionMetadata.ExecutionMode\022\021\n\tp" + + "rincipal\030\002 \001(\t\022\017\n\007nesting\030\003 \001(\r\0220\n\014sched" + + "uled_at\030\004 \001(\0132\032.google.protobuf.Timestam" + + "p\022E\n\025parent_node_execution\030\005 \001(\0132&.flyte" + + "idl.core.NodeExecutionIdentifier\022G\n\023refe" + + "rence_execution\030\020 \001(\0132*.flyteidl.core.Wo" + + "rkflowExecutionIdentifier\0227\n\017system_meta" + + "data\030\021 \001(\0132\036.flyteidl.admin.SystemMetada" + + "ta\022/\n\014artifact_ids\030\022 \003(\0132\031.flyteidl.core" + + ".ArtifactID\"g\n\rExecutionMode\022\n\n\006MANUAL\020\000" + + "\022\r\n\tSCHEDULED\020\001\022\n\n\006SYSTEM\020\002\022\014\n\010RELAUNCH\020" + + "\003\022\022\n\016CHILD_WORKFLOW\020\004\022\r\n\tRECOVERED\020\005\"G\n\020" + + "NotificationList\0223\n\rnotifications\030\001 \003(\0132" + + "\034.flyteidl.admin.Notification\"\262\006\n\rExecut" + + "ionSpec\022.\n\013launch_plan\030\001 \001(\0132\031.flyteidl." 
+ + "core.Identifier\022-\n\006inputs\030\002 \001(\0132\031.flytei" + + "dl.core.LiteralMapB\002\030\001\0223\n\010metadata\030\003 \001(\013" + + "2!.flyteidl.admin.ExecutionMetadata\0229\n\rn" + + "otifications\030\005 \001(\0132 .flyteidl.admin.Noti" + + "ficationListH\000\022\025\n\013disable_all\030\006 \001(\010H\000\022&\n" + + "\006labels\030\007 \001(\0132\026.flyteidl.admin.Labels\0220\n" + + "\013annotations\030\010 \001(\0132\033.flyteidl.admin.Anno" + + "tations\0228\n\020security_context\030\n \001(\0132\036.flyt" + + "eidl.core.SecurityContext\022/\n\tauth_role\030\020" + + " \001(\0132\030.flyteidl.admin.AuthRoleB\002\030\001\022;\n\022qu" + + "ality_of_service\030\021 \001(\0132\037.flyteidl.core.Q" + + "ualityOfService\022\027\n\017max_parallelism\030\022 \001(\005" + + "\022C\n\026raw_output_data_config\030\023 \001(\0132#.flyte" + + "idl.admin.RawOutputDataConfig\022=\n\022cluster" + + "_assignment\030\024 \001(\0132!.flyteidl.admin.Clust" + + "erAssignment\0221\n\rinterruptible\030\025 \001(\0132\032.go" + + "ogle.protobuf.BoolValue\022\027\n\017overwrite_cac" + + "he\030\026 \001(\010\022\"\n\004envs\030\027 \001(\0132\024.flyteidl.admin." 
+ + "Envs\022\014\n\004tags\030\030 \003(\tB\030\n\026notification_overr" + + "idesJ\004\010\004\020\005\"b\n\031ExecutionTerminateRequest\022" + "6\n\002id\030\001 \001(\0132*.flyteidl.core.WorkflowExec" + - "utionIdentifier\022\014\n\004name\030\002 \001(\t\0223\n\010metadat" + - "a\030\003 \001(\0132!.flyteidl.admin.ExecutionMetada" + - "ta\"Q\n\027ExecutionCreateResponse\0226\n\002id\030\001 \001(" + - "\0132*.flyteidl.core.WorkflowExecutionIdent" + - "ifier\"U\n\033WorkflowExecutionGetRequest\0226\n\002" + - "id\030\001 \001(\0132*.flyteidl.core.WorkflowExecuti" + - "onIdentifier\"\243\001\n\tExecution\0226\n\002id\030\001 \001(\0132*" + - ".flyteidl.core.WorkflowExecutionIdentifi" + - "er\022+\n\004spec\030\002 \001(\0132\035.flyteidl.admin.Execut" + - "ionSpec\0221\n\007closure\030\003 \001(\0132 .flyteidl.admi" + - "n.ExecutionClosure\"M\n\rExecutionList\022-\n\ne" + - "xecutions\030\001 \003(\0132\031.flyteidl.admin.Executi" + - "on\022\r\n\005token\030\002 \001(\t\"X\n\016LiteralMapBlob\022/\n\006v" + - "alues\030\001 \001(\0132\031.flyteidl.core.LiteralMapB\002" + - "\030\001H\000\022\r\n\003uri\030\002 \001(\tH\000B\006\n\004data\"1\n\rAbortMeta" + - "data\022\r\n\005cause\030\001 \001(\t\022\021\n\tprincipal\030\002 \001(\t\"\360" + - "\005\n\020ExecutionClosure\0225\n\007outputs\030\001 \001(\0132\036.f" + - "lyteidl.admin.LiteralMapBlobB\002\030\001H\000\022.\n\005er" + - "ror\030\002 \001(\0132\035.flyteidl.core.ExecutionError" + - "H\000\022\031\n\013abort_cause\030\n \001(\tB\002\030\001H\000\0227\n\016abort_m" + - "etadata\030\014 \001(\0132\035.flyteidl.admin.AbortMeta" + - "dataH\000\0224\n\013output_data\030\r \001(\0132\031.flyteidl.c" + - "ore.LiteralMapB\002\030\001H\000\0226\n\017computed_inputs\030" + - "\003 \001(\0132\031.flyteidl.core.LiteralMapB\002\030\001\0225\n\005" + - "phase\030\004 \001(\0162&.flyteidl.core.WorkflowExec" + - "ution.Phase\022.\n\nstarted_at\030\005 \001(\0132\032.google" + - 
".protobuf.Timestamp\022+\n\010duration\030\006 \001(\0132\031." + - "google.protobuf.Duration\022.\n\ncreated_at\030\007" + - " \001(\0132\032.google.protobuf.Timestamp\022.\n\nupda" + - "ted_at\030\010 \001(\0132\032.google.protobuf.Timestamp" + - "\0223\n\rnotifications\030\t \003(\0132\034.flyteidl.admin" + - ".Notification\022.\n\013workflow_id\030\013 \001(\0132\031.fly" + - "teidl.core.Identifier\022I\n\024state_change_de" + - "tails\030\016 \001(\0132+.flyteidl.admin.ExecutionSt" + - "ateChangeDetailsB\017\n\routput_result\">\n\016Sys" + - "temMetadata\022\031\n\021execution_cluster\030\001 \001(\t\022\021" + - "\n\tnamespace\030\002 \001(\t\"\213\004\n\021ExecutionMetadata\022" + - "=\n\004mode\030\001 \001(\0162/.flyteidl.admin.Execution" + - "Metadata.ExecutionMode\022\021\n\tprincipal\030\002 \001(" + - "\t\022\017\n\007nesting\030\003 \001(\r\0220\n\014scheduled_at\030\004 \001(\013" + - "2\032.google.protobuf.Timestamp\022E\n\025parent_n" + - "ode_execution\030\005 \001(\0132&.flyteidl.core.Node" + - "ExecutionIdentifier\022G\n\023reference_executi" + - "on\030\020 \001(\0132*.flyteidl.core.WorkflowExecuti" + - "onIdentifier\0227\n\017system_metadata\030\021 \001(\0132\036." 
+ - "flyteidl.admin.SystemMetadata\022/\n\014artifac" + - "t_ids\030\022 \003(\0132\031.flyteidl.core.ArtifactID\"g" + - "\n\rExecutionMode\022\n\n\006MANUAL\020\000\022\r\n\tSCHEDULED" + - "\020\001\022\n\n\006SYSTEM\020\002\022\014\n\010RELAUNCH\020\003\022\022\n\016CHILD_WO" + - "RKFLOW\020\004\022\r\n\tRECOVERED\020\005\"G\n\020NotificationL" + - "ist\0223\n\rnotifications\030\001 \003(\0132\034.flyteidl.ad" + - "min.Notification\"\262\006\n\rExecutionSpec\022.\n\013la" + - "unch_plan\030\001 \001(\0132\031.flyteidl.core.Identifi" + - "er\022-\n\006inputs\030\002 \001(\0132\031.flyteidl.core.Liter" + - "alMapB\002\030\001\0223\n\010metadata\030\003 \001(\0132!.flyteidl.a" + - "dmin.ExecutionMetadata\0229\n\rnotifications\030" + - "\005 \001(\0132 .flyteidl.admin.NotificationListH" + - "\000\022\025\n\013disable_all\030\006 \001(\010H\000\022&\n\006labels\030\007 \001(\013" + - "2\026.flyteidl.admin.Labels\0220\n\013annotations\030" + - "\010 \001(\0132\033.flyteidl.admin.Annotations\0228\n\020se" + - "curity_context\030\n \001(\0132\036.flyteidl.core.Sec" + - "urityContext\022/\n\tauth_role\030\020 \001(\0132\030.flytei" + - "dl.admin.AuthRoleB\002\030\001\022;\n\022quality_of_serv" + - "ice\030\021 \001(\0132\037.flyteidl.core.QualityOfServi" + - "ce\022\027\n\017max_parallelism\030\022 \001(\005\022C\n\026raw_outpu" + - "t_data_config\030\023 \001(\0132#.flyteidl.admin.Raw" + - "OutputDataConfig\022=\n\022cluster_assignment\030\024" + - " \001(\0132!.flyteidl.admin.ClusterAssignment\022" + - "1\n\rinterruptible\030\025 \001(\0132\032.google.protobuf" + - ".BoolValue\022\027\n\017overwrite_cache\030\026 \001(\010\022\"\n\004e" + - "nvs\030\027 \001(\0132\024.flyteidl.admin.Envs\022\014\n\004tags\030" + - "\030 \003(\tB\030\n\026notification_overridesJ\004\010\004\020\005\"b\n" + - "\031ExecutionTerminateRequest\0226\n\002id\030\001 \001(\0132*" + - ".flyteidl.core.WorkflowExecutionIdentifi" + - "er\022\r\n\005cause\030\002 
\001(\t\"\034\n\032ExecutionTerminateR" + - "esponse\"Y\n\037WorkflowExecutionGetDataReque" + + "utionIdentifier\022\r\n\005cause\030\002 \001(\t\"\034\n\032Execut" + + "ionTerminateResponse\"Y\n\037WorkflowExecutio" + + "nGetDataRequest\0226\n\002id\030\001 \001(\0132*.flyteidl.c" + + "ore.WorkflowExecutionIdentifier\"\336\001\n Work" + + "flowExecutionGetDataResponse\022,\n\007outputs\030" + + "\001 \001(\0132\027.flyteidl.admin.UrlBlobB\002\030\001\022+\n\006in" + + "puts\030\002 \001(\0132\027.flyteidl.admin.UrlBlobB\002\030\001\022" + + ".\n\013full_inputs\030\003 \001(\0132\031.flyteidl.core.Lit" + + "eralMap\022/\n\014full_outputs\030\004 \001(\0132\031.flyteidl" + + ".core.LiteralMap\"\177\n\026ExecutionUpdateReque" + "st\0226\n\002id\030\001 \001(\0132*.flyteidl.core.WorkflowE" + - "xecutionIdentifier\"\336\001\n WorkflowExecution" + - "GetDataResponse\022,\n\007outputs\030\001 \001(\0132\027.flyte" + - "idl.admin.UrlBlobB\002\030\001\022+\n\006inputs\030\002 \001(\0132\027." + - "flyteidl.admin.UrlBlobB\002\030\001\022.\n\013full_input" + - "s\030\003 \001(\0132\031.flyteidl.core.LiteralMap\022/\n\014fu" + - "ll_outputs\030\004 \001(\0132\031.flyteidl.core.Literal" + - "Map\"\177\n\026ExecutionUpdateRequest\0226\n\002id\030\001 \001(" + - "\0132*.flyteidl.core.WorkflowExecutionIdent" + - "ifier\022-\n\005state\030\002 \001(\0162\036.flyteidl.admin.Ex" + - "ecutionState\"\220\001\n\033ExecutionStateChangeDet" + - "ails\022-\n\005state\030\001 \001(\0162\036.flyteidl.admin.Exe" + - "cutionState\022/\n\013occurred_at\030\002 \001(\0132\032.googl" + - "e.protobuf.Timestamp\022\021\n\tprincipal\030\003 \001(\t\"" + - "\031\n\027ExecutionUpdateResponse\"k\n\"WorkflowEx" + - "ecutionGetMetricsRequest\0226\n\002id\030\001 \001(\0132*.f" + - "lyteidl.core.WorkflowExecutionIdentifier" + - "\022\r\n\005depth\030\002 \001(\005\"H\n#WorkflowExecutionGetM" + - "etricsResponse\022!\n\004span\030\001 \001(\0132\023.flyteidl." 
+ - "core.Span*>\n\016ExecutionState\022\024\n\020EXECUTION" + - "_ACTIVE\020\000\022\026\n\022EXECUTION_ARCHIVED\020\001B=Z;git" + - "hub.com/flyteorg/flyte/flyteidl/gen/pb-g" + - "o/flyteidl/adminb\006proto3" + "xecutionIdentifier\022-\n\005state\030\002 \001(\0162\036.flyt" + + "eidl.admin.ExecutionState\"\220\001\n\033ExecutionS" + + "tateChangeDetails\022-\n\005state\030\001 \001(\0162\036.flyte" + + "idl.admin.ExecutionState\022/\n\013occurred_at\030" + + "\002 \001(\0132\032.google.protobuf.Timestamp\022\021\n\tpri" + + "ncipal\030\003 \001(\t\"\031\n\027ExecutionUpdateResponse\"" + + "k\n\"WorkflowExecutionGetMetricsRequest\0226\n" + + "\002id\030\001 \001(\0132*.flyteidl.core.WorkflowExecut" + + "ionIdentifier\022\r\n\005depth\030\002 \001(\005\"H\n#Workflow" + + "ExecutionGetMetricsResponse\022!\n\004span\030\001 \001(" + + "\0132\023.flyteidl.core.Span*>\n\016ExecutionState" + + "\022\024\n\020EXECUTION_ACTIVE\020\000\022\026\n\022EXECUTION_ARCH" + + "IVED\020\001B=Z;github.com/flyteorg/flyte/flyt" + + "eidl/gen/pb-go/flyteidl/adminb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -29093,7 +29266,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_ExecutionCreateRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_ExecutionCreateRequest_descriptor, - new java.lang.String[] { "Project", "Domain", "Name", "Spec", "Inputs", }); + new java.lang.String[] { "Project", "Domain", "Name", "Spec", "Inputs", "Org", }); internal_static_flyteidl_admin_ExecutionRelaunchRequest_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_flyteidl_admin_ExecutionRelaunchRequest_fieldAccessorTable = new diff --git a/flyteidl/gen/pb-java/flyteidl/admin/LaunchPlanOuterClass.java b/flyteidl/gen/pb-java/flyteidl/admin/LaunchPlanOuterClass.java index ae781f6903..77678695e6 100644 --- a/flyteidl/gen/pb-java/flyteidl/admin/LaunchPlanOuterClass.java +++ b/flyteidl/gen/pb-java/flyteidl/admin/LaunchPlanOuterClass.java @@ -13512,10 +13512,28 @@ public interface ActiveLaunchPlanListRequestOrBuilder extends * .flyteidl.admin.Sort sort_by = 5; */ flyteidl.admin.Common.SortOrBuilder getSortByOrBuilder(); + + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
-   * Represents a request structure to list active launch plans within a project/domain.
+   * Represents a request structure to list active launch plans within a project/domain and optional org.
    * See :ref:`ref_flyteidl.admin.LaunchPlan` for more details
    * 
* @@ -13534,6 +13552,7 @@ private ActiveLaunchPlanListRequest() { project_ = ""; domain_ = ""; token_ = ""; + org_ = ""; } @java.lang.Override @@ -13596,6 +13615,12 @@ private ActiveLaunchPlanListRequest( break; } + case 50: { + java.lang.String s = input.readStringRequireUtf8(); + + org_ = s; + break; + } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { @@ -13812,6 +13837,48 @@ public flyteidl.admin.Common.SortOrBuilder getSortByOrBuilder() { return getSortBy(); } + public static final int ORG_FIELD_NUMBER = 6; + private volatile java.lang.Object org_; + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -13841,6 +13908,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (sortBy_ != null) { output.writeMessage(5, getSortBy()); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, org_); + } unknownFields.writeTo(output); } @@ -13867,6 +13937,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(5, getSortBy()); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -13895,6 +13968,8 @@ public boolean equals(final java.lang.Object obj) { if (!getSortBy() .equals(other.getSortBy())) return false; } + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -13918,6 +13993,8 @@ public int hashCode() { hash = (37 * hash) + SORT_BY_FIELD_NUMBER; hash = (53 * hash) + getSortBy().hashCode(); } + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -14015,7 +14092,7 @@ protected Builder newBuilderForType( } /** *
-     * Represents a request structure to list active launch plans within a project/domain.
+     * Represents a request structure to list active launch plans within a project/domain and optional org.
      * See :ref:`ref_flyteidl.admin.LaunchPlan` for more details
      * 
* @@ -14070,6 +14147,8 @@ public Builder clear() { sortBy_ = null; sortByBuilder_ = null; } + org_ = ""; + return this; } @@ -14105,6 +14184,7 @@ public flyteidl.admin.LaunchPlanOuterClass.ActiveLaunchPlanListRequest buildPart } else { result.sortBy_ = sortByBuilder_.build(); } + result.org_ = org_; onBuilt(); return result; } @@ -14171,6 +14251,10 @@ public Builder mergeFrom(flyteidl.admin.LaunchPlanOuterClass.ActiveLaunchPlanLis if (other.hasSortBy()) { mergeSortBy(other.getSortBy()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -14689,6 +14773,95 @@ public flyteidl.admin.Common.SortOrBuilder getSortByOrBuilder() { } return sortByBuilder_; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -14866,13 +15039,14 @@ public flyteidl.admin.LaunchPlanOuterClass.ActiveLaunchPlanListRequest getDefaul "\001(\0162\037.flyteidl.admin.LaunchPlanState\"\032\n\030" + "LaunchPlanUpdateResponse\"L\n\027ActiveLaunch" + "PlanRequest\0221\n\002id\030\001 \001(\0132%.flyteidl.admin" + - ".NamedEntityIdentifier\"\203\001\n\033ActiveLaunchP" + + ".NamedEntityIdentifier\"\220\001\n\033ActiveLaunchP" + "lanListRequest\022\017\n\007project\030\001 \001(\t\022\016\n\006domai" + "n\030\002 \001(\t\022\r\n\005limit\030\003 \001(\r\022\r\n\005token\030\004 \001(\t\022%\n" + - "\007sort_by\030\005 \001(\0132\024.flyteidl.admin.Sort*+\n\017" + - "LaunchPlanState\022\014\n\010INACTIVE\020\000\022\n\n\006ACTIVE\020" + - "\001B=Z;github.com/flyteorg/flyte/flyteidl/" + - "gen/pb-go/flyteidl/adminb\006proto3" + "\007sort_by\030\005 \001(\0132\024.flyteidl.admin.Sort\022\013\n\003" + + "org\030\006 \001(\t*+\n\017LaunchPlanState\022\014\n\010INACTIVE" + + "\020\000\022\n\n\006ACTIVE\020\001B=Z;github.com/flyteorg/fl" + + "yte/flyteidl/gen/pb-go/flyteidl/adminb\006p" + + "roto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -14967,7 +15141,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_ActiveLaunchPlanListRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_ActiveLaunchPlanListRequest_descriptor, - new java.lang.String[] { "Project", "Domain", "Limit", "Token", "SortBy", }); + new java.lang.String[] { "Project", "Domain", "Limit", "Token", "SortBy", "Org", }); flyteidl.core.Execution.getDescriptor(); flyteidl.core.Literals.getDescriptor(); flyteidl.core.IdentifierOuterClass.getDescriptor(); diff --git a/flyteidl/gen/pb-java/flyteidl/admin/MatchableResourceOuterClass.java b/flyteidl/gen/pb-java/flyteidl/admin/MatchableResourceOuterClass.java index 0f4b473a21..18a56d8164 100644 --- a/flyteidl/gen/pb-java/flyteidl/admin/MatchableResourceOuterClass.java +++ b/flyteidl/gen/pb-java/flyteidl/admin/MatchableResourceOuterClass.java @@ -10549,11 +10549,29 @@ public interface MatchableAttributesConfigurationOrBuilder extends */ com.google.protobuf.ByteString getLaunchPlanBytes(); + + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
-   * Represents a custom set of attributes applied for either a domain; a domain and project; or
-   * domain, project and workflow name.
+   * Represents a custom set of attributes applied for either a domain (and optional org); a domain and project (and optional org);
+   * or domain, project and workflow name (and optional org).
    * These are used to override system level defaults for kubernetes cluster resource management,
    * default execution values, and more all across different levels of specificity.
    * 
@@ -10574,6 +10592,7 @@ private MatchableAttributesConfiguration() { project_ = ""; workflow_ = ""; launchPlan_ = ""; + org_ = ""; } @java.lang.Override @@ -10637,6 +10656,12 @@ private MatchableAttributesConfiguration( launchPlan_ = s; break; } + case 50: { + java.lang.String s = input.readStringRequireUtf8(); + + org_ = s; + break; + } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { @@ -10826,6 +10851,48 @@ public java.lang.String getLaunchPlan() { } } + public static final int ORG_FIELD_NUMBER = 6; + private volatile java.lang.Object org_; + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -10855,6 +10922,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getLaunchPlanBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, launchPlan_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, org_); + } unknownFields.writeTo(output); } @@ -10880,6 +10950,9 @@ public int getSerializedSize() { if (!getLaunchPlanBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, launchPlan_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -10908,6 +10981,8 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getWorkflow())) return false; if (!getLaunchPlan() .equals(other.getLaunchPlan())) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -10931,6 +11006,8 @@ public int hashCode() { hash = (53 * hash) + getWorkflow().hashCode(); hash = (37 * hash) + LAUNCH_PLAN_FIELD_NUMBER; hash = (53 * hash) + getLaunchPlan().hashCode(); + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -11028,8 +11105,8 @@ protected Builder newBuilderForType( } /** *
-     * Represents a custom set of attributes applied for either a domain; a domain and project; or
-     * domain, project and workflow name.
+     * Represents a custom set of attributes applied for either a domain (and optional org); a domain and project (and optional org);
+     * or domain, project and workflow name (and optional org).
      * These are used to override system level defaults for kubernetes cluster resource management,
      * default execution values, and more all across different levels of specificity.
      * 
@@ -11085,6 +11162,8 @@ public Builder clear() { launchPlan_ = ""; + org_ = ""; + return this; } @@ -11120,6 +11199,7 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchableAttributesConfigurati result.project_ = project_; result.workflow_ = workflow_; result.launchPlan_ = launchPlan_; + result.org_ = org_; onBuilt(); return result; } @@ -11187,6 +11267,10 @@ public Builder mergeFrom(flyteidl.admin.MatchableResourceOuterClass.MatchableAtt launchPlan_ = other.launchPlan_; onChanged(); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -11608,6 +11692,95 @@ public Builder setLaunchPlanBytes( onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -11681,6 +11854,24 @@ public interface ListMatchableAttributesRequestOrBuilder extends * .flyteidl.admin.MatchableResource resource_type = 1; */ flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceType(); + + /** + *
+     * Optional, org filter applied to list project requests.
+     * 
+ * + * string org = 2; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org filter applied to list project requests.
+     * 
+ * + * string org = 2; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -11701,6 +11892,7 @@ private ListMatchableAttributesRequest(com.google.protobuf.GeneratedMessageV3.Bu
     }
     private ListMatchableAttributesRequest() {
       resourceType_ = 0;
+      org_ = "";
     }
 
     @java.lang.Override
@@ -11733,6 +11925,12 @@ private ListMatchableAttributesRequest(
               resourceType_ = rawValue;
               break;
             }
+            case 18: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -11790,6 +11988,48 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceT
       return result == null ? flyteidl.admin.MatchableResourceOuterClass.MatchableResource.UNRECOGNIZED : result;
     }
 
+    public static final int ORG_FIELD_NUMBER = 2;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org filter applied to list project requests.
+     * 
+ * + * string org = 2; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org filter applied to list project requests.
+     * 
+ * + * string org = 2; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -11807,6 +12047,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (resourceType_ != flyteidl.admin.MatchableResourceOuterClass.MatchableResource.TASK_RESOURCE.getNumber()) { output.writeEnum(1, resourceType_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, org_); + } unknownFields.writeTo(output); } @@ -11820,6 +12063,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(1, resourceType_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -11836,6 +12082,8 @@ public boolean equals(final java.lang.Object obj) { flyteidl.admin.MatchableResourceOuterClass.ListMatchableAttributesRequest other = (flyteidl.admin.MatchableResourceOuterClass.ListMatchableAttributesRequest) obj; if (resourceType_ != other.resourceType_) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -11849,6 +12097,8 @@ public int hashCode() { hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER; hash = (53 * hash) + resourceType_; + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -11989,6 +12239,8 @@ public Builder 
clear() { super.clear(); resourceType_ = 0; + org_ = ""; + return this; } @@ -12016,6 +12268,7 @@ public flyteidl.admin.MatchableResourceOuterClass.ListMatchableAttributesRequest public flyteidl.admin.MatchableResourceOuterClass.ListMatchableAttributesRequest buildPartial() { flyteidl.admin.MatchableResourceOuterClass.ListMatchableAttributesRequest result = new flyteidl.admin.MatchableResourceOuterClass.ListMatchableAttributesRequest(this); result.resourceType_ = resourceType_; + result.org_ = org_; onBuilt(); return result; } @@ -12067,6 +12320,10 @@ public Builder mergeFrom(flyteidl.admin.MatchableResourceOuterClass.ListMatchabl if (other.resourceType_ != 0) { setResourceTypeValue(other.getResourceTypeValue()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -12160,6 +12417,95 @@ public Builder clearResourceType() { onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org filter applied to list project requests.
+       * 
+ * + * string org = 2; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org filter applied to list project requests.
+       * 
+ * + * string org = 2; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org filter applied to list project requests.
+       * 
+ * + * string org = 2; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org filter applied to list project requests.
+       * 
+ * + * string org = 2; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org filter applied to list project requests.
+       * 
+ * + * string org = 2; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -13126,24 +13472,25 @@ public flyteidl.admin.MatchableResourceOuterClass.ListMatchableAttributesRespons "sH\000\022L\n\031workflow_execution_config\030\007 \001(\0132\'" + ".flyteidl.admin.WorkflowExecutionConfigH" + "\000\022?\n\022cluster_assignment\030\010 \001(\0132!.flyteidl" + - ".admin.ClusterAssignmentH\000B\010\n\006target\"\242\001\n" + + ".admin.ClusterAssignmentH\000B\010\n\006target\"\257\001\n" + " MatchableAttributesConfiguration\0226\n\natt" + "ributes\030\001 \001(\0132\".flyteidl.admin.MatchingA" + "ttributes\022\016\n\006domain\030\002 \001(\t\022\017\n\007project\030\003 \001" + "(\t\022\020\n\010workflow\030\004 \001(\t\022\023\n\013launch_plan\030\005 \001(" + - "\t\"Z\n\036ListMatchableAttributesRequest\0228\n\rr" + - "esource_type\030\001 \001(\0162!.flyteidl.admin.Matc" + - "hableResource\"k\n\037ListMatchableAttributes" + - "Response\022H\n\016configurations\030\001 \003(\01320.flyte" + - "idl.admin.MatchableAttributesConfigurati" + - "on*\340\001\n\021MatchableResource\022\021\n\rTASK_RESOURC" + - "E\020\000\022\024\n\020CLUSTER_RESOURCE\020\001\022\023\n\017EXECUTION_Q" + - "UEUE\020\002\022\033\n\027EXECUTION_CLUSTER_LABEL\020\003\022$\n Q" + - "UALITY_OF_SERVICE_SPECIFICATION\020\004\022\023\n\017PLU" + - "GIN_OVERRIDE\020\005\022\035\n\031WORKFLOW_EXECUTION_CON" + - "FIG\020\006\022\026\n\022CLUSTER_ASSIGNMENT\020\007B=Z;github." 
+ - "com/flyteorg/flyte/flyteidl/gen/pb-go/fl" + - "yteidl/adminb\006proto3" + "\t\022\013\n\003org\030\006 \001(\t\"g\n\036ListMatchableAttribute" + + "sRequest\0228\n\rresource_type\030\001 \001(\0162!.flytei" + + "dl.admin.MatchableResource\022\013\n\003org\030\002 \001(\t\"" + + "k\n\037ListMatchableAttributesResponse\022H\n\016co" + + "nfigurations\030\001 \003(\01320.flyteidl.admin.Matc" + + "hableAttributesConfiguration*\340\001\n\021Matchab" + + "leResource\022\021\n\rTASK_RESOURCE\020\000\022\024\n\020CLUSTER" + + "_RESOURCE\020\001\022\023\n\017EXECUTION_QUEUE\020\002\022\033\n\027EXEC" + + "UTION_CLUSTER_LABEL\020\003\022$\n QUALITY_OF_SERV" + + "ICE_SPECIFICATION\020\004\022\023\n\017PLUGIN_OVERRIDE\020\005" + + "\022\035\n\031WORKFLOW_EXECUTION_CONFIG\020\006\022\026\n\022CLUST" + + "ER_ASSIGNMENT\020\007B=Z;github.com/flyteorg/f" + + "lyte/flyteidl/gen/pb-go/flyteidl/adminb\006" + + "proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -13227,13 +13574,13 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_MatchableAttributesConfiguration_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_MatchableAttributesConfiguration_descriptor, - new java.lang.String[] { "Attributes", "Domain", "Project", "Workflow", "LaunchPlan", }); + new java.lang.String[] { "Attributes", "Domain", "Project", "Workflow", "LaunchPlan", "Org", }); internal_static_flyteidl_admin_ListMatchableAttributesRequest_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_flyteidl_admin_ListMatchableAttributesRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_ListMatchableAttributesRequest_descriptor, - new java.lang.String[] { "ResourceType", }); + new java.lang.String[] { "ResourceType", "Org", }); internal_static_flyteidl_admin_ListMatchableAttributesResponse_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_flyteidl_admin_ListMatchableAttributesResponse_fieldAccessorTable = new diff --git a/flyteidl/gen/pb-java/flyteidl/admin/NodeExecutionOuterClass.java b/flyteidl/gen/pb-java/flyteidl/admin/NodeExecutionOuterClass.java index b1301715d6..da63f51ffc 100644 --- a/flyteidl/gen/pb-java/flyteidl/admin/NodeExecutionOuterClass.java +++ b/flyteidl/gen/pb-java/flyteidl/admin/NodeExecutionOuterClass.java @@ -15710,6 +15710,1218 @@ public flyteidl.admin.NodeExecutionOuterClass.NodeExecutionGetDataResponse getDe } + public interface GetDynamicNodeWorkflowRequestOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.admin.GetDynamicNodeWorkflowRequest) + com.google.protobuf.MessageOrBuilder { + + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + boolean hasId(); + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + 
flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier getId(); + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifierOrBuilder getIdOrBuilder(); + } + /** + * Protobuf type {@code flyteidl.admin.GetDynamicNodeWorkflowRequest} + */ + public static final class GetDynamicNodeWorkflowRequest extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:flyteidl.admin.GetDynamicNodeWorkflowRequest) + GetDynamicNodeWorkflowRequestOrBuilder { + private static final long serialVersionUID = 0L; + // Use GetDynamicNodeWorkflowRequest.newBuilder() to construct. + private GetDynamicNodeWorkflowRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private GetDynamicNodeWorkflowRequest() { + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private GetDynamicNodeWorkflowRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier.Builder subBuilder = null; + if (id_ != null) { + subBuilder = id_.toBuilder(); + } + id_ = input.readMessage(flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(id_); + id_ = subBuilder.buildPartial(); + } + + break; + } + default: { + if (!parseUnknownField( + input, 
unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.NodeExecutionOuterClass.internal_static_flyteidl_admin_GetDynamicNodeWorkflowRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.NodeExecutionOuterClass.internal_static_flyteidl_admin_GetDynamicNodeWorkflowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest.class, flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest.Builder.class); + } + + public static final int ID_FIELD_NUMBER = 1; + private flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier id_; + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + public boolean hasId() { + return id_ != null; + } + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + public flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier getId() { + return id_ == null ? 
flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier.getDefaultInstance() : id_; + } + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + public flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifierOrBuilder getIdOrBuilder() { + return getId(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (id_ != null) { + output.writeMessage(1, getId()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (id_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getId()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest)) { + return super.equals(obj); + } + flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest other = (flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest) obj; + + if (hasId() != other.hasId()) return false; + if (hasId()) { + if (!getId() + .equals(other.getId())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasId()) { + hash = (37 * hash) + ID_FIELD_NUMBER; + hash = (53 * hash) + getId().hashCode(); + } + hash 
= (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest 
parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code flyteidl.admin.GetDynamicNodeWorkflowRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:flyteidl.admin.GetDynamicNodeWorkflowRequest) + flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.NodeExecutionOuterClass.internal_static_flyteidl_admin_GetDynamicNodeWorkflowRequest_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.NodeExecutionOuterClass.internal_static_flyteidl_admin_GetDynamicNodeWorkflowRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest.class, flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest.Builder.class); + } + + // Construct using flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + if (idBuilder_ == null) { + id_ = null; + } else { + id_ = null; + idBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() 
{ + return flyteidl.admin.NodeExecutionOuterClass.internal_static_flyteidl_admin_GetDynamicNodeWorkflowRequest_descriptor; + } + + @java.lang.Override + public flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest getDefaultInstanceForType() { + return flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest.getDefaultInstance(); + } + + @java.lang.Override + public flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest build() { + flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest buildPartial() { + flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest result = new flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest(this); + if (idBuilder_ == null) { + result.id_ = id_; + } else { + result.id_ = idBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + 
return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest) { + return mergeFrom((flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest other) { + if (other == flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest.getDefaultInstance()) return this; + if (other.hasId()) { + mergeId(other.getId()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier id_; + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier, flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifierOrBuilder> idBuilder_; + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + public boolean hasId() { + return idBuilder_ != null || id_ != null; + } + /** + * 
.flyteidl.core.NodeExecutionIdentifier id = 1; + */ + public flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier getId() { + if (idBuilder_ == null) { + return id_ == null ? flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier.getDefaultInstance() : id_; + } else { + return idBuilder_.getMessage(); + } + } + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + public Builder setId(flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier value) { + if (idBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + id_ = value; + onChanged(); + } else { + idBuilder_.setMessage(value); + } + + return this; + } + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + public Builder setId( + flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier.Builder builderForValue) { + if (idBuilder_ == null) { + id_ = builderForValue.build(); + onChanged(); + } else { + idBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + public Builder mergeId(flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier value) { + if (idBuilder_ == null) { + if (id_ != null) { + id_ = + flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier.newBuilder(id_).mergeFrom(value).buildPartial(); + } else { + id_ = value; + } + onChanged(); + } else { + idBuilder_.mergeFrom(value); + } + + return this; + } + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + public Builder clearId() { + if (idBuilder_ == null) { + id_ = null; + onChanged(); + } else { + id_ = null; + idBuilder_ = null; + } + + return this; + } + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + public flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier.Builder getIdBuilder() { + + onChanged(); + return getIdFieldBuilder().getBuilder(); + } + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + public 
flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifierOrBuilder getIdOrBuilder() { + if (idBuilder_ != null) { + return idBuilder_.getMessageOrBuilder(); + } else { + return id_ == null ? + flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier.getDefaultInstance() : id_; + } + } + /** + * .flyteidl.core.NodeExecutionIdentifier id = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier, flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifierOrBuilder> + getIdFieldBuilder() { + if (idBuilder_ == null) { + idBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier, flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.NodeExecutionIdentifierOrBuilder>( + getId(), + getParentForChildren(), + isClean()); + id_ = null; + } + return idBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:flyteidl.admin.GetDynamicNodeWorkflowRequest) + } + + // @@protoc_insertion_point(class_scope:flyteidl.admin.GetDynamicNodeWorkflowRequest) + private static final flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest(); + } + + public static flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new 
com.google.protobuf.AbstractParser() { + @java.lang.Override + public GetDynamicNodeWorkflowRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new GetDynamicNodeWorkflowRequest(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public flyteidl.admin.NodeExecutionOuterClass.GetDynamicNodeWorkflowRequest getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface DynamicNodeWorkflowResponseOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.admin.DynamicNodeWorkflowResponse) + com.google.protobuf.MessageOrBuilder { + + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + boolean hasCompiledWorkflow(); + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + flyteidl.core.Compiler.CompiledWorkflowClosure getCompiledWorkflow(); + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + flyteidl.core.Compiler.CompiledWorkflowClosureOrBuilder getCompiledWorkflowOrBuilder(); + } + /** + * Protobuf type {@code flyteidl.admin.DynamicNodeWorkflowResponse} + */ + public static final class DynamicNodeWorkflowResponse extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:flyteidl.admin.DynamicNodeWorkflowResponse) + DynamicNodeWorkflowResponseOrBuilder { + private static final long serialVersionUID = 0L; + // Use DynamicNodeWorkflowResponse.newBuilder() to construct. 
+ private DynamicNodeWorkflowResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private DynamicNodeWorkflowResponse() { + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private DynamicNodeWorkflowResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + flyteidl.core.Compiler.CompiledWorkflowClosure.Builder subBuilder = null; + if (compiledWorkflow_ != null) { + subBuilder = compiledWorkflow_.toBuilder(); + } + compiledWorkflow_ = input.readMessage(flyteidl.core.Compiler.CompiledWorkflowClosure.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(compiledWorkflow_); + compiledWorkflow_ = subBuilder.buildPartial(); + } + + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.NodeExecutionOuterClass.internal_static_flyteidl_admin_DynamicNodeWorkflowResponse_descriptor; + } + + 
@java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.NodeExecutionOuterClass.internal_static_flyteidl_admin_DynamicNodeWorkflowResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse.class, flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse.Builder.class); + } + + public static final int COMPILED_WORKFLOW_FIELD_NUMBER = 1; + private flyteidl.core.Compiler.CompiledWorkflowClosure compiledWorkflow_; + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + public boolean hasCompiledWorkflow() { + return compiledWorkflow_ != null; + } + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + public flyteidl.core.Compiler.CompiledWorkflowClosure getCompiledWorkflow() { + return compiledWorkflow_ == null ? flyteidl.core.Compiler.CompiledWorkflowClosure.getDefaultInstance() : compiledWorkflow_; + } + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + public flyteidl.core.Compiler.CompiledWorkflowClosureOrBuilder getCompiledWorkflowOrBuilder() { + return getCompiledWorkflow(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (compiledWorkflow_ != null) { + output.writeMessage(1, getCompiledWorkflow()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (compiledWorkflow_ != null) { + size += com.google.protobuf.CodedOutputStream + 
.computeMessageSize(1, getCompiledWorkflow()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse)) { + return super.equals(obj); + } + flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse other = (flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse) obj; + + if (hasCompiledWorkflow() != other.hasCompiledWorkflow()) return false; + if (hasCompiledWorkflow()) { + if (!getCompiledWorkflow() + .equals(other.getCompiledWorkflow())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasCompiledWorkflow()) { + hash = (37 * hash) + COMPILED_WORKFLOW_FIELD_NUMBER; + hash = (53 * hash) + getCompiledWorkflow().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public 
static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static 
flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code flyteidl.admin.DynamicNodeWorkflowResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:flyteidl.admin.DynamicNodeWorkflowResponse) + flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.admin.NodeExecutionOuterClass.internal_static_flyteidl_admin_DynamicNodeWorkflowResponse_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.admin.NodeExecutionOuterClass.internal_static_flyteidl_admin_DynamicNodeWorkflowResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse.class, flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse.Builder.class); + } + + // Construct using flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + if (compiledWorkflowBuilder_ == null) { + compiledWorkflow_ = null; + } else { + compiledWorkflow_ = null; + compiledWorkflowBuilder_ = null; + } + return this; + } + + @java.lang.Override + public 
com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return flyteidl.admin.NodeExecutionOuterClass.internal_static_flyteidl_admin_DynamicNodeWorkflowResponse_descriptor; + } + + @java.lang.Override + public flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse getDefaultInstanceForType() { + return flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse.getDefaultInstance(); + } + + @java.lang.Override + public flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse build() { + flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse buildPartial() { + flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse result = new flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse(this); + if (compiledWorkflowBuilder_ == null) { + result.compiledWorkflow_ = compiledWorkflow_; + } else { + result.compiledWorkflow_ = compiledWorkflowBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + 
public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse) { + return mergeFrom((flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse other) { + if (other == flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse.getDefaultInstance()) return this; + if (other.hasCompiledWorkflow()) { + mergeCompiledWorkflow(other.getCompiledWorkflow()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private flyteidl.core.Compiler.CompiledWorkflowClosure compiledWorkflow_; + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Compiler.CompiledWorkflowClosure, flyteidl.core.Compiler.CompiledWorkflowClosure.Builder, flyteidl.core.Compiler.CompiledWorkflowClosureOrBuilder> compiledWorkflowBuilder_; + /** + * 
.flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + public boolean hasCompiledWorkflow() { + return compiledWorkflowBuilder_ != null || compiledWorkflow_ != null; + } + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + public flyteidl.core.Compiler.CompiledWorkflowClosure getCompiledWorkflow() { + if (compiledWorkflowBuilder_ == null) { + return compiledWorkflow_ == null ? flyteidl.core.Compiler.CompiledWorkflowClosure.getDefaultInstance() : compiledWorkflow_; + } else { + return compiledWorkflowBuilder_.getMessage(); + } + } + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + public Builder setCompiledWorkflow(flyteidl.core.Compiler.CompiledWorkflowClosure value) { + if (compiledWorkflowBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + compiledWorkflow_ = value; + onChanged(); + } else { + compiledWorkflowBuilder_.setMessage(value); + } + + return this; + } + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + public Builder setCompiledWorkflow( + flyteidl.core.Compiler.CompiledWorkflowClosure.Builder builderForValue) { + if (compiledWorkflowBuilder_ == null) { + compiledWorkflow_ = builderForValue.build(); + onChanged(); + } else { + compiledWorkflowBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + public Builder mergeCompiledWorkflow(flyteidl.core.Compiler.CompiledWorkflowClosure value) { + if (compiledWorkflowBuilder_ == null) { + if (compiledWorkflow_ != null) { + compiledWorkflow_ = + flyteidl.core.Compiler.CompiledWorkflowClosure.newBuilder(compiledWorkflow_).mergeFrom(value).buildPartial(); + } else { + compiledWorkflow_ = value; + } + onChanged(); + } else { + compiledWorkflowBuilder_.mergeFrom(value); + } + + return this; + } + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + public Builder 
clearCompiledWorkflow() { + if (compiledWorkflowBuilder_ == null) { + compiledWorkflow_ = null; + onChanged(); + } else { + compiledWorkflow_ = null; + compiledWorkflowBuilder_ = null; + } + + return this; + } + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + public flyteidl.core.Compiler.CompiledWorkflowClosure.Builder getCompiledWorkflowBuilder() { + + onChanged(); + return getCompiledWorkflowFieldBuilder().getBuilder(); + } + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + public flyteidl.core.Compiler.CompiledWorkflowClosureOrBuilder getCompiledWorkflowOrBuilder() { + if (compiledWorkflowBuilder_ != null) { + return compiledWorkflowBuilder_.getMessageOrBuilder(); + } else { + return compiledWorkflow_ == null ? + flyteidl.core.Compiler.CompiledWorkflowClosure.getDefaultInstance() : compiledWorkflow_; + } + } + /** + * .flyteidl.core.CompiledWorkflowClosure compiled_workflow = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Compiler.CompiledWorkflowClosure, flyteidl.core.Compiler.CompiledWorkflowClosure.Builder, flyteidl.core.Compiler.CompiledWorkflowClosureOrBuilder> + getCompiledWorkflowFieldBuilder() { + if (compiledWorkflowBuilder_ == null) { + compiledWorkflowBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.Compiler.CompiledWorkflowClosure, flyteidl.core.Compiler.CompiledWorkflowClosure.Builder, flyteidl.core.Compiler.CompiledWorkflowClosureOrBuilder>( + getCompiledWorkflow(), + getParentForChildren(), + isClean()); + compiledWorkflow_ = null; + } + return compiledWorkflowBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // 
@@protoc_insertion_point(builder_scope:flyteidl.admin.DynamicNodeWorkflowResponse) + } + + // @@protoc_insertion_point(class_scope:flyteidl.admin.DynamicNodeWorkflowResponse) + private static final flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse(); + } + + public static flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public DynamicNodeWorkflowResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new DynamicNodeWorkflowResponse(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public flyteidl.admin.NodeExecutionOuterClass.DynamicNodeWorkflowResponse getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + private static final com.google.protobuf.Descriptors.Descriptor internal_static_flyteidl_admin_NodeExecutionGetRequest_descriptor; private static final @@ -15770,6 +16982,16 @@ public flyteidl.admin.NodeExecutionOuterClass.NodeExecutionGetDataResponse getDe private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_flyteidl_admin_NodeExecutionGetDataResponse_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_GetDynamicNodeWorkflowRequest_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internal_static_flyteidl_admin_GetDynamicNodeWorkflowRequest_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_admin_DynamicNodeWorkflowResponse_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_admin_DynamicNodeWorkflowResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -15845,9 +17067,13 @@ public flyteidl.admin.NodeExecutionOuterClass.NodeExecutionGetDataResponse getDe ".core.LiteralMap\022E\n\020dynamic_workflow\030\020 \001" + "(\0132+.flyteidl.admin.DynamicWorkflowNodeM" + "etadata\022-\n\nflyte_urls\030\021 \001(\0132\031.flyteidl.a" + - "dmin.FlyteURLsB=Z;github.com/flyteorg/fl" + - "yte/flyteidl/gen/pb-go/flyteidl/adminb\006p" + - "roto3" + "dmin.FlyteURLs\"S\n\035GetDynamicNodeWorkflow" + + "Request\0222\n\002id\030\001 \001(\0132&.flyteidl.core.Node" + + "ExecutionIdentifier\"`\n\033DynamicNodeWorkfl" + + "owResponse\022A\n\021compiled_workflow\030\001 \001(\0132&." + + "flyteidl.core.CompiledWorkflowClosureB=Z" + + ";github.com/flyteorg/flyte/flyteidl/gen/" + + "pb-go/flyteidl/adminb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -15941,6 +17167,18 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_NodeExecutionGetDataResponse_descriptor, new java.lang.String[] { "Inputs", "Outputs", "FullInputs", "FullOutputs", "DynamicWorkflow", "FlyteUrls", }); + internal_static_flyteidl_admin_GetDynamicNodeWorkflowRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_flyteidl_admin_GetDynamicNodeWorkflowRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_GetDynamicNodeWorkflowRequest_descriptor, + new java.lang.String[] { "Id", }); + internal_static_flyteidl_admin_DynamicNodeWorkflowResponse_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_flyteidl_admin_DynamicNodeWorkflowResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( + internal_static_flyteidl_admin_DynamicNodeWorkflowResponse_descriptor, + new java.lang.String[] { "CompiledWorkflow", }); flyteidl.admin.Common.getDescriptor(); flyteidl.core.Execution.getDescriptor(); flyteidl.core.Catalog.getDescriptor(); diff --git a/flyteidl/gen/pb-java/flyteidl/admin/ProjectAttributesOuterClass.java b/flyteidl/gen/pb-java/flyteidl/admin/ProjectAttributesOuterClass.java index 4af2acdb48..c124b52d9d 100644 --- a/flyteidl/gen/pb-java/flyteidl/admin/ProjectAttributesOuterClass.java +++ b/flyteidl/gen/pb-java/flyteidl/admin/ProjectAttributesOuterClass.java @@ -48,6 +48,24 @@ public interface ProjectAttributesOrBuilder extends * .flyteidl.admin.MatchingAttributes matching_attributes = 2; */ flyteidl.admin.MatchableResourceOuterClass.MatchingAttributesOrBuilder getMatchingAttributesOrBuilder(); + + /** + *
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -68,6 +86,7 @@ private ProjectAttributes(com.google.protobuf.GeneratedMessageV3.Builder buil
     }
     private ProjectAttributes() {
       project_ = "";
+      org_ = "";
     }
 
     @java.lang.Override
@@ -113,6 +132,12 @@ private ProjectAttributes(
 
               break;
             }
+            case 26: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -208,6 +233,48 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchingAttributesOrBuilder ge
       return getMatchingAttributes();
     }
 
+    public static final int ORG_FIELD_NUMBER = 3;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -228,6 +295,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (matchingAttributes_ != null) { output.writeMessage(2, getMatchingAttributes()); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, org_); + } unknownFields.writeTo(output); } @@ -244,6 +314,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getMatchingAttributes()); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -266,6 +339,8 @@ public boolean equals(final java.lang.Object obj) { if (!getMatchingAttributes() .equals(other.getMatchingAttributes())) return false; } + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -283,6 +358,8 @@ public int hashCode() { hash = (37 * hash) + MATCHING_ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getMatchingAttributes().hashCode(); } + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -429,6 +506,8 @@ public Builder clear() { matchingAttributes_ = null; matchingAttributesBuilder_ = null; } + org_ = ""; + return this; } @@ -461,6 +540,7 @@ public flyteidl.admin.ProjectAttributesOuterClass.ProjectAttributes buildPartial } else { 
result.matchingAttributes_ = matchingAttributesBuilder_.build(); } + result.org_ = org_; onBuilt(); return result; } @@ -516,6 +596,10 @@ public Builder mergeFrom(flyteidl.admin.ProjectAttributesOuterClass.ProjectAttri if (other.hasMatchingAttributes()) { mergeMatchingAttributes(other.getMatchingAttributes()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -750,6 +834,95 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchingAttributesOrBuilder ge } return matchingAttributesBuilder_; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -1940,6 +2113,24 @@ public interface ProjectAttributesGetRequestOrBuilder extends * .flyteidl.admin.MatchableResource resource_type = 2; */ flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceType(); + + /** + *
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -1961,6 +2152,7 @@ private ProjectAttributesGetRequest(com.google.protobuf.GeneratedMessageV3.Build
     private ProjectAttributesGetRequest() {
       project_ = "";
       resourceType_ = 0;
+      org_ = "";
     }
 
     @java.lang.Override
@@ -1999,6 +2191,12 @@ private ProjectAttributesGetRequest(
               resourceType_ = rawValue;
               break;
             }
+            case 26: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -2102,6 +2300,48 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceT
       return result == null ? flyteidl.admin.MatchableResourceOuterClass.MatchableResource.UNRECOGNIZED : result;
     }
 
+    public static final int ORG_FIELD_NUMBER = 3;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -2122,6 +2362,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (resourceType_ != flyteidl.admin.MatchableResourceOuterClass.MatchableResource.TASK_RESOURCE.getNumber()) { output.writeEnum(2, resourceType_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, org_); + } unknownFields.writeTo(output); } @@ -2138,6 +2381,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, resourceType_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -2156,6 +2402,8 @@ public boolean equals(final java.lang.Object obj) { if (!getProject() .equals(other.getProject())) return false; if (resourceType_ != other.resourceType_) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -2171,6 +2419,8 @@ public int hashCode() { hash = (53 * hash) + getProject().hashCode(); hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER; hash = (53 * hash) + resourceType_; + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -2313,6 +2563,8 @@ public Builder clear() { resourceType_ = 0; + org_ = ""; + return this; } @@ -2341,6 +2593,7 @@ public 
flyteidl.admin.ProjectAttributesOuterClass.ProjectAttributesGetRequest bu flyteidl.admin.ProjectAttributesOuterClass.ProjectAttributesGetRequest result = new flyteidl.admin.ProjectAttributesOuterClass.ProjectAttributesGetRequest(this); result.project_ = project_; result.resourceType_ = resourceType_; + result.org_ = org_; onBuilt(); return result; } @@ -2396,6 +2649,10 @@ public Builder mergeFrom(flyteidl.admin.ProjectAttributesOuterClass.ProjectAttri if (other.resourceType_ != 0) { setResourceTypeValue(other.getResourceTypeValue()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -2588,6 +2845,95 @@ public Builder clearResourceType() { onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -3299,6 +3645,24 @@ public interface ProjectAttributesDeleteRequestOrBuilder extends * .flyteidl.admin.MatchableResource resource_type = 2; */ flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceType(); + + /** + *
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -3320,6 +3684,7 @@ private ProjectAttributesDeleteRequest(com.google.protobuf.GeneratedMessageV3.Bu
     private ProjectAttributesDeleteRequest() {
       project_ = "";
       resourceType_ = 0;
+      org_ = "";
     }
 
     @java.lang.Override
@@ -3358,6 +3723,12 @@ private ProjectAttributesDeleteRequest(
               resourceType_ = rawValue;
               break;
             }
+            case 26: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -3461,6 +3832,48 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceT
       return result == null ? flyteidl.admin.MatchableResourceOuterClass.MatchableResource.UNRECOGNIZED : result;
     }
 
+    public static final int ORG_FIELD_NUMBER = 3;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the project.
+     * 
+ * + * string org = 3; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -3481,6 +3894,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (resourceType_ != flyteidl.admin.MatchableResourceOuterClass.MatchableResource.TASK_RESOURCE.getNumber()) { output.writeEnum(2, resourceType_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, org_); + } unknownFields.writeTo(output); } @@ -3497,6 +3913,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, resourceType_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -3515,6 +3934,8 @@ public boolean equals(final java.lang.Object obj) { if (!getProject() .equals(other.getProject())) return false; if (resourceType_ != other.resourceType_) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -3530,6 +3951,8 @@ public int hashCode() { hash = (53 * hash) + getProject().hashCode(); hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER; hash = (53 * hash) + resourceType_; + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -3672,6 +4095,8 @@ public Builder clear() { resourceType_ = 0; + org_ = ""; + return this; } @@ -3700,6 +4125,7 @@ public 
flyteidl.admin.ProjectAttributesOuterClass.ProjectAttributesDeleteRequest flyteidl.admin.ProjectAttributesOuterClass.ProjectAttributesDeleteRequest result = new flyteidl.admin.ProjectAttributesOuterClass.ProjectAttributesDeleteRequest(this); result.project_ = project_; result.resourceType_ = resourceType_; + result.org_ = org_; onBuilt(); return result; } @@ -3755,6 +4181,10 @@ public Builder mergeFrom(flyteidl.admin.ProjectAttributesOuterClass.ProjectAttri if (other.resourceType_ != 0) { setResourceTypeValue(other.getResourceTypeValue()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -3947,6 +4377,95 @@ public Builder clearResourceType() { onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the project.
+       * 
+ * + * string org = 3; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -4465,23 +4984,24 @@ public flyteidl.admin.ProjectAttributesOuterClass.ProjectAttributesDeleteRespons java.lang.String[] descriptorData = { "\n\'flyteidl/admin/project_attributes.prot" + "o\022\016flyteidl.admin\032\'flyteidl/admin/matcha" + - "ble_resource.proto\"e\n\021ProjectAttributes\022" + + "ble_resource.proto\"r\n\021ProjectAttributes\022" + "\017\n\007project\030\001 \001(\t\022?\n\023matching_attributes\030" + "\002 \001(\0132\".flyteidl.admin.MatchingAttribute" + - "s\"W\n\036ProjectAttributesUpdateRequest\0225\n\na" + - "ttributes\030\001 \001(\0132!.flyteidl.admin.Project" + - "Attributes\"!\n\037ProjectAttributesUpdateRes" + - "ponse\"h\n\033ProjectAttributesGetRequest\022\017\n\007" + - "project\030\001 \001(\t\0228\n\rresource_type\030\002 \001(\0162!.f" + - "lyteidl.admin.MatchableResource\"U\n\034Proje" + - "ctAttributesGetResponse\0225\n\nattributes\030\001 " + - "\001(\0132!.flyteidl.admin.ProjectAttributes\"k" + - "\n\036ProjectAttributesDeleteRequest\022\017\n\007proj" + - "ect\030\001 \001(\t\0228\n\rresource_type\030\002 \001(\0162!.flyte" + - "idl.admin.MatchableResource\"!\n\037ProjectAt" + - "tributesDeleteResponseB=Z;github.com/fly" + - "teorg/flyte/flyteidl/gen/pb-go/flyteidl/" + - "adminb\006proto3" + "s\022\013\n\003org\030\003 \001(\t\"W\n\036ProjectAttributesUpdat" + + "eRequest\0225\n\nattributes\030\001 \001(\0132!.flyteidl." 
+ + "admin.ProjectAttributes\"!\n\037ProjectAttrib" + + "utesUpdateResponse\"u\n\033ProjectAttributesG" + + "etRequest\022\017\n\007project\030\001 \001(\t\0228\n\rresource_t" + + "ype\030\002 \001(\0162!.flyteidl.admin.MatchableReso" + + "urce\022\013\n\003org\030\003 \001(\t\"U\n\034ProjectAttributesGe" + + "tResponse\0225\n\nattributes\030\001 \001(\0132!.flyteidl" + + ".admin.ProjectAttributes\"x\n\036ProjectAttri" + + "butesDeleteRequest\022\017\n\007project\030\001 \001(\t\0228\n\rr" + + "esource_type\030\002 \001(\0162!.flyteidl.admin.Matc" + + "hableResource\022\013\n\003org\030\003 \001(\t\"!\n\037ProjectAtt" + + "ributesDeleteResponseB=Z;github.com/flyt" + + "eorg/flyte/flyteidl/gen/pb-go/flyteidl/a" + + "dminb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { @@ -4501,7 +5021,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_ProjectAttributes_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_ProjectAttributes_descriptor, - new java.lang.String[] { "Project", "MatchingAttributes", }); + new java.lang.String[] { "Project", "MatchingAttributes", "Org", }); internal_static_flyteidl_admin_ProjectAttributesUpdateRequest_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_flyteidl_admin_ProjectAttributesUpdateRequest_fieldAccessorTable = new @@ -4519,7 +5039,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_ProjectAttributesGetRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_ProjectAttributesGetRequest_descriptor, - new java.lang.String[] { "Project", "ResourceType", }); + new java.lang.String[] { "Project", "ResourceType", "Org", }); 
internal_static_flyteidl_admin_ProjectAttributesGetResponse_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_flyteidl_admin_ProjectAttributesGetResponse_fieldAccessorTable = new @@ -4531,7 +5051,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_ProjectAttributesDeleteRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_ProjectAttributesDeleteRequest_descriptor, - new java.lang.String[] { "Project", "ResourceType", }); + new java.lang.String[] { "Project", "ResourceType", "Org", }); internal_static_flyteidl_admin_ProjectAttributesDeleteResponse_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_flyteidl_admin_ProjectAttributesDeleteResponse_fieldAccessorTable = new diff --git a/flyteidl/gen/pb-java/flyteidl/admin/ProjectDomainAttributesOuterClass.java b/flyteidl/gen/pb-java/flyteidl/admin/ProjectDomainAttributesOuterClass.java index d3512369cd..22d73ff719 100644 --- a/flyteidl/gen/pb-java/flyteidl/admin/ProjectDomainAttributesOuterClass.java +++ b/flyteidl/gen/pb-java/flyteidl/admin/ProjectDomainAttributesOuterClass.java @@ -66,6 +66,24 @@ public interface ProjectDomainAttributesOrBuilder extends * .flyteidl.admin.MatchingAttributes matching_attributes = 3; */ flyteidl.admin.MatchableResourceOuterClass.MatchingAttributesOrBuilder getMatchingAttributesOrBuilder(); + + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -87,6 +105,7 @@ private ProjectDomainAttributes(com.google.protobuf.GeneratedMessageV3.Builder
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -298,6 +365,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (matchingAttributes_ != null) { output.writeMessage(3, getMatchingAttributes()); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, org_); + } unknownFields.writeTo(output); } @@ -317,6 +387,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, getMatchingAttributes()); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -341,6 +414,8 @@ public boolean equals(final java.lang.Object obj) { if (!getMatchingAttributes() .equals(other.getMatchingAttributes())) return false; } + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -360,6 +435,8 @@ public int hashCode() { hash = (37 * hash) + MATCHING_ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getMatchingAttributes().hashCode(); } + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -508,6 +585,8 @@ public Builder clear() { matchingAttributes_ = null; matchingAttributesBuilder_ = null; } + org_ = ""; + return this; } @@ -541,6 +620,7 @@ public flyteidl.admin.ProjectDomainAttributesOuterClass.ProjectDomainAttributes } else { 
result.matchingAttributes_ = matchingAttributesBuilder_.build(); } + result.org_ = org_; onBuilt(); return result; } @@ -600,6 +680,10 @@ public Builder mergeFrom(flyteidl.admin.ProjectDomainAttributesOuterClass.Projec if (other.hasMatchingAttributes()) { mergeMatchingAttributes(other.getMatchingAttributes()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -923,6 +1007,95 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchingAttributesOrBuilder ge } return matchingAttributesBuilder_; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -2133,6 +2306,24 @@ public interface ProjectDomainAttributesGetRequestOrBuilder extends * .flyteidl.admin.MatchableResource resource_type = 3; */ flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceType(); + + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -2155,6 +2346,7 @@ private ProjectDomainAttributesGetRequest() {
       project_ = "";
       domain_ = "";
       resourceType_ = 0;
+      org_ = "";
     }
 
     @java.lang.Override
@@ -2199,6 +2391,12 @@ private ProjectDomainAttributesGetRequest(
               resourceType_ = rawValue;
               break;
             }
+            case 34: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -2346,6 +2544,48 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceT
       return result == null ? flyteidl.admin.MatchableResourceOuterClass.MatchableResource.UNRECOGNIZED : result;
     }
 
+    public static final int ORG_FIELD_NUMBER = 4;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -2369,6 +2609,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (resourceType_ != flyteidl.admin.MatchableResourceOuterClass.MatchableResource.TASK_RESOURCE.getNumber()) { output.writeEnum(3, resourceType_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, org_); + } unknownFields.writeTo(output); } @@ -2388,6 +2631,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(3, resourceType_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -2408,6 +2654,8 @@ public boolean equals(final java.lang.Object obj) { if (!getDomain() .equals(other.getDomain())) return false; if (resourceType_ != other.resourceType_) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -2425,6 +2673,8 @@ public int hashCode() { hash = (53 * hash) + getDomain().hashCode(); hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER; hash = (53 * hash) + resourceType_; + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -2569,6 +2819,8 @@ public Builder clear() { resourceType_ = 0; + org_ = ""; + return this; } @@ -2598,6 +2850,7 @@ public 
flyteidl.admin.ProjectDomainAttributesOuterClass.ProjectDomainAttributesG result.project_ = project_; result.domain_ = domain_; result.resourceType_ = resourceType_; + result.org_ = org_; onBuilt(); return result; } @@ -2657,6 +2910,10 @@ public Builder mergeFrom(flyteidl.admin.ProjectDomainAttributesOuterClass.Projec if (other.resourceType_ != 0) { setResourceTypeValue(other.getResourceTypeValue()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -2943,6 +3200,95 @@ public Builder clearResourceType() { onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -3674,6 +4020,24 @@ public interface ProjectDomainAttributesDeleteRequestOrBuilder extends * .flyteidl.admin.MatchableResource resource_type = 3; */ flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceType(); + + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -3696,6 +4060,7 @@ private ProjectDomainAttributesDeleteRequest() {
       project_ = "";
       domain_ = "";
       resourceType_ = 0;
+      org_ = "";
     }
 
     @java.lang.Override
@@ -3740,6 +4105,12 @@ private ProjectDomainAttributesDeleteRequest(
               resourceType_ = rawValue;
               break;
             }
+            case 34: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -3887,6 +4258,48 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceT
       return result == null ? flyteidl.admin.MatchableResourceOuterClass.MatchableResource.UNRECOGNIZED : result;
     }
 
+    public static final int ORG_FIELD_NUMBER = 4;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 4; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -3910,6 +4323,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (resourceType_ != flyteidl.admin.MatchableResourceOuterClass.MatchableResource.TASK_RESOURCE.getNumber()) { output.writeEnum(3, resourceType_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, org_); + } unknownFields.writeTo(output); } @@ -3929,6 +4345,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(3, resourceType_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -3949,6 +4368,8 @@ public boolean equals(final java.lang.Object obj) { if (!getDomain() .equals(other.getDomain())) return false; if (resourceType_ != other.resourceType_) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -3966,6 +4387,8 @@ public int hashCode() { hash = (53 * hash) + getDomain().hashCode(); hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER; hash = (53 * hash) + resourceType_; + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -4110,6 +4533,8 @@ public Builder clear() { resourceType_ = 0; + org_ = ""; + return this; } @@ -4139,6 +4564,7 @@ public 
flyteidl.admin.ProjectDomainAttributesOuterClass.ProjectDomainAttributesD result.project_ = project_; result.domain_ = domain_; result.resourceType_ = resourceType_; + result.org_ = org_; onBuilt(); return result; } @@ -4198,6 +4624,10 @@ public Builder mergeFrom(flyteidl.admin.ProjectDomainAttributesOuterClass.Projec if (other.resourceType_ != 0) { setResourceTypeValue(other.getResourceTypeValue()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -4484,6 +4914,95 @@ public Builder clearResourceType() { onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 4; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -5002,26 +5521,27 @@ public flyteidl.admin.ProjectDomainAttributesOuterClass.ProjectDomainAttributesD java.lang.String[] descriptorData = { "\n.flyteidl/admin/project_domain_attribut" + "es.proto\022\016flyteidl.admin\032\'flyteidl/admin" + - "/matchable_resource.proto\"{\n\027ProjectDoma" + - "inAttributes\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030" + - "\002 \001(\t\022?\n\023matching_attributes\030\003 \001(\0132\".fly" + - "teidl.admin.MatchingAttributes\"c\n$Projec" + - "tDomainAttributesUpdateRequest\022;\n\nattrib" + - "utes\030\001 \001(\0132\'.flyteidl.admin.ProjectDomai" + - "nAttributes\"\'\n%ProjectDomainAttributesUp" + - "dateResponse\"~\n!ProjectDomainAttributesG" + - "etRequest\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030\002 \001" + - "(\t\0228\n\rresource_type\030\003 \001(\0162!.flyteidl.adm" + - "in.MatchableResource\"a\n\"ProjectDomainAtt" + - "ributesGetResponse\022;\n\nattributes\030\001 \001(\0132\'" + - ".flyteidl.admin.ProjectDomainAttributes\"" + - "\201\001\n$ProjectDomainAttributesDeleteRequest" + - "\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\0228\n\rres" + - "ource_type\030\003 \001(\0162!.flyteidl.admin.Matcha" + - "bleResource\"\'\n%ProjectDomainAttributesDe" + - "leteResponseB=Z;github.com/flyteorg/flyt" + - "e/flyteidl/gen/pb-go/flyteidl/adminb\006pro" + - "to3" + "/matchable_resource.proto\"\210\001\n\027ProjectDom" + + "ainAttributes\022\017\n\007project\030\001 \001(\t\022\016\n\006domain" + + "\030\002 \001(\t\022?\n\023matching_attributes\030\003 \001(\0132\".fl" + + 
"yteidl.admin.MatchingAttributes\022\013\n\003org\030\004" + + " \001(\t\"c\n$ProjectDomainAttributesUpdateReq" + + "uest\022;\n\nattributes\030\001 \001(\0132\'.flyteidl.admi" + + "n.ProjectDomainAttributes\"\'\n%ProjectDoma" + + "inAttributesUpdateResponse\"\213\001\n!ProjectDo" + + "mainAttributesGetRequest\022\017\n\007project\030\001 \001(" + + "\t\022\016\n\006domain\030\002 \001(\t\0228\n\rresource_type\030\003 \001(\016" + + "2!.flyteidl.admin.MatchableResource\022\013\n\003o" + + "rg\030\004 \001(\t\"a\n\"ProjectDomainAttributesGetRe" + + "sponse\022;\n\nattributes\030\001 \001(\0132\'.flyteidl.ad" + + "min.ProjectDomainAttributes\"\216\001\n$ProjectD" + + "omainAttributesDeleteRequest\022\017\n\007project\030" + + "\001 \001(\t\022\016\n\006domain\030\002 \001(\t\0228\n\rresource_type\030\003" + + " \001(\0162!.flyteidl.admin.MatchableResource\022" + + "\013\n\003org\030\004 \001(\t\"\'\n%ProjectDomainAttributesD" + + "eleteResponseB=Z;github.com/flyteorg/fly" + + "te/flyteidl/gen/pb-go/flyteidl/adminb\006pr" + + "oto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -5041,7 +5561,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_ProjectDomainAttributes_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_ProjectDomainAttributes_descriptor, - new java.lang.String[] { "Project", "Domain", "MatchingAttributes", }); + new java.lang.String[] { "Project", "Domain", "MatchingAttributes", "Org", }); internal_static_flyteidl_admin_ProjectDomainAttributesUpdateRequest_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_flyteidl_admin_ProjectDomainAttributesUpdateRequest_fieldAccessorTable = new @@ -5059,7 +5579,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_ProjectDomainAttributesGetRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_ProjectDomainAttributesGetRequest_descriptor, - new java.lang.String[] { "Project", "Domain", "ResourceType", }); + new java.lang.String[] { "Project", "Domain", "ResourceType", "Org", }); internal_static_flyteidl_admin_ProjectDomainAttributesGetResponse_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_flyteidl_admin_ProjectDomainAttributesGetResponse_fieldAccessorTable = new @@ -5071,7 +5591,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_ProjectDomainAttributesDeleteRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_ProjectDomainAttributesDeleteRequest_descriptor, - new java.lang.String[] { "Project", "Domain", "ResourceType", }); + new java.lang.String[] { "Project", "Domain", "ResourceType", "Org", }); internal_static_flyteidl_admin_ProjectDomainAttributesDeleteResponse_descriptor = getDescriptor().getMessageTypes().get(6); 
internal_static_flyteidl_admin_ProjectDomainAttributesDeleteResponse_fieldAccessorTable = new diff --git a/flyteidl/gen/pb-java/flyteidl/admin/ProjectOuterClass.java b/flyteidl/gen/pb-java/flyteidl/admin/ProjectOuterClass.java index 518b844a41..f00680e1b0 100644 --- a/flyteidl/gen/pb-java/flyteidl/admin/ProjectOuterClass.java +++ b/flyteidl/gen/pb-java/flyteidl/admin/ProjectOuterClass.java @@ -892,6 +892,24 @@ flyteidl.admin.ProjectOuterClass.DomainOrBuilder getDomainsOrBuilder( * .flyteidl.admin.Project.ProjectState state = 6; */ flyteidl.admin.ProjectOuterClass.Project.ProjectState getState(); + + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 7; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 7; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -915,6 +933,7 @@ private Project() {
       domains_ = java.util.Collections.emptyList();
       description_ = "";
       state_ = 0;
+      org_ = "";
     }
 
     @java.lang.Override
@@ -987,6 +1006,12 @@ private Project(
               state_ = rawValue;
               break;
             }
+            case 58: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -1364,6 +1389,48 @@ public flyteidl.admin.ProjectOuterClass.Project.ProjectState getState() {
       return result == null ? flyteidl.admin.ProjectOuterClass.Project.ProjectState.UNRECOGNIZED : result;
     }
 
+    public static final int ORG_FIELD_NUMBER = 7;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 7; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 7; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -1396,6 +1463,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (state_ != flyteidl.admin.ProjectOuterClass.Project.ProjectState.ACTIVE.getNumber()) { output.writeEnum(6, state_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 7, org_); + } unknownFields.writeTo(output); } @@ -1426,6 +1496,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(6, state_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -1455,6 +1528,8 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getLabels())) return false; } if (state_ != other.state_) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -1482,6 +1557,8 @@ public int hashCode() { } hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + state_; + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1640,6 +1717,8 @@ public Builder clear() { } state_ = 0; + org_ = ""; + return this; } @@ -1686,6 +1765,7 @@ public flyteidl.admin.ProjectOuterClass.Project buildPartial() { result.labels_ = labelsBuilder_.build(); } result.state_ = state_; + result.org_ = org_; 
result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -1779,6 +1859,10 @@ public Builder mergeFrom(flyteidl.admin.ProjectOuterClass.Project other) { if (other.state_ != 0) { setStateValue(other.getStateValue()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -2502,6 +2586,95 @@ public Builder clearState() { onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 7; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 7; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 7; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 7; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 7; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -3616,6 +3789,24 @@ public interface ProjectListRequestOrBuilder extends * .flyteidl.admin.Sort sort_by = 4; */ flyteidl.admin.Common.SortOrBuilder getSortByOrBuilder(); + + /** + *
+     * Optional, org filter applied to list project requests.
+     * 
+ * + * string org = 5; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org filter applied to list project requests.
+     * 
+ * + * string org = 5; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -3637,6 +3828,7 @@ private ProjectListRequest(com.google.protobuf.GeneratedMessageV3.Builder bui
     private ProjectListRequest() {
       token_ = "";
       filters_ = "";
+      org_ = "";
     }
 
     @java.lang.Override
@@ -3693,6 +3885,12 @@ private ProjectListRequest(
 
               break;
             }
+            case 42: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -3867,6 +4065,48 @@ public flyteidl.admin.Common.SortOrBuilder getSortByOrBuilder() {
       return getSortBy();
     }
 
+    public static final int ORG_FIELD_NUMBER = 5;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org filter applied to list project requests.
+     * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org filter applied to list project requests.
+     * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -3893,6 +4133,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (sortBy_ != null) { output.writeMessage(4, getSortBy()); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, org_); + } unknownFields.writeTo(output); } @@ -3916,6 +4159,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, getSortBy()); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -3942,6 +4188,8 @@ public boolean equals(final java.lang.Object obj) { if (!getSortBy() .equals(other.getSortBy())) return false; } + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -3963,6 +4211,8 @@ public int hashCode() { hash = (37 * hash) + SORT_BY_FIELD_NUMBER; hash = (53 * hash) + getSortBy().hashCode(); } + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -4113,6 +4363,8 @@ public Builder clear() { sortBy_ = null; sortByBuilder_ = null; } + org_ = ""; + return this; } @@ -4147,6 +4399,7 @@ public flyteidl.admin.ProjectOuterClass.ProjectListRequest buildPartial() { } else { result.sortBy_ = sortByBuilder_.build(); } + result.org_ = org_; onBuilt(); return result; } @@ -4209,6 +4462,10 
@@ public Builder mergeFrom(flyteidl.admin.ProjectOuterClass.ProjectListRequest oth if (other.hasSortBy()) { mergeSortBy(other.getSortBy()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -4638,6 +4895,95 @@ public flyteidl.admin.Common.SortOrBuilder getSortByOrBuilder() { } return sortByBuilder_; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org filter applied to list project requests.
+       * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org filter applied to list project requests.
+       * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org filter applied to list project requests.
+       * 
+ * + * string org = 5; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org filter applied to list project requests.
+       * 
+ * + * string org = 5; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org filter applied to list project requests.
+       * 
+ * + * string org = 5; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -6251,23 +6597,24 @@ public flyteidl.admin.ProjectOuterClass.ProjectUpdateResponse getDefaultInstance java.lang.String[] descriptorData = { "\n\034flyteidl/admin/project.proto\022\016flyteidl" + ".admin\032\033flyteidl/admin/common.proto\"\"\n\006D" + - "omain\022\n\n\002id\030\001 \001(\t\022\014\n\004name\030\002 \001(\t\"\376\001\n\007Proj" + + "omain\022\n\n\002id\030\001 \001(\t\022\014\n\004name\030\002 \001(\t\"\213\002\n\007Proj" + "ect\022\n\n\002id\030\001 \001(\t\022\014\n\004name\030\002 \001(\t\022\'\n\007domains" + "\030\003 \003(\0132\026.flyteidl.admin.Domain\022\023\n\013descri" + "ption\030\004 \001(\t\022&\n\006labels\030\005 \001(\0132\026.flyteidl.a" + "dmin.Labels\0223\n\005state\030\006 \001(\0162$.flyteidl.ad" + - "min.Project.ProjectState\">\n\014ProjectState" + - "\022\n\n\006ACTIVE\020\000\022\014\n\010ARCHIVED\020\001\022\024\n\020SYSTEM_GEN" + - "ERATED\020\002\"D\n\010Projects\022)\n\010projects\030\001 \003(\0132\027" + - ".flyteidl.admin.Project\022\r\n\005token\030\002 \001(\t\"j" + - "\n\022ProjectListRequest\022\r\n\005limit\030\001 \001(\r\022\r\n\005t" + - "oken\030\002 \001(\t\022\017\n\007filters\030\003 \001(\t\022%\n\007sort_by\030\004" + - " \001(\0132\024.flyteidl.admin.Sort\"B\n\026ProjectReg" + - "isterRequest\022(\n\007project\030\001 \001(\0132\027.flyteidl" + - ".admin.Project\"\031\n\027ProjectRegisterRespons" + - "e\"\027\n\025ProjectUpdateResponseB=Z;github.com" + - "/flyteorg/flyte/flyteidl/gen/pb-go/flyte" + - "idl/adminb\006proto3" + "min.Project.ProjectState\022\013\n\003org\030\007 \001(\t\">\n" + + 
"\014ProjectState\022\n\n\006ACTIVE\020\000\022\014\n\010ARCHIVED\020\001\022" + + "\024\n\020SYSTEM_GENERATED\020\002\"D\n\010Projects\022)\n\010pro" + + "jects\030\001 \003(\0132\027.flyteidl.admin.Project\022\r\n\005" + + "token\030\002 \001(\t\"w\n\022ProjectListRequest\022\r\n\005lim" + + "it\030\001 \001(\r\022\r\n\005token\030\002 \001(\t\022\017\n\007filters\030\003 \001(\t" + + "\022%\n\007sort_by\030\004 \001(\0132\024.flyteidl.admin.Sort\022" + + "\013\n\003org\030\005 \001(\t\"B\n\026ProjectRegisterRequest\022(" + + "\n\007project\030\001 \001(\0132\027.flyteidl.admin.Project" + + "\"\031\n\027ProjectRegisterResponse\"\027\n\025ProjectUp" + + "dateResponseB=Z;github.com/flyteorg/flyt" + + "e/flyteidl/gen/pb-go/flyteidl/adminb\006pro" + + "to3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { @@ -6293,7 +6640,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_Project_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_Project_descriptor, - new java.lang.String[] { "Id", "Name", "Domains", "Description", "Labels", "State", }); + new java.lang.String[] { "Id", "Name", "Domains", "Description", "Labels", "State", "Org", }); internal_static_flyteidl_admin_Projects_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_flyteidl_admin_Projects_fieldAccessorTable = new @@ -6305,7 +6652,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_ProjectListRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_ProjectListRequest_descriptor, - new java.lang.String[] { "Limit", "Token", "Filters", "SortBy", }); + new java.lang.String[] { "Limit", "Token", "Filters", "SortBy", "Org", }); 
internal_static_flyteidl_admin_ProjectRegisterRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_flyteidl_admin_ProjectRegisterRequest_fieldAccessorTable = new diff --git a/flyteidl/gen/pb-java/flyteidl/admin/WorkflowAttributesOuterClass.java b/flyteidl/gen/pb-java/flyteidl/admin/WorkflowAttributesOuterClass.java index 21be6d20a5..9c5bba8ad3 100644 --- a/flyteidl/gen/pb-java/flyteidl/admin/WorkflowAttributesOuterClass.java +++ b/flyteidl/gen/pb-java/flyteidl/admin/WorkflowAttributesOuterClass.java @@ -84,6 +84,24 @@ public interface WorkflowAttributesOrBuilder extends * .flyteidl.admin.MatchingAttributes matching_attributes = 4; */ flyteidl.admin.MatchableResourceOuterClass.MatchingAttributesOrBuilder getMatchingAttributesOrBuilder(); + + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -106,6 +124,7 @@ private WorkflowAttributes() {
       project_ = "";
       domain_ = "";
       workflow_ = "";
+      org_ = "";
     }
 
     @java.lang.Override
@@ -163,6 +182,12 @@ private WorkflowAttributes(
 
               break;
             }
+            case 42: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -342,6 +367,48 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchingAttributesOrBuilder ge
       return getMatchingAttributes();
     }
 
+    public static final int ORG_FIELD_NUMBER = 5;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -368,6 +435,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (matchingAttributes_ != null) { output.writeMessage(4, getMatchingAttributes()); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, org_); + } unknownFields.writeTo(output); } @@ -390,6 +460,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, getMatchingAttributes()); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -416,6 +489,8 @@ public boolean equals(final java.lang.Object obj) { if (!getMatchingAttributes() .equals(other.getMatchingAttributes())) return false; } + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -437,6 +512,8 @@ public int hashCode() { hash = (37 * hash) + MATCHING_ATTRIBUTES_FIELD_NUMBER; hash = (53 * hash) + getMatchingAttributes().hashCode(); } + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -587,6 +664,8 @@ public Builder clear() { matchingAttributes_ = null; matchingAttributesBuilder_ = null; } + org_ = ""; + return this; } @@ -621,6 +700,7 @@ public flyteidl.admin.WorkflowAttributesOuterClass.WorkflowAttributes buildParti } else { 
result.matchingAttributes_ = matchingAttributesBuilder_.build(); } + result.org_ = org_; onBuilt(); return result; } @@ -684,6 +764,10 @@ public Builder mergeFrom(flyteidl.admin.WorkflowAttributesOuterClass.WorkflowAtt if (other.hasMatchingAttributes()) { mergeMatchingAttributes(other.getMatchingAttributes()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -1096,6 +1180,95 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchingAttributesOrBuilder ge } return matchingAttributesBuilder_; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -2266,6 +2439,24 @@ public interface WorkflowAttributesGetRequestOrBuilder extends * .flyteidl.admin.MatchableResource resource_type = 4; */ flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceType(); + + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -2289,6 +2480,7 @@ private WorkflowAttributesGetRequest() {
       domain_ = "";
       workflow_ = "";
       resourceType_ = 0;
+      org_ = "";
     }
 
     @java.lang.Override
@@ -2339,6 +2531,12 @@ private WorkflowAttributesGetRequest(
               resourceType_ = rawValue;
               break;
             }
+            case 42: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -2530,6 +2728,48 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceT
       return result == null ? flyteidl.admin.MatchableResourceOuterClass.MatchableResource.UNRECOGNIZED : result;
     }
 
+    public static final int ORG_FIELD_NUMBER = 5;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -2556,6 +2796,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (resourceType_ != flyteidl.admin.MatchableResourceOuterClass.MatchableResource.TASK_RESOURCE.getNumber()) { output.writeEnum(4, resourceType_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, org_); + } unknownFields.writeTo(output); } @@ -2578,6 +2821,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(4, resourceType_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -2600,6 +2846,8 @@ public boolean equals(final java.lang.Object obj) { if (!getWorkflow() .equals(other.getWorkflow())) return false; if (resourceType_ != other.resourceType_) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -2619,6 +2867,8 @@ public int hashCode() { hash = (53 * hash) + getWorkflow().hashCode(); hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER; hash = (53 * hash) + resourceType_; + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -2765,6 +3015,8 @@ public Builder clear() { resourceType_ = 0; + org_ = ""; + return this; } @@ -2795,6 +3047,7 @@ public 
flyteidl.admin.WorkflowAttributesOuterClass.WorkflowAttributesGetRequest result.domain_ = domain_; result.workflow_ = workflow_; result.resourceType_ = resourceType_; + result.org_ = org_; onBuilt(); return result; } @@ -2858,6 +3111,10 @@ public Builder mergeFrom(flyteidl.admin.WorkflowAttributesOuterClass.WorkflowAtt if (other.resourceType_ != 0) { setResourceTypeValue(other.getResourceTypeValue()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -3238,6 +3495,95 @@ public Builder clearResourceType() { onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -3987,6 +4333,24 @@ public interface WorkflowAttributesDeleteRequestOrBuilder extends * .flyteidl.admin.MatchableResource resource_type = 4; */ flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceType(); + + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -4010,6 +4374,7 @@ private WorkflowAttributesDeleteRequest() {
       domain_ = "";
       workflow_ = "";
       resourceType_ = 0;
+      org_ = "";
     }
 
     @java.lang.Override
@@ -4060,6 +4425,12 @@ private WorkflowAttributesDeleteRequest(
               resourceType_ = rawValue;
               break;
             }
+            case 42: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -4251,6 +4622,48 @@ public flyteidl.admin.MatchableResourceOuterClass.MatchableResource getResourceT
       return result == null ? flyteidl.admin.MatchableResourceOuterClass.MatchableResource.UNRECOGNIZED : result;
     }
 
+    public static final int ORG_FIELD_NUMBER = 5;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the attributes.
+     * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -4277,6 +4690,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (resourceType_ != flyteidl.admin.MatchableResourceOuterClass.MatchableResource.TASK_RESOURCE.getNumber()) { output.writeEnum(4, resourceType_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, org_); + } unknownFields.writeTo(output); } @@ -4299,6 +4715,9 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(4, resourceType_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -4321,6 +4740,8 @@ public boolean equals(final java.lang.Object obj) { if (!getWorkflow() .equals(other.getWorkflow())) return false; if (resourceType_ != other.resourceType_) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -4340,6 +4761,8 @@ public int hashCode() { hash = (53 * hash) + getWorkflow().hashCode(); hash = (37 * hash) + RESOURCE_TYPE_FIELD_NUMBER; hash = (53 * hash) + resourceType_; + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -4486,6 +4909,8 @@ public Builder clear() { resourceType_ = 0; + org_ = ""; + return this; } @@ -4516,6 +4941,7 @@ public 
flyteidl.admin.WorkflowAttributesOuterClass.WorkflowAttributesDeleteReque result.domain_ = domain_; result.workflow_ = workflow_; result.resourceType_ = resourceType_; + result.org_ = org_; onBuilt(); return result; } @@ -4579,6 +5005,10 @@ public Builder mergeFrom(flyteidl.admin.WorkflowAttributesOuterClass.WorkflowAtt if (other.resourceType_ != 0) { setResourceTypeValue(other.getResourceTypeValue()); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -4959,6 +5389,95 @@ public Builder clearResourceType() { onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the attributes.
+       * 
+ * + * string org = 5; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -5477,26 +5996,27 @@ public flyteidl.admin.WorkflowAttributesOuterClass.WorkflowAttributesDeleteRespo java.lang.String[] descriptorData = { "\n(flyteidl/admin/workflow_attributes.pro" + "to\022\016flyteidl.admin\032\'flyteidl/admin/match" + - "able_resource.proto\"\210\001\n\022WorkflowAttribut" + + "able_resource.proto\"\225\001\n\022WorkflowAttribut" + "es\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\022\020\n\010w" + "orkflow\030\003 \001(\t\022?\n\023matching_attributes\030\004 \001" + - "(\0132\".flyteidl.admin.MatchingAttributes\"Y" + - "\n\037WorkflowAttributesUpdateRequest\0226\n\natt" + - "ributes\030\001 \001(\0132\".flyteidl.admin.WorkflowA" + - "ttributes\"\"\n WorkflowAttributesUpdateRes" + - "ponse\"\213\001\n\034WorkflowAttributesGetRequest\022\017" + - "\n\007project\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\022\020\n\010workf" + - "low\030\003 \001(\t\0228\n\rresource_type\030\004 \001(\0162!.flyte" + - "idl.admin.MatchableResource\"W\n\035WorkflowA" + - "ttributesGetResponse\0226\n\nattributes\030\001 \001(\013" + - "2\".flyteidl.admin.WorkflowAttributes\"\216\001\n" + - "\037WorkflowAttributesDeleteRequest\022\017\n\007proj" + - "ect\030\001 \001(\t\022\016\n\006domain\030\002 \001(\t\022\020\n\010workflow\030\003 " + - "\001(\t\0228\n\rresource_type\030\004 \001(\0162!.flyteidl.ad" + - "min.MatchableResource\"\"\n WorkflowAttribu" + - "tesDeleteResponseB=Z;github.com/flyteorg" + - "/flyte/flyteidl/gen/pb-go/flyteidl/admin" + - "b\006proto3" + "(\0132\".flyteidl.admin.MatchingAttributes\022\013" + + "\n\003org\030\005 
\001(\t\"Y\n\037WorkflowAttributesUpdateR" + + "equest\0226\n\nattributes\030\001 \001(\0132\".flyteidl.ad" + + "min.WorkflowAttributes\"\"\n WorkflowAttrib" + + "utesUpdateResponse\"\230\001\n\034WorkflowAttribute" + + "sGetRequest\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030\002" + + " \001(\t\022\020\n\010workflow\030\003 \001(\t\0228\n\rresource_type\030" + + "\004 \001(\0162!.flyteidl.admin.MatchableResource" + + "\022\013\n\003org\030\005 \001(\t\"W\n\035WorkflowAttributesGetRe" + + "sponse\0226\n\nattributes\030\001 \001(\0132\".flyteidl.ad" + + "min.WorkflowAttributes\"\233\001\n\037WorkflowAttri" + + "butesDeleteRequest\022\017\n\007project\030\001 \001(\t\022\016\n\006d" + + "omain\030\002 \001(\t\022\020\n\010workflow\030\003 \001(\t\0228\n\rresourc" + + "e_type\030\004 \001(\0162!.flyteidl.admin.MatchableR" + + "esource\022\013\n\003org\030\005 \001(\t\"\"\n WorkflowAttribut" + + "esDeleteResponseB=Z;github.com/flyteorg/" + + "flyte/flyteidl/gen/pb-go/flyteidl/adminb" + + "\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -5516,7 +6036,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_WorkflowAttributes_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_WorkflowAttributes_descriptor, - new java.lang.String[] { "Project", "Domain", "Workflow", "MatchingAttributes", }); + new java.lang.String[] { "Project", "Domain", "Workflow", "MatchingAttributes", "Org", }); internal_static_flyteidl_admin_WorkflowAttributesUpdateRequest_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_flyteidl_admin_WorkflowAttributesUpdateRequest_fieldAccessorTable = new @@ -5534,7 +6054,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_WorkflowAttributesGetRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_WorkflowAttributesGetRequest_descriptor, - new java.lang.String[] { "Project", "Domain", "Workflow", "ResourceType", }); + new java.lang.String[] { "Project", "Domain", "Workflow", "ResourceType", "Org", }); internal_static_flyteidl_admin_WorkflowAttributesGetResponse_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_flyteidl_admin_WorkflowAttributesGetResponse_fieldAccessorTable = new @@ -5546,7 +6066,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_admin_WorkflowAttributesDeleteRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_admin_WorkflowAttributesDeleteRequest_descriptor, - new java.lang.String[] { "Project", "Domain", "Workflow", "ResourceType", }); + new java.lang.String[] { "Project", "Domain", "Workflow", "ResourceType", "Org", }); internal_static_flyteidl_admin_WorkflowAttributesDeleteResponse_descriptor = getDescriptor().getMessageTypes().get(6); 
internal_static_flyteidl_admin_WorkflowAttributesDeleteResponse_fieldAccessorTable = new diff --git a/flyteidl/gen/pb-java/flyteidl/artifact/Artifacts.java b/flyteidl/gen/pb-java/flyteidl/artifact/Artifacts.java deleted file mode 100644 index 9d3cbf73cb..0000000000 --- a/flyteidl/gen/pb-java/flyteidl/artifact/Artifacts.java +++ /dev/null @@ -1,20259 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: flyteidl/artifact/artifacts.proto - -package flyteidl.artifact; - -public final class Artifacts { - private Artifacts() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistryLite registry) { - } - - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - registerAllExtensions( - (com.google.protobuf.ExtensionRegistryLite) registry); - } - public interface ArtifactOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.Artifact) - com.google.protobuf.MessageOrBuilder { - - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - boolean hasArtifactId(); - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - flyteidl.core.ArtifactId.ArtifactID getArtifactId(); - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder(); - - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - boolean hasSpec(); - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - flyteidl.artifact.Artifacts.ArtifactSpec getSpec(); - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder getSpecOrBuilder(); - - /** - *
-     * references the tag field in ArtifactTag
-     * 
- * - * repeated string tags = 3; - */ - java.util.List - getTagsList(); - /** - *
-     * references the tag field in ArtifactTag
-     * 
- * - * repeated string tags = 3; - */ - int getTagsCount(); - /** - *
-     * references the tag field in ArtifactTag
-     * 
- * - * repeated string tags = 3; - */ - java.lang.String getTags(int index); - /** - *
-     * references the tag field in ArtifactTag
-     * 
- * - * repeated string tags = 3; - */ - com.google.protobuf.ByteString - getTagsBytes(int index); - - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - boolean hasSource(); - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - flyteidl.artifact.Artifacts.ArtifactSource getSource(); - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder getSourceOrBuilder(); - } - /** - * Protobuf type {@code flyteidl.artifact.Artifact} - */ - public static final class Artifact extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.Artifact) - ArtifactOrBuilder { - private static final long serialVersionUID = 0L; - // Use Artifact.newBuilder() to construct. - private Artifact(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private Artifact() { - tags_ = com.google.protobuf.LazyStringArrayList.EMPTY; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private Artifact( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.ArtifactId.ArtifactID.Builder subBuilder = null; - if (artifactId_ != null) { - subBuilder = artifactId_.toBuilder(); - } - artifactId_ = input.readMessage(flyteidl.core.ArtifactId.ArtifactID.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(artifactId_); - 
artifactId_ = subBuilder.buildPartial(); - } - - break; - } - case 18: { - flyteidl.artifact.Artifacts.ArtifactSpec.Builder subBuilder = null; - if (spec_ != null) { - subBuilder = spec_.toBuilder(); - } - spec_ = input.readMessage(flyteidl.artifact.Artifacts.ArtifactSpec.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(spec_); - spec_ = subBuilder.buildPartial(); - } - - break; - } - case 26: { - java.lang.String s = input.readStringRequireUtf8(); - if (!((mutable_bitField0_ & 0x00000004) != 0)) { - tags_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000004; - } - tags_.add(s); - break; - } - case 34: { - flyteidl.artifact.Artifacts.ArtifactSource.Builder subBuilder = null; - if (source_ != null) { - subBuilder = source_.toBuilder(); - } - source_ = input.readMessage(flyteidl.artifact.Artifacts.ArtifactSource.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(source_); - source_ = subBuilder.buildPartial(); - } - - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000004) != 0)) { - tags_ = tags_.getUnmodifiableView(); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_Artifact_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_Artifact_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.Artifact.class, flyteidl.artifact.Artifacts.Artifact.Builder.class); - } - - private int bitField0_; - public static final int ARTIFACT_ID_FIELD_NUMBER = 1; - private flyteidl.core.ArtifactId.ArtifactID artifactId_; - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public boolean hasArtifactId() { - return artifactId_ != null; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactID getArtifactId() { - return artifactId_ == null ? flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance() : artifactId_; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder() { - return getArtifactId(); - } - - public static final int SPEC_FIELD_NUMBER = 2; - private flyteidl.artifact.Artifacts.ArtifactSpec spec_; - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public boolean hasSpec() { - return spec_ != null; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public flyteidl.artifact.Artifacts.ArtifactSpec getSpec() { - return spec_ == null ? flyteidl.artifact.Artifacts.ArtifactSpec.getDefaultInstance() : spec_; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder getSpecOrBuilder() { - return getSpec(); - } - - public static final int TAGS_FIELD_NUMBER = 3; - private com.google.protobuf.LazyStringList tags_; - /** - *
-     * references the tag field in ArtifactTag
-     * 
- * - * repeated string tags = 3; - */ - public com.google.protobuf.ProtocolStringList - getTagsList() { - return tags_; - } - /** - *
-     * references the tag field in ArtifactTag
-     * 
- * - * repeated string tags = 3; - */ - public int getTagsCount() { - return tags_.size(); - } - /** - *
-     * references the tag field in ArtifactTag
-     * 
- * - * repeated string tags = 3; - */ - public java.lang.String getTags(int index) { - return tags_.get(index); - } - /** - *
-     * references the tag field in ArtifactTag
-     * 
- * - * repeated string tags = 3; - */ - public com.google.protobuf.ByteString - getTagsBytes(int index) { - return tags_.getByteString(index); - } - - public static final int SOURCE_FIELD_NUMBER = 4; - private flyteidl.artifact.Artifacts.ArtifactSource source_; - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - public boolean hasSource() { - return source_ != null; - } - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - public flyteidl.artifact.Artifacts.ArtifactSource getSource() { - return source_ == null ? flyteidl.artifact.Artifacts.ArtifactSource.getDefaultInstance() : source_; - } - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - public flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder getSourceOrBuilder() { - return getSource(); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (artifactId_ != null) { - output.writeMessage(1, getArtifactId()); - } - if (spec_ != null) { - output.writeMessage(2, getSpec()); - } - for (int i = 0; i < tags_.size(); i++) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, tags_.getRaw(i)); - } - if (source_ != null) { - output.writeMessage(4, getSource()); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (artifactId_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getArtifactId()); - } - if (spec_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getSpec()); - } - { - int dataSize = 0; - for (int i = 0; i < tags_.size(); 
i++) { - dataSize += computeStringSizeNoTag(tags_.getRaw(i)); - } - size += dataSize; - size += 1 * getTagsList().size(); - } - if (source_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, getSource()); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.Artifact)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.Artifact other = (flyteidl.artifact.Artifacts.Artifact) obj; - - if (hasArtifactId() != other.hasArtifactId()) return false; - if (hasArtifactId()) { - if (!getArtifactId() - .equals(other.getArtifactId())) return false; - } - if (hasSpec() != other.hasSpec()) return false; - if (hasSpec()) { - if (!getSpec() - .equals(other.getSpec())) return false; - } - if (!getTagsList() - .equals(other.getTagsList())) return false; - if (hasSource() != other.hasSource()) return false; - if (hasSource()) { - if (!getSource() - .equals(other.getSource())) return false; - } - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasArtifactId()) { - hash = (37 * hash) + ARTIFACT_ID_FIELD_NUMBER; - hash = (53 * hash) + getArtifactId().hashCode(); - } - if (hasSpec()) { - hash = (37 * hash) + SPEC_FIELD_NUMBER; - hash = (53 * hash) + getSpec().hashCode(); - } - if (getTagsCount() > 0) { - hash = (37 * hash) + TAGS_FIELD_NUMBER; - hash = (53 * hash) + getTagsList().hashCode(); - } - if (hasSource()) { - hash = (37 * hash) + SOURCE_FIELD_NUMBER; - hash = (53 * hash) + getSource().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static 
flyteidl.artifact.Artifacts.Artifact parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.Artifact parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.Artifact parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.Artifact parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.Artifact parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.Artifact parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.Artifact parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.Artifact parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.Artifact parseDelimitedFrom(java.io.InputStream input) - throws 
java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.Artifact parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.Artifact parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.Artifact parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.Artifact prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.Artifact} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.Artifact) - flyteidl.artifact.Artifacts.ArtifactOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_Artifact_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_Artifact_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.Artifact.class, flyteidl.artifact.Artifacts.Artifact.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.Artifact.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (artifactIdBuilder_ == null) { - artifactId_ = null; - } else { - artifactId_ = null; - artifactIdBuilder_ = null; - } - if (specBuilder_ == null) { - spec_ = null; - } else { - spec_ = null; - specBuilder_ = null; - } - tags_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - if (sourceBuilder_ == null) { - source_ = null; - } else { - source_ = null; - sourceBuilder_ = null; - } - 
return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_Artifact_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.Artifact getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.Artifact.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.Artifact build() { - flyteidl.artifact.Artifacts.Artifact result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.Artifact buildPartial() { - flyteidl.artifact.Artifacts.Artifact result = new flyteidl.artifact.Artifacts.Artifact(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (artifactIdBuilder_ == null) { - result.artifactId_ = artifactId_; - } else { - result.artifactId_ = artifactIdBuilder_.build(); - } - if (specBuilder_ == null) { - result.spec_ = spec_; - } else { - result.spec_ = specBuilder_.build(); - } - if (((bitField0_ & 0x00000004) != 0)) { - tags_ = tags_.getUnmodifiableView(); - bitField0_ = (bitField0_ & ~0x00000004); - } - result.tags_ = tags_; - if (sourceBuilder_ == null) { - result.source_ = source_; - } else { - result.source_ = sourceBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return 
super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.Artifact) { - return mergeFrom((flyteidl.artifact.Artifacts.Artifact)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.Artifact other) { - if (other == flyteidl.artifact.Artifacts.Artifact.getDefaultInstance()) return this; - if (other.hasArtifactId()) { - mergeArtifactId(other.getArtifactId()); - } - if (other.hasSpec()) { - mergeSpec(other.getSpec()); - } - if (!other.tags_.isEmpty()) { - if (tags_.isEmpty()) { - tags_ = other.tags_; - bitField0_ = (bitField0_ & ~0x00000004); - } else { - ensureTagsIsMutable(); - tags_.addAll(other.tags_); - } - onChanged(); - } - if (other.hasSource()) { - mergeSource(other.getSource()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.Artifact parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.Artifact) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != 
null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private flyteidl.core.ArtifactId.ArtifactID artifactId_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> artifactIdBuilder_; - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public boolean hasArtifactId() { - return artifactIdBuilder_ != null || artifactId_ != null; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactID getArtifactId() { - if (artifactIdBuilder_ == null) { - return artifactId_ == null ? flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance() : artifactId_; - } else { - return artifactIdBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public Builder setArtifactId(flyteidl.core.ArtifactId.ArtifactID value) { - if (artifactIdBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - artifactId_ = value; - onChanged(); - } else { - artifactIdBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public Builder setArtifactId( - flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { - if (artifactIdBuilder_ == null) { - artifactId_ = builderForValue.build(); - onChanged(); - } else { - artifactIdBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public Builder mergeArtifactId(flyteidl.core.ArtifactId.ArtifactID value) { - if (artifactIdBuilder_ == null) { - if (artifactId_ != null) { - artifactId_ = - flyteidl.core.ArtifactId.ArtifactID.newBuilder(artifactId_).mergeFrom(value).buildPartial(); - } else { - artifactId_ = value; - } - onChanged(); - } else { - artifactIdBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.ArtifactID 
artifact_id = 1; - */ - public Builder clearArtifactId() { - if (artifactIdBuilder_ == null) { - artifactId_ = null; - onChanged(); - } else { - artifactId_ = null; - artifactIdBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactID.Builder getArtifactIdBuilder() { - - onChanged(); - return getArtifactIdFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder() { - if (artifactIdBuilder_ != null) { - return artifactIdBuilder_.getMessageOrBuilder(); - } else { - return artifactId_ == null ? - flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance() : artifactId_; - } - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> - getArtifactIdFieldBuilder() { - if (artifactIdBuilder_ == null) { - artifactIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder>( - getArtifactId(), - getParentForChildren(), - isClean()); - artifactId_ = null; - } - return artifactIdBuilder_; - } - - private flyteidl.artifact.Artifacts.ArtifactSpec spec_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSpec, flyteidl.artifact.Artifacts.ArtifactSpec.Builder, flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder> specBuilder_; - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public boolean hasSpec() { - return specBuilder_ != null || spec_ != null; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public flyteidl.artifact.Artifacts.ArtifactSpec getSpec() { - if (specBuilder_ == null) { - return spec_ == null ? 
flyteidl.artifact.Artifacts.ArtifactSpec.getDefaultInstance() : spec_; - } else { - return specBuilder_.getMessage(); - } - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public Builder setSpec(flyteidl.artifact.Artifacts.ArtifactSpec value) { - if (specBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - spec_ = value; - onChanged(); - } else { - specBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public Builder setSpec( - flyteidl.artifact.Artifacts.ArtifactSpec.Builder builderForValue) { - if (specBuilder_ == null) { - spec_ = builderForValue.build(); - onChanged(); - } else { - specBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public Builder mergeSpec(flyteidl.artifact.Artifacts.ArtifactSpec value) { - if (specBuilder_ == null) { - if (spec_ != null) { - spec_ = - flyteidl.artifact.Artifacts.ArtifactSpec.newBuilder(spec_).mergeFrom(value).buildPartial(); - } else { - spec_ = value; - } - onChanged(); - } else { - specBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public Builder clearSpec() { - if (specBuilder_ == null) { - spec_ = null; - onChanged(); - } else { - spec_ = null; - specBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public flyteidl.artifact.Artifacts.ArtifactSpec.Builder getSpecBuilder() { - - onChanged(); - return getSpecFieldBuilder().getBuilder(); - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder getSpecOrBuilder() { - if (specBuilder_ != null) { - return specBuilder_.getMessageOrBuilder(); - } else { - return spec_ == null ? 
- flyteidl.artifact.Artifacts.ArtifactSpec.getDefaultInstance() : spec_; - } - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSpec, flyteidl.artifact.Artifacts.ArtifactSpec.Builder, flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder> - getSpecFieldBuilder() { - if (specBuilder_ == null) { - specBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSpec, flyteidl.artifact.Artifacts.ArtifactSpec.Builder, flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder>( - getSpec(), - getParentForChildren(), - isClean()); - spec_ = null; - } - return specBuilder_; - } - - private com.google.protobuf.LazyStringList tags_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureTagsIsMutable() { - if (!((bitField0_ & 0x00000004) != 0)) { - tags_ = new com.google.protobuf.LazyStringArrayList(tags_); - bitField0_ |= 0x00000004; - } - } - /** - *
-       * references the tag field in ArtifactTag
-       * 
- * - * repeated string tags = 3; - */ - public com.google.protobuf.ProtocolStringList - getTagsList() { - return tags_.getUnmodifiableView(); - } - /** - *
-       * references the tag field in ArtifactTag
-       * 
- * - * repeated string tags = 3; - */ - public int getTagsCount() { - return tags_.size(); - } - /** - *
-       * references the tag field in ArtifactTag
-       * 
- * - * repeated string tags = 3; - */ - public java.lang.String getTags(int index) { - return tags_.get(index); - } - /** - *
-       * references the tag field in ArtifactTag
-       * 
- * - * repeated string tags = 3; - */ - public com.google.protobuf.ByteString - getTagsBytes(int index) { - return tags_.getByteString(index); - } - /** - *
-       * references the tag field in ArtifactTag
-       * 
- * - * repeated string tags = 3; - */ - public Builder setTags( - int index, java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureTagsIsMutable(); - tags_.set(index, value); - onChanged(); - return this; - } - /** - *
-       * references the tag field in ArtifactTag
-       * 
- * - * repeated string tags = 3; - */ - public Builder addTags( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureTagsIsMutable(); - tags_.add(value); - onChanged(); - return this; - } - /** - *
-       * references the tag field in ArtifactTag
-       * 
- * - * repeated string tags = 3; - */ - public Builder addAllTags( - java.lang.Iterable values) { - ensureTagsIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, tags_); - onChanged(); - return this; - } - /** - *
-       * references the tag field in ArtifactTag
-       * 
- * - * repeated string tags = 3; - */ - public Builder clearTags() { - tags_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - onChanged(); - return this; - } - /** - *
-       * references the tag field in ArtifactTag
-       * 
- * - * repeated string tags = 3; - */ - public Builder addTagsBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - ensureTagsIsMutable(); - tags_.add(value); - onChanged(); - return this; - } - - private flyteidl.artifact.Artifacts.ArtifactSource source_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSource, flyteidl.artifact.Artifacts.ArtifactSource.Builder, flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder> sourceBuilder_; - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - public boolean hasSource() { - return sourceBuilder_ != null || source_ != null; - } - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - public flyteidl.artifact.Artifacts.ArtifactSource getSource() { - if (sourceBuilder_ == null) { - return source_ == null ? flyteidl.artifact.Artifacts.ArtifactSource.getDefaultInstance() : source_; - } else { - return sourceBuilder_.getMessage(); - } - } - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - public Builder setSource(flyteidl.artifact.Artifacts.ArtifactSource value) { - if (sourceBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - source_ = value; - onChanged(); - } else { - sourceBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - public Builder setSource( - flyteidl.artifact.Artifacts.ArtifactSource.Builder builderForValue) { - if (sourceBuilder_ == null) { - source_ = builderForValue.build(); - onChanged(); - } else { - sourceBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - public Builder mergeSource(flyteidl.artifact.Artifacts.ArtifactSource value) { - if (sourceBuilder_ == null) { - if (source_ != null) { - source_ = - 
flyteidl.artifact.Artifacts.ArtifactSource.newBuilder(source_).mergeFrom(value).buildPartial(); - } else { - source_ = value; - } - onChanged(); - } else { - sourceBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - public Builder clearSource() { - if (sourceBuilder_ == null) { - source_ = null; - onChanged(); - } else { - source_ = null; - sourceBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - public flyteidl.artifact.Artifacts.ArtifactSource.Builder getSourceBuilder() { - - onChanged(); - return getSourceFieldBuilder().getBuilder(); - } - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - public flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder getSourceOrBuilder() { - if (sourceBuilder_ != null) { - return sourceBuilder_.getMessageOrBuilder(); - } else { - return source_ == null ? - flyteidl.artifact.Artifacts.ArtifactSource.getDefaultInstance() : source_; - } - } - /** - * .flyteidl.artifact.ArtifactSource source = 4; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSource, flyteidl.artifact.Artifacts.ArtifactSource.Builder, flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder> - getSourceFieldBuilder() { - if (sourceBuilder_ == null) { - sourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSource, flyteidl.artifact.Artifacts.ArtifactSource.Builder, flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder>( - getSource(), - getParentForChildren(), - isClean()); - source_ = null; - } - return sourceBuilder_; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return 
super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.Artifact) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.Artifact) - private static final flyteidl.artifact.Artifacts.Artifact DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.Artifact(); - } - - public static flyteidl.artifact.Artifacts.Artifact getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public Artifact parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new Artifact(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.Artifact getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface CreateArtifactRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.CreateArtifactRequest) - com.google.protobuf.MessageOrBuilder { - - /** - *
-     * Specify just project/domain on creation
-     * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - boolean hasArtifactKey(); - /** - *
-     * Specify just project/domain on creation
-     * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - flyteidl.core.ArtifactId.ArtifactKey getArtifactKey(); - /** - *
-     * Specify just project/domain on creation
-     * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder(); - - /** - * string version = 3; - */ - java.lang.String getVersion(); - /** - * string version = 3; - */ - com.google.protobuf.ByteString - getVersionBytes(); - - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - boolean hasSpec(); - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - flyteidl.artifact.Artifacts.ArtifactSpec getSpec(); - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder getSpecOrBuilder(); - - /** - * map<string, string> partitions = 4; - */ - int getPartitionsCount(); - /** - * map<string, string> partitions = 4; - */ - boolean containsPartitions( - java.lang.String key); - /** - * Use {@link #getPartitionsMap()} instead. - */ - @java.lang.Deprecated - java.util.Map - getPartitions(); - /** - * map<string, string> partitions = 4; - */ - java.util.Map - getPartitionsMap(); - /** - * map<string, string> partitions = 4; - */ - - java.lang.String getPartitionsOrDefault( - java.lang.String key, - java.lang.String defaultValue); - /** - * map<string, string> partitions = 4; - */ - - java.lang.String getPartitionsOrThrow( - java.lang.String key); - - /** - * string tag = 5; - */ - java.lang.String getTag(); - /** - * string tag = 5; - */ - com.google.protobuf.ByteString - getTagBytes(); - - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - boolean hasSource(); - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - flyteidl.artifact.Artifacts.ArtifactSource getSource(); - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder getSourceOrBuilder(); - } - /** - * Protobuf type {@code flyteidl.artifact.CreateArtifactRequest} - */ - public static final class CreateArtifactRequest extends - com.google.protobuf.GeneratedMessageV3 implements - // 
@@protoc_insertion_point(message_implements:flyteidl.artifact.CreateArtifactRequest) - CreateArtifactRequestOrBuilder { - private static final long serialVersionUID = 0L; - // Use CreateArtifactRequest.newBuilder() to construct. - private CreateArtifactRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private CreateArtifactRequest() { - version_ = ""; - tag_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private CreateArtifactRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.ArtifactId.ArtifactKey.Builder subBuilder = null; - if (artifactKey_ != null) { - subBuilder = artifactKey_.toBuilder(); - } - artifactKey_ = input.readMessage(flyteidl.core.ArtifactId.ArtifactKey.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(artifactKey_); - artifactKey_ = subBuilder.buildPartial(); - } - - break; - } - case 18: { - flyteidl.artifact.Artifacts.ArtifactSpec.Builder subBuilder = null; - if (spec_ != null) { - subBuilder = spec_.toBuilder(); - } - spec_ = input.readMessage(flyteidl.artifact.Artifacts.ArtifactSpec.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(spec_); - spec_ = subBuilder.buildPartial(); - } - - break; - } - case 26: { - java.lang.String s = input.readStringRequireUtf8(); - - version_ = s; - break; - } - case 34: { - if (!((mutable_bitField0_ & 
0x00000008) != 0)) { - partitions_ = com.google.protobuf.MapField.newMapField( - PartitionsDefaultEntryHolder.defaultEntry); - mutable_bitField0_ |= 0x00000008; - } - com.google.protobuf.MapEntry - partitions__ = input.readMessage( - PartitionsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry); - partitions_.getMutableMap().put( - partitions__.getKey(), partitions__.getValue()); - break; - } - case 42: { - java.lang.String s = input.readStringRequireUtf8(); - - tag_ = s; - break; - } - case 50: { - flyteidl.artifact.Artifacts.ArtifactSource.Builder subBuilder = null; - if (source_ != null) { - subBuilder = source_.toBuilder(); - } - source_ = input.readMessage(flyteidl.artifact.Artifacts.ArtifactSource.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(source_); - source_ = subBuilder.buildPartial(); - } - - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateArtifactRequest_descriptor; - } - - @SuppressWarnings({"rawtypes"}) - @java.lang.Override - protected com.google.protobuf.MapField internalGetMapField( - int number) { - switch (number) { - case 4: - return internalGetPartitions(); - default: - throw new RuntimeException( - "Invalid map field number: " + number); - } - } - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return 
flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateArtifactRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.CreateArtifactRequest.class, flyteidl.artifact.Artifacts.CreateArtifactRequest.Builder.class); - } - - private int bitField0_; - public static final int ARTIFACT_KEY_FIELD_NUMBER = 1; - private flyteidl.core.ArtifactId.ArtifactKey artifactKey_; - /** - *
-     * Specify just project/domain on creation
-     * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public boolean hasArtifactKey() { - return artifactKey_ != null; - } - /** - *
-     * Specify just project/domain on creation
-     * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKey getArtifactKey() { - return artifactKey_ == null ? flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; - } - /** - *
-     * Specify just project/domain on creation
-     * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder() { - return getArtifactKey(); - } - - public static final int VERSION_FIELD_NUMBER = 3; - private volatile java.lang.Object version_; - /** - * string version = 3; - */ - public java.lang.String getVersion() { - java.lang.Object ref = version_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - version_ = s; - return s; - } - } - /** - * string version = 3; - */ - public com.google.protobuf.ByteString - getVersionBytes() { - java.lang.Object ref = version_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - version_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int SPEC_FIELD_NUMBER = 2; - private flyteidl.artifact.Artifacts.ArtifactSpec spec_; - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public boolean hasSpec() { - return spec_ != null; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public flyteidl.artifact.Artifacts.ArtifactSpec getSpec() { - return spec_ == null ? 
flyteidl.artifact.Artifacts.ArtifactSpec.getDefaultInstance() : spec_; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder getSpecOrBuilder() { - return getSpec(); - } - - public static final int PARTITIONS_FIELD_NUMBER = 4; - private static final class PartitionsDefaultEntryHolder { - static final com.google.protobuf.MapEntry< - java.lang.String, java.lang.String> defaultEntry = - com.google.protobuf.MapEntry - .newDefaultInstance( - flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateArtifactRequest_PartitionsEntry_descriptor, - com.google.protobuf.WireFormat.FieldType.STRING, - "", - com.google.protobuf.WireFormat.FieldType.STRING, - ""); - } - private com.google.protobuf.MapField< - java.lang.String, java.lang.String> partitions_; - private com.google.protobuf.MapField - internalGetPartitions() { - if (partitions_ == null) { - return com.google.protobuf.MapField.emptyMapField( - PartitionsDefaultEntryHolder.defaultEntry); - } - return partitions_; - } - - public int getPartitionsCount() { - return internalGetPartitions().getMap().size(); - } - /** - * map<string, string> partitions = 4; - */ - - public boolean containsPartitions( - java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } - return internalGetPartitions().getMap().containsKey(key); - } - /** - * Use {@link #getPartitionsMap()} instead. 
- */ - @java.lang.Deprecated - public java.util.Map getPartitions() { - return getPartitionsMap(); - } - /** - * map<string, string> partitions = 4; - */ - - public java.util.Map getPartitionsMap() { - return internalGetPartitions().getMap(); - } - /** - * map<string, string> partitions = 4; - */ - - public java.lang.String getPartitionsOrDefault( - java.lang.String key, - java.lang.String defaultValue) { - if (key == null) { throw new java.lang.NullPointerException(); } - java.util.Map map = - internalGetPartitions().getMap(); - return map.containsKey(key) ? map.get(key) : defaultValue; - } - /** - * map<string, string> partitions = 4; - */ - - public java.lang.String getPartitionsOrThrow( - java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } - java.util.Map map = - internalGetPartitions().getMap(); - if (!map.containsKey(key)) { - throw new java.lang.IllegalArgumentException(); - } - return map.get(key); - } - - public static final int TAG_FIELD_NUMBER = 5; - private volatile java.lang.Object tag_; - /** - * string tag = 5; - */ - public java.lang.String getTag() { - java.lang.Object ref = tag_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - tag_ = s; - return s; - } - } - /** - * string tag = 5; - */ - public com.google.protobuf.ByteString - getTagBytes() { - java.lang.Object ref = tag_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - tag_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int SOURCE_FIELD_NUMBER = 6; - private flyteidl.artifact.Artifacts.ArtifactSource source_; - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - public boolean hasSource() { - return source_ != null; - } - /** - * 
.flyteidl.artifact.ArtifactSource source = 6; - */ - public flyteidl.artifact.Artifacts.ArtifactSource getSource() { - return source_ == null ? flyteidl.artifact.Artifacts.ArtifactSource.getDefaultInstance() : source_; - } - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - public flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder getSourceOrBuilder() { - return getSource(); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (artifactKey_ != null) { - output.writeMessage(1, getArtifactKey()); - } - if (spec_ != null) { - output.writeMessage(2, getSpec()); - } - if (!getVersionBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, version_); - } - com.google.protobuf.GeneratedMessageV3 - .serializeStringMapTo( - output, - internalGetPartitions(), - PartitionsDefaultEntryHolder.defaultEntry, - 4); - if (!getTagBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 5, tag_); - } - if (source_ != null) { - output.writeMessage(6, getSource()); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (artifactKey_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getArtifactKey()); - } - if (spec_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getSpec()); - } - if (!getVersionBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, version_); - } - for (java.util.Map.Entry entry - : 
internalGetPartitions().getMap().entrySet()) { - com.google.protobuf.MapEntry - partitions__ = PartitionsDefaultEntryHolder.defaultEntry.newBuilderForType() - .setKey(entry.getKey()) - .setValue(entry.getValue()) - .build(); - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, partitions__); - } - if (!getTagBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, tag_); - } - if (source_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(6, getSource()); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.CreateArtifactRequest)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.CreateArtifactRequest other = (flyteidl.artifact.Artifacts.CreateArtifactRequest) obj; - - if (hasArtifactKey() != other.hasArtifactKey()) return false; - if (hasArtifactKey()) { - if (!getArtifactKey() - .equals(other.getArtifactKey())) return false; - } - if (!getVersion() - .equals(other.getVersion())) return false; - if (hasSpec() != other.hasSpec()) return false; - if (hasSpec()) { - if (!getSpec() - .equals(other.getSpec())) return false; - } - if (!internalGetPartitions().equals( - other.internalGetPartitions())) return false; - if (!getTag() - .equals(other.getTag())) return false; - if (hasSource() != other.hasSource()) return false; - if (hasSource()) { - if (!getSource() - .equals(other.getSource())) return false; - } - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasArtifactKey()) { - hash = (37 * hash) + ARTIFACT_KEY_FIELD_NUMBER; - hash = (53 * hash) + 
getArtifactKey().hashCode(); - } - hash = (37 * hash) + VERSION_FIELD_NUMBER; - hash = (53 * hash) + getVersion().hashCode(); - if (hasSpec()) { - hash = (37 * hash) + SPEC_FIELD_NUMBER; - hash = (53 * hash) + getSpec().hashCode(); - } - if (!internalGetPartitions().getMap().isEmpty()) { - hash = (37 * hash) + PARTITIONS_FIELD_NUMBER; - hash = (53 * hash) + internalGetPartitions().hashCode(); - } - hash = (37 * hash) + TAG_FIELD_NUMBER; - hash = (53 * hash) + getTag().hashCode(); - if (hasSource()) { - hash = (37 * hash) + SOURCE_FIELD_NUMBER; - hash = (53 * hash) + getSource().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseFrom( - byte[] data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateArtifactRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return 
DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.CreateArtifactRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.CreateArtifactRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.CreateArtifactRequest) - flyteidl.artifact.Artifacts.CreateArtifactRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateArtifactRequest_descriptor; - } - - @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMapField( - int number) { - switch (number) { - case 4: - return internalGetPartitions(); - default: - throw new RuntimeException( - "Invalid map field number: " + number); - } - } - @SuppressWarnings({"rawtypes"}) - protected com.google.protobuf.MapField internalGetMutableMapField( - int number) { - switch (number) { - case 4: - return internalGetMutablePartitions(); - default: - throw new RuntimeException( - "Invalid map field number: " + number); - } - } - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateArtifactRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.CreateArtifactRequest.class, 
flyteidl.artifact.Artifacts.CreateArtifactRequest.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.CreateArtifactRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (artifactKeyBuilder_ == null) { - artifactKey_ = null; - } else { - artifactKey_ = null; - artifactKeyBuilder_ = null; - } - version_ = ""; - - if (specBuilder_ == null) { - spec_ = null; - } else { - spec_ = null; - specBuilder_ = null; - } - internalGetMutablePartitions().clear(); - tag_ = ""; - - if (sourceBuilder_ == null) { - source_ = null; - } else { - source_ = null; - sourceBuilder_ = null; - } - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateArtifactRequest_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateArtifactRequest getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.CreateArtifactRequest.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateArtifactRequest build() { - flyteidl.artifact.Artifacts.CreateArtifactRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateArtifactRequest buildPartial() { - flyteidl.artifact.Artifacts.CreateArtifactRequest result = new flyteidl.artifact.Artifacts.CreateArtifactRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (artifactKeyBuilder_ == null) 
{ - result.artifactKey_ = artifactKey_; - } else { - result.artifactKey_ = artifactKeyBuilder_.build(); - } - result.version_ = version_; - if (specBuilder_ == null) { - result.spec_ = spec_; - } else { - result.spec_ = specBuilder_.build(); - } - result.partitions_ = internalGetPartitions(); - result.partitions_.makeImmutable(); - result.tag_ = tag_; - if (sourceBuilder_ == null) { - result.source_ = source_; - } else { - result.source_ = sourceBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.CreateArtifactRequest) { - return mergeFrom((flyteidl.artifact.Artifacts.CreateArtifactRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.CreateArtifactRequest other) { - if (other == flyteidl.artifact.Artifacts.CreateArtifactRequest.getDefaultInstance()) return this; - if 
(other.hasArtifactKey()) { - mergeArtifactKey(other.getArtifactKey()); - } - if (!other.getVersion().isEmpty()) { - version_ = other.version_; - onChanged(); - } - if (other.hasSpec()) { - mergeSpec(other.getSpec()); - } - internalGetMutablePartitions().mergeFrom( - other.internalGetPartitions()); - if (!other.getTag().isEmpty()) { - tag_ = other.tag_; - onChanged(); - } - if (other.hasSource()) { - mergeSource(other.getSource()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.CreateArtifactRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.CreateArtifactRequest) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private flyteidl.core.ArtifactId.ArtifactKey artifactKey_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder> artifactKeyBuilder_; - /** - *
-       * Specify just project/domain on creation
-       * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public boolean hasArtifactKey() { - return artifactKeyBuilder_ != null || artifactKey_ != null; - } - /** - *
-       * Specify just project/domain on creation
-       * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKey getArtifactKey() { - if (artifactKeyBuilder_ == null) { - return artifactKey_ == null ? flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; - } else { - return artifactKeyBuilder_.getMessage(); - } - } - /** - *
-       * Specify just project/domain on creation
-       * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public Builder setArtifactKey(flyteidl.core.ArtifactId.ArtifactKey value) { - if (artifactKeyBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - artifactKey_ = value; - onChanged(); - } else { - artifactKeyBuilder_.setMessage(value); - } - - return this; - } - /** - *
-       * Specify just project/domain on creation
-       * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public Builder setArtifactKey( - flyteidl.core.ArtifactId.ArtifactKey.Builder builderForValue) { - if (artifactKeyBuilder_ == null) { - artifactKey_ = builderForValue.build(); - onChanged(); - } else { - artifactKeyBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - *
-       * Specify just project/domain on creation
-       * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public Builder mergeArtifactKey(flyteidl.core.ArtifactId.ArtifactKey value) { - if (artifactKeyBuilder_ == null) { - if (artifactKey_ != null) { - artifactKey_ = - flyteidl.core.ArtifactId.ArtifactKey.newBuilder(artifactKey_).mergeFrom(value).buildPartial(); - } else { - artifactKey_ = value; - } - onChanged(); - } else { - artifactKeyBuilder_.mergeFrom(value); - } - - return this; - } - /** - *
-       * Specify just project/domain on creation
-       * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public Builder clearArtifactKey() { - if (artifactKeyBuilder_ == null) { - artifactKey_ = null; - onChanged(); - } else { - artifactKey_ = null; - artifactKeyBuilder_ = null; - } - - return this; - } - /** - *
-       * Specify just project/domain on creation
-       * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKey.Builder getArtifactKeyBuilder() { - - onChanged(); - return getArtifactKeyFieldBuilder().getBuilder(); - } - /** - *
-       * Specify just project/domain on creation
-       * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder() { - if (artifactKeyBuilder_ != null) { - return artifactKeyBuilder_.getMessageOrBuilder(); - } else { - return artifactKey_ == null ? - flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; - } - } - /** - *
-       * Specify just project/domain on creation
-       * 
- * - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder> - getArtifactKeyFieldBuilder() { - if (artifactKeyBuilder_ == null) { - artifactKeyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder>( - getArtifactKey(), - getParentForChildren(), - isClean()); - artifactKey_ = null; - } - return artifactKeyBuilder_; - } - - private java.lang.Object version_ = ""; - /** - * string version = 3; - */ - public java.lang.String getVersion() { - java.lang.Object ref = version_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - version_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string version = 3; - */ - public com.google.protobuf.ByteString - getVersionBytes() { - java.lang.Object ref = version_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - version_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string version = 3; - */ - public Builder setVersion( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - version_ = value; - onChanged(); - return this; - } - /** - * string version = 3; - */ - public Builder clearVersion() { - - version_ = getDefaultInstance().getVersion(); - onChanged(); - return this; - } - /** - * string version = 3; - */ - public Builder setVersionBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - version_ = value; - 
onChanged(); - return this; - } - - private flyteidl.artifact.Artifacts.ArtifactSpec spec_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSpec, flyteidl.artifact.Artifacts.ArtifactSpec.Builder, flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder> specBuilder_; - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public boolean hasSpec() { - return specBuilder_ != null || spec_ != null; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public flyteidl.artifact.Artifacts.ArtifactSpec getSpec() { - if (specBuilder_ == null) { - return spec_ == null ? flyteidl.artifact.Artifacts.ArtifactSpec.getDefaultInstance() : spec_; - } else { - return specBuilder_.getMessage(); - } - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public Builder setSpec(flyteidl.artifact.Artifacts.ArtifactSpec value) { - if (specBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - spec_ = value; - onChanged(); - } else { - specBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public Builder setSpec( - flyteidl.artifact.Artifacts.ArtifactSpec.Builder builderForValue) { - if (specBuilder_ == null) { - spec_ = builderForValue.build(); - onChanged(); - } else { - specBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public Builder mergeSpec(flyteidl.artifact.Artifacts.ArtifactSpec value) { - if (specBuilder_ == null) { - if (spec_ != null) { - spec_ = - flyteidl.artifact.Artifacts.ArtifactSpec.newBuilder(spec_).mergeFrom(value).buildPartial(); - } else { - spec_ = value; - } - onChanged(); - } else { - specBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public Builder clearSpec() { - if (specBuilder_ == null) { - spec_ = null; - onChanged(); - } else { - spec_ = null; - specBuilder_ = null; - } 
- - return this; - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public flyteidl.artifact.Artifacts.ArtifactSpec.Builder getSpecBuilder() { - - onChanged(); - return getSpecFieldBuilder().getBuilder(); - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - public flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder getSpecOrBuilder() { - if (specBuilder_ != null) { - return specBuilder_.getMessageOrBuilder(); - } else { - return spec_ == null ? - flyteidl.artifact.Artifacts.ArtifactSpec.getDefaultInstance() : spec_; - } - } - /** - * .flyteidl.artifact.ArtifactSpec spec = 2; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSpec, flyteidl.artifact.Artifacts.ArtifactSpec.Builder, flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder> - getSpecFieldBuilder() { - if (specBuilder_ == null) { - specBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSpec, flyteidl.artifact.Artifacts.ArtifactSpec.Builder, flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder>( - getSpec(), - getParentForChildren(), - isClean()); - spec_ = null; - } - return specBuilder_; - } - - private com.google.protobuf.MapField< - java.lang.String, java.lang.String> partitions_; - private com.google.protobuf.MapField - internalGetPartitions() { - if (partitions_ == null) { - return com.google.protobuf.MapField.emptyMapField( - PartitionsDefaultEntryHolder.defaultEntry); - } - return partitions_; - } - private com.google.protobuf.MapField - internalGetMutablePartitions() { - onChanged();; - if (partitions_ == null) { - partitions_ = com.google.protobuf.MapField.newMapField( - PartitionsDefaultEntryHolder.defaultEntry); - } - if (!partitions_.isMutable()) { - partitions_ = partitions_.copy(); - } - return partitions_; - } - - public int getPartitionsCount() { - return internalGetPartitions().getMap().size(); - } - /** - * map<string, string> partitions = 4; - */ - - public boolean containsPartitions( - 
java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } - return internalGetPartitions().getMap().containsKey(key); - } - /** - * Use {@link #getPartitionsMap()} instead. - */ - @java.lang.Deprecated - public java.util.Map getPartitions() { - return getPartitionsMap(); - } - /** - * map<string, string> partitions = 4; - */ - - public java.util.Map getPartitionsMap() { - return internalGetPartitions().getMap(); - } - /** - * map<string, string> partitions = 4; - */ - - public java.lang.String getPartitionsOrDefault( - java.lang.String key, - java.lang.String defaultValue) { - if (key == null) { throw new java.lang.NullPointerException(); } - java.util.Map map = - internalGetPartitions().getMap(); - return map.containsKey(key) ? map.get(key) : defaultValue; - } - /** - * map<string, string> partitions = 4; - */ - - public java.lang.String getPartitionsOrThrow( - java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } - java.util.Map map = - internalGetPartitions().getMap(); - if (!map.containsKey(key)) { - throw new java.lang.IllegalArgumentException(); - } - return map.get(key); - } - - public Builder clearPartitions() { - internalGetMutablePartitions().getMutableMap() - .clear(); - return this; - } - /** - * map<string, string> partitions = 4; - */ - - public Builder removePartitions( - java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } - internalGetMutablePartitions().getMutableMap() - .remove(key); - return this; - } - /** - * Use alternate mutation accessors instead. 
- */ - @java.lang.Deprecated - public java.util.Map - getMutablePartitions() { - return internalGetMutablePartitions().getMutableMap(); - } - /** - * map<string, string> partitions = 4; - */ - public Builder putPartitions( - java.lang.String key, - java.lang.String value) { - if (key == null) { throw new java.lang.NullPointerException(); } - if (value == null) { throw new java.lang.NullPointerException(); } - internalGetMutablePartitions().getMutableMap() - .put(key, value); - return this; - } - /** - * map<string, string> partitions = 4; - */ - - public Builder putAllPartitions( - java.util.Map values) { - internalGetMutablePartitions().getMutableMap() - .putAll(values); - return this; - } - - private java.lang.Object tag_ = ""; - /** - * string tag = 5; - */ - public java.lang.String getTag() { - java.lang.Object ref = tag_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - tag_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string tag = 5; - */ - public com.google.protobuf.ByteString - getTagBytes() { - java.lang.Object ref = tag_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - tag_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string tag = 5; - */ - public Builder setTag( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - tag_ = value; - onChanged(); - return this; - } - /** - * string tag = 5; - */ - public Builder clearTag() { - - tag_ = getDefaultInstance().getTag(); - onChanged(); - return this; - } - /** - * string tag = 5; - */ - public Builder setTagBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - tag_ = value; - onChanged(); - 
return this; - } - - private flyteidl.artifact.Artifacts.ArtifactSource source_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSource, flyteidl.artifact.Artifacts.ArtifactSource.Builder, flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder> sourceBuilder_; - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - public boolean hasSource() { - return sourceBuilder_ != null || source_ != null; - } - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - public flyteidl.artifact.Artifacts.ArtifactSource getSource() { - if (sourceBuilder_ == null) { - return source_ == null ? flyteidl.artifact.Artifacts.ArtifactSource.getDefaultInstance() : source_; - } else { - return sourceBuilder_.getMessage(); - } - } - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - public Builder setSource(flyteidl.artifact.Artifacts.ArtifactSource value) { - if (sourceBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - source_ = value; - onChanged(); - } else { - sourceBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - public Builder setSource( - flyteidl.artifact.Artifacts.ArtifactSource.Builder builderForValue) { - if (sourceBuilder_ == null) { - source_ = builderForValue.build(); - onChanged(); - } else { - sourceBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - public Builder mergeSource(flyteidl.artifact.Artifacts.ArtifactSource value) { - if (sourceBuilder_ == null) { - if (source_ != null) { - source_ = - flyteidl.artifact.Artifacts.ArtifactSource.newBuilder(source_).mergeFrom(value).buildPartial(); - } else { - source_ = value; - } - onChanged(); - } else { - sourceBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - public Builder clearSource() { - if (sourceBuilder_ == null) { - 
source_ = null; - onChanged(); - } else { - source_ = null; - sourceBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - public flyteidl.artifact.Artifacts.ArtifactSource.Builder getSourceBuilder() { - - onChanged(); - return getSourceFieldBuilder().getBuilder(); - } - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - public flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder getSourceOrBuilder() { - if (sourceBuilder_ != null) { - return sourceBuilder_.getMessageOrBuilder(); - } else { - return source_ == null ? - flyteidl.artifact.Artifacts.ArtifactSource.getDefaultInstance() : source_; - } - } - /** - * .flyteidl.artifact.ArtifactSource source = 6; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSource, flyteidl.artifact.Artifacts.ArtifactSource.Builder, flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder> - getSourceFieldBuilder() { - if (sourceBuilder_ == null) { - sourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactSource, flyteidl.artifact.Artifacts.ArtifactSource.Builder, flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder>( - getSource(), - getParentForChildren(), - isClean()); - source_ = null; - } - return sourceBuilder_; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.CreateArtifactRequest) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.CreateArtifactRequest) - private static final flyteidl.artifact.Artifacts.CreateArtifactRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new 
flyteidl.artifact.Artifacts.CreateArtifactRequest(); - } - - public static flyteidl.artifact.Artifacts.CreateArtifactRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public CreateArtifactRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CreateArtifactRequest(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateArtifactRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface ArtifactSourceOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.ArtifactSource) - com.google.protobuf.MessageOrBuilder { - - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - boolean hasWorkflowExecution(); - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getWorkflowExecution(); - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getWorkflowExecutionOrBuilder(); - - /** - * string node_id = 2; - */ - java.lang.String getNodeId(); - /** - * string node_id = 2; - */ - com.google.protobuf.ByteString - getNodeIdBytes(); - - /** - * .flyteidl.core.Identifier task_id = 3; - */ - boolean hasTaskId(); - /** - * .flyteidl.core.Identifier task_id = 3; - */ - flyteidl.core.IdentifierOuterClass.Identifier getTaskId(); - /** - * .flyteidl.core.Identifier task_id = 3; - */ - 
flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getTaskIdOrBuilder(); - - /** - * uint32 retry_attempt = 4; - */ - int getRetryAttempt(); - - /** - *
-     * Uploads, either from the UI or from the CLI, or FlyteRemote, will have this.
-     * 
- * - * string principal = 5; - */ - java.lang.String getPrincipal(); - /** - *
-     * Uploads, either from the UI or from the CLI, or FlyteRemote, will have this.
-     * 
- * - * string principal = 5; - */ - com.google.protobuf.ByteString - getPrincipalBytes(); - } - /** - * Protobuf type {@code flyteidl.artifact.ArtifactSource} - */ - public static final class ArtifactSource extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.ArtifactSource) - ArtifactSourceOrBuilder { - private static final long serialVersionUID = 0L; - // Use ArtifactSource.newBuilder() to construct. - private ArtifactSource(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private ArtifactSource() { - nodeId_ = ""; - principal_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ArtifactSource( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder subBuilder = null; - if (workflowExecution_ != null) { - subBuilder = workflowExecution_.toBuilder(); - } - workflowExecution_ = input.readMessage(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(workflowExecution_); - workflowExecution_ = subBuilder.buildPartial(); - } - - break; - } - case 18: { - java.lang.String s = input.readStringRequireUtf8(); - - nodeId_ = s; - break; - } - case 26: { - flyteidl.core.IdentifierOuterClass.Identifier.Builder subBuilder = 
null; - if (taskId_ != null) { - subBuilder = taskId_.toBuilder(); - } - taskId_ = input.readMessage(flyteidl.core.IdentifierOuterClass.Identifier.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(taskId_); - taskId_ = subBuilder.buildPartial(); - } - - break; - } - case 32: { - - retryAttempt_ = input.readUInt32(); - break; - } - case 42: { - java.lang.String s = input.readStringRequireUtf8(); - - principal_ = s; - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactSource_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactSource_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ArtifactSource.class, flyteidl.artifact.Artifacts.ArtifactSource.Builder.class); - } - - public static final int WORKFLOW_EXECUTION_FIELD_NUMBER = 1; - private flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier workflowExecution_; - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - public boolean hasWorkflowExecution() { - return workflowExecution_ != null; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getWorkflowExecution() 
{ - return workflowExecution_ == null ? flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.getDefaultInstance() : workflowExecution_; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getWorkflowExecutionOrBuilder() { - return getWorkflowExecution(); - } - - public static final int NODE_ID_FIELD_NUMBER = 2; - private volatile java.lang.Object nodeId_; - /** - * string node_id = 2; - */ - public java.lang.String getNodeId() { - java.lang.Object ref = nodeId_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - nodeId_ = s; - return s; - } - } - /** - * string node_id = 2; - */ - public com.google.protobuf.ByteString - getNodeIdBytes() { - java.lang.Object ref = nodeId_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - nodeId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int TASK_ID_FIELD_NUMBER = 3; - private flyteidl.core.IdentifierOuterClass.Identifier taskId_; - /** - * .flyteidl.core.Identifier task_id = 3; - */ - public boolean hasTaskId() { - return taskId_ != null; - } - /** - * .flyteidl.core.Identifier task_id = 3; - */ - public flyteidl.core.IdentifierOuterClass.Identifier getTaskId() { - return taskId_ == null ? 
flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : taskId_; - } - /** - * .flyteidl.core.Identifier task_id = 3; - */ - public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getTaskIdOrBuilder() { - return getTaskId(); - } - - public static final int RETRY_ATTEMPT_FIELD_NUMBER = 4; - private int retryAttempt_; - /** - * uint32 retry_attempt = 4; - */ - public int getRetryAttempt() { - return retryAttempt_; - } - - public static final int PRINCIPAL_FIELD_NUMBER = 5; - private volatile java.lang.Object principal_; - /** - *
-     * Uploads, either from the UI or from the CLI, or FlyteRemote, will have this.
-     * 
- * - * string principal = 5; - */ - public java.lang.String getPrincipal() { - java.lang.Object ref = principal_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - principal_ = s; - return s; - } - } - /** - *
-     * Uploads, either from the UI or from the CLI, or FlyteRemote, will have this.
-     * 
- * - * string principal = 5; - */ - public com.google.protobuf.ByteString - getPrincipalBytes() { - java.lang.Object ref = principal_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - principal_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (workflowExecution_ != null) { - output.writeMessage(1, getWorkflowExecution()); - } - if (!getNodeIdBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nodeId_); - } - if (taskId_ != null) { - output.writeMessage(3, getTaskId()); - } - if (retryAttempt_ != 0) { - output.writeUInt32(4, retryAttempt_); - } - if (!getPrincipalBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 5, principal_); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (workflowExecution_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getWorkflowExecution()); - } - if (!getNodeIdBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nodeId_); - } - if (taskId_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, getTaskId()); - } - if (retryAttempt_ != 0) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(4, retryAttempt_); - } - if (!getPrincipalBytes().isEmpty()) { - size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(5, principal_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.ArtifactSource)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.ArtifactSource other = (flyteidl.artifact.Artifacts.ArtifactSource) obj; - - if (hasWorkflowExecution() != other.hasWorkflowExecution()) return false; - if (hasWorkflowExecution()) { - if (!getWorkflowExecution() - .equals(other.getWorkflowExecution())) return false; - } - if (!getNodeId() - .equals(other.getNodeId())) return false; - if (hasTaskId() != other.hasTaskId()) return false; - if (hasTaskId()) { - if (!getTaskId() - .equals(other.getTaskId())) return false; - } - if (getRetryAttempt() - != other.getRetryAttempt()) return false; - if (!getPrincipal() - .equals(other.getPrincipal())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasWorkflowExecution()) { - hash = (37 * hash) + WORKFLOW_EXECUTION_FIELD_NUMBER; - hash = (53 * hash) + getWorkflowExecution().hashCode(); - } - hash = (37 * hash) + NODE_ID_FIELD_NUMBER; - hash = (53 * hash) + getNodeId().hashCode(); - if (hasTaskId()) { - hash = (37 * hash) + TASK_ID_FIELD_NUMBER; - hash = (53 * hash) + getTaskId().hashCode(); - } - hash = (37 * hash) + RETRY_ATTEMPT_FIELD_NUMBER; - hash = (53 * hash) + getRetryAttempt(); - hash = (37 * hash) + PRINCIPAL_FIELD_NUMBER; - hash = (53 * hash) + getPrincipal().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.ArtifactSource 
parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactSource parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactSource parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactSource parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactSource parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactSource parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactSource parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactSource parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactSource parseDelimitedFrom(java.io.InputStream input) - throws 
java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactSource parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactSource parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactSource parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.ArtifactSource prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.ArtifactSource} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.ArtifactSource) - flyteidl.artifact.Artifacts.ArtifactSourceOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactSource_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactSource_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ArtifactSource.class, flyteidl.artifact.Artifacts.ArtifactSource.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.ArtifactSource.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (workflowExecutionBuilder_ == null) { - workflowExecution_ = null; - } else { - workflowExecution_ = null; - workflowExecutionBuilder_ = null; - } - nodeId_ = ""; - - if (taskIdBuilder_ == null) { - taskId_ = null; - } else { - taskId_ = null; - taskIdBuilder_ = null; - } - retryAttempt_ = 0; - - principal_ = ""; - - return this; - } - - @java.lang.Override - public 
com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactSource_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactSource getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.ArtifactSource.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactSource build() { - flyteidl.artifact.Artifacts.ArtifactSource result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactSource buildPartial() { - flyteidl.artifact.Artifacts.ArtifactSource result = new flyteidl.artifact.Artifacts.ArtifactSource(this); - if (workflowExecutionBuilder_ == null) { - result.workflowExecution_ = workflowExecution_; - } else { - result.workflowExecution_ = workflowExecutionBuilder_.build(); - } - result.nodeId_ = nodeId_; - if (taskIdBuilder_ == null) { - result.taskId_ = taskId_; - } else { - result.taskId_ = taskIdBuilder_.build(); - } - result.retryAttempt_ = retryAttempt_; - result.principal_ = principal_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, 
value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.ArtifactSource) { - return mergeFrom((flyteidl.artifact.Artifacts.ArtifactSource)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.ArtifactSource other) { - if (other == flyteidl.artifact.Artifacts.ArtifactSource.getDefaultInstance()) return this; - if (other.hasWorkflowExecution()) { - mergeWorkflowExecution(other.getWorkflowExecution()); - } - if (!other.getNodeId().isEmpty()) { - nodeId_ = other.nodeId_; - onChanged(); - } - if (other.hasTaskId()) { - mergeTaskId(other.getTaskId()); - } - if (other.getRetryAttempt() != 0) { - setRetryAttempt(other.getRetryAttempt()); - } - if (!other.getPrincipal().isEmpty()) { - principal_ = other.principal_; - onChanged(); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.ArtifactSource parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.ArtifactSource) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier workflowExecution_; - private 
com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder> workflowExecutionBuilder_; - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - public boolean hasWorkflowExecution() { - return workflowExecutionBuilder_ != null || workflowExecution_ != null; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getWorkflowExecution() { - if (workflowExecutionBuilder_ == null) { - return workflowExecution_ == null ? flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.getDefaultInstance() : workflowExecution_; - } else { - return workflowExecutionBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - public Builder setWorkflowExecution(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier value) { - if (workflowExecutionBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - workflowExecution_ = value; - onChanged(); - } else { - workflowExecutionBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - public Builder setWorkflowExecution( - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder builderForValue) { - if (workflowExecutionBuilder_ == null) { - workflowExecution_ = builderForValue.build(); - onChanged(); - } else { - workflowExecutionBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - public Builder mergeWorkflowExecution(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier value) { - if (workflowExecutionBuilder_ == null) { 
- if (workflowExecution_ != null) { - workflowExecution_ = - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.newBuilder(workflowExecution_).mergeFrom(value).buildPartial(); - } else { - workflowExecution_ = value; - } - onChanged(); - } else { - workflowExecutionBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - public Builder clearWorkflowExecution() { - if (workflowExecutionBuilder_ == null) { - workflowExecution_ = null; - onChanged(); - } else { - workflowExecution_ = null; - workflowExecutionBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder getWorkflowExecutionBuilder() { - - onChanged(); - return getWorkflowExecutionFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getWorkflowExecutionOrBuilder() { - if (workflowExecutionBuilder_ != null) { - return workflowExecutionBuilder_.getMessageOrBuilder(); - } else { - return workflowExecution_ == null ? 
- flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.getDefaultInstance() : workflowExecution_; - } - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier workflow_execution = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder> - getWorkflowExecutionFieldBuilder() { - if (workflowExecutionBuilder_ == null) { - workflowExecutionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder>( - getWorkflowExecution(), - getParentForChildren(), - isClean()); - workflowExecution_ = null; - } - return workflowExecutionBuilder_; - } - - private java.lang.Object nodeId_ = ""; - /** - * string node_id = 2; - */ - public java.lang.String getNodeId() { - java.lang.Object ref = nodeId_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - nodeId_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string node_id = 2; - */ - public com.google.protobuf.ByteString - getNodeIdBytes() { - java.lang.Object ref = nodeId_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - nodeId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string node_id = 2; - */ - public Builder setNodeId( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - nodeId_ = value; - onChanged(); - return this; - } - /** - * string node_id = 2; - */ - public Builder 
clearNodeId() { - - nodeId_ = getDefaultInstance().getNodeId(); - onChanged(); - return this; - } - /** - * string node_id = 2; - */ - public Builder setNodeIdBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - nodeId_ = value; - onChanged(); - return this; - } - - private flyteidl.core.IdentifierOuterClass.Identifier taskId_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> taskIdBuilder_; - /** - * .flyteidl.core.Identifier task_id = 3; - */ - public boolean hasTaskId() { - return taskIdBuilder_ != null || taskId_ != null; - } - /** - * .flyteidl.core.Identifier task_id = 3; - */ - public flyteidl.core.IdentifierOuterClass.Identifier getTaskId() { - if (taskIdBuilder_ == null) { - return taskId_ == null ? flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : taskId_; - } else { - return taskIdBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.Identifier task_id = 3; - */ - public Builder setTaskId(flyteidl.core.IdentifierOuterClass.Identifier value) { - if (taskIdBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - taskId_ = value; - onChanged(); - } else { - taskIdBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.Identifier task_id = 3; - */ - public Builder setTaskId( - flyteidl.core.IdentifierOuterClass.Identifier.Builder builderForValue) { - if (taskIdBuilder_ == null) { - taskId_ = builderForValue.build(); - onChanged(); - } else { - taskIdBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.Identifier task_id = 3; - */ - public Builder mergeTaskId(flyteidl.core.IdentifierOuterClass.Identifier value) { - if (taskIdBuilder_ == null) { - if (taskId_ != null) { - taskId_ = - 
flyteidl.core.IdentifierOuterClass.Identifier.newBuilder(taskId_).mergeFrom(value).buildPartial(); - } else { - taskId_ = value; - } - onChanged(); - } else { - taskIdBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.Identifier task_id = 3; - */ - public Builder clearTaskId() { - if (taskIdBuilder_ == null) { - taskId_ = null; - onChanged(); - } else { - taskId_ = null; - taskIdBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.Identifier task_id = 3; - */ - public flyteidl.core.IdentifierOuterClass.Identifier.Builder getTaskIdBuilder() { - - onChanged(); - return getTaskIdFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.Identifier task_id = 3; - */ - public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getTaskIdOrBuilder() { - if (taskIdBuilder_ != null) { - return taskIdBuilder_.getMessageOrBuilder(); - } else { - return taskId_ == null ? - flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : taskId_; - } - } - /** - * .flyteidl.core.Identifier task_id = 3; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> - getTaskIdFieldBuilder() { - if (taskIdBuilder_ == null) { - taskIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder>( - getTaskId(), - getParentForChildren(), - isClean()); - taskId_ = null; - } - return taskIdBuilder_; - } - - private int retryAttempt_ ; - /** - * uint32 retry_attempt = 4; - */ - public int getRetryAttempt() { - return retryAttempt_; - } - /** - * uint32 retry_attempt = 4; - */ - public Builder setRetryAttempt(int value) { - - retryAttempt_ = value; - onChanged(); - return this; - } - /** - * uint32 retry_attempt = 4; - */ - public Builder 
clearRetryAttempt() { - - retryAttempt_ = 0; - onChanged(); - return this; - } - - private java.lang.Object principal_ = ""; - /** - *
-       * Uploads, either from the UI or from the CLI, or FlyteRemote, will have this.
-       * 
- * - * string principal = 5; - */ - public java.lang.String getPrincipal() { - java.lang.Object ref = principal_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - principal_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - *
-       * Uploads, either from the UI or from the CLI, or FlyteRemote, will have this.
-       * 
- * - * string principal = 5; - */ - public com.google.protobuf.ByteString - getPrincipalBytes() { - java.lang.Object ref = principal_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - principal_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - *
-       * Uploads, either from the UI or from the CLI, or FlyteRemote, will have this.
-       * 
- * - * string principal = 5; - */ - public Builder setPrincipal( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - principal_ = value; - onChanged(); - return this; - } - /** - *
-       * Uploads, either from the UI or from the CLI, or FlyteRemote, will have this.
-       * 
- * - * string principal = 5; - */ - public Builder clearPrincipal() { - - principal_ = getDefaultInstance().getPrincipal(); - onChanged(); - return this; - } - /** - *
-       * Uploads, either from the UI or from the CLI, or FlyteRemote, will have this.
-       * 
- * - * string principal = 5; - */ - public Builder setPrincipalBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - principal_ = value; - onChanged(); - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.ArtifactSource) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ArtifactSource) - private static final flyteidl.artifact.Artifacts.ArtifactSource DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.ArtifactSource(); - } - - public static flyteidl.artifact.Artifacts.ArtifactSource getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public ArtifactSource parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ArtifactSource(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactSource getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface ArtifactSpecOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.ArtifactSpec) - com.google.protobuf.MessageOrBuilder { - - /** - * 
.flyteidl.core.Literal value = 1; - */ - boolean hasValue(); - /** - * .flyteidl.core.Literal value = 1; - */ - flyteidl.core.Literals.Literal getValue(); - /** - * .flyteidl.core.Literal value = 1; - */ - flyteidl.core.Literals.LiteralOrBuilder getValueOrBuilder(); - - /** - *
-     * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-     * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-     * type to all Literals is a lot of work.
-     * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - boolean hasType(); - /** - *
-     * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-     * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-     * type to all Literals is a lot of work.
-     * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - flyteidl.core.Types.LiteralType getType(); - /** - *
-     * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-     * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-     * type to all Literals is a lot of work.
-     * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - flyteidl.core.Types.LiteralTypeOrBuilder getTypeOrBuilder(); - - /** - * string short_description = 3; - */ - java.lang.String getShortDescription(); - /** - * string short_description = 3; - */ - com.google.protobuf.ByteString - getShortDescriptionBytes(); - - /** - *
-     * Additional user metadata
-     * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - boolean hasUserMetadata(); - /** - *
-     * Additional user metadata
-     * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - com.google.protobuf.Any getUserMetadata(); - /** - *
-     * Additional user metadata
-     * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - com.google.protobuf.AnyOrBuilder getUserMetadataOrBuilder(); - - /** - * string metadata_type = 5; - */ - java.lang.String getMetadataType(); - /** - * string metadata_type = 5; - */ - com.google.protobuf.ByteString - getMetadataTypeBytes(); - } - /** - * Protobuf type {@code flyteidl.artifact.ArtifactSpec} - */ - public static final class ArtifactSpec extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.ArtifactSpec) - ArtifactSpecOrBuilder { - private static final long serialVersionUID = 0L; - // Use ArtifactSpec.newBuilder() to construct. - private ArtifactSpec(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private ArtifactSpec() { - shortDescription_ = ""; - metadataType_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ArtifactSpec( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.Literals.Literal.Builder subBuilder = null; - if (value_ != null) { - subBuilder = value_.toBuilder(); - } - value_ = input.readMessage(flyteidl.core.Literals.Literal.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(value_); - value_ = subBuilder.buildPartial(); - } - - break; - } - case 18: { - flyteidl.core.Types.LiteralType.Builder subBuilder = null; - if (type_ != null) { - 
subBuilder = type_.toBuilder(); - } - type_ = input.readMessage(flyteidl.core.Types.LiteralType.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(type_); - type_ = subBuilder.buildPartial(); - } - - break; - } - case 26: { - java.lang.String s = input.readStringRequireUtf8(); - - shortDescription_ = s; - break; - } - case 34: { - com.google.protobuf.Any.Builder subBuilder = null; - if (userMetadata_ != null) { - subBuilder = userMetadata_.toBuilder(); - } - userMetadata_ = input.readMessage(com.google.protobuf.Any.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(userMetadata_); - userMetadata_ = subBuilder.buildPartial(); - } - - break; - } - case 42: { - java.lang.String s = input.readStringRequireUtf8(); - - metadataType_ = s; - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactSpec_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactSpec_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ArtifactSpec.class, flyteidl.artifact.Artifacts.ArtifactSpec.Builder.class); - } - - public static final int VALUE_FIELD_NUMBER = 1; - private flyteidl.core.Literals.Literal value_; - /** - * .flyteidl.core.Literal value = 1; - */ - 
public boolean hasValue() { - return value_ != null; - } - /** - * .flyteidl.core.Literal value = 1; - */ - public flyteidl.core.Literals.Literal getValue() { - return value_ == null ? flyteidl.core.Literals.Literal.getDefaultInstance() : value_; - } - /** - * .flyteidl.core.Literal value = 1; - */ - public flyteidl.core.Literals.LiteralOrBuilder getValueOrBuilder() { - return getValue(); - } - - public static final int TYPE_FIELD_NUMBER = 2; - private flyteidl.core.Types.LiteralType type_; - /** - *
-     * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-     * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-     * type to all Literals is a lot of work.
-     * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - public boolean hasType() { - return type_ != null; - } - /** - *
-     * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-     * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-     * type to all Literals is a lot of work.
-     * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - public flyteidl.core.Types.LiteralType getType() { - return type_ == null ? flyteidl.core.Types.LiteralType.getDefaultInstance() : type_; - } - /** - *
-     * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-     * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-     * type to all Literals is a lot of work.
-     * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - public flyteidl.core.Types.LiteralTypeOrBuilder getTypeOrBuilder() { - return getType(); - } - - public static final int SHORT_DESCRIPTION_FIELD_NUMBER = 3; - private volatile java.lang.Object shortDescription_; - /** - * string short_description = 3; - */ - public java.lang.String getShortDescription() { - java.lang.Object ref = shortDescription_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - shortDescription_ = s; - return s; - } - } - /** - * string short_description = 3; - */ - public com.google.protobuf.ByteString - getShortDescriptionBytes() { - java.lang.Object ref = shortDescription_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - shortDescription_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int USER_METADATA_FIELD_NUMBER = 4; - private com.google.protobuf.Any userMetadata_; - /** - *
-     * Additional user metadata
-     * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - public boolean hasUserMetadata() { - return userMetadata_ != null; - } - /** - *
-     * Additional user metadata
-     * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - public com.google.protobuf.Any getUserMetadata() { - return userMetadata_ == null ? com.google.protobuf.Any.getDefaultInstance() : userMetadata_; - } - /** - *
-     * Additional user metadata
-     * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - public com.google.protobuf.AnyOrBuilder getUserMetadataOrBuilder() { - return getUserMetadata(); - } - - public static final int METADATA_TYPE_FIELD_NUMBER = 5; - private volatile java.lang.Object metadataType_; - /** - * string metadata_type = 5; - */ - public java.lang.String getMetadataType() { - java.lang.Object ref = metadataType_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - metadataType_ = s; - return s; - } - } - /** - * string metadata_type = 5; - */ - public com.google.protobuf.ByteString - getMetadataTypeBytes() { - java.lang.Object ref = metadataType_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - metadataType_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (value_ != null) { - output.writeMessage(1, getValue()); - } - if (type_ != null) { - output.writeMessage(2, getType()); - } - if (!getShortDescriptionBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, shortDescription_); - } - if (userMetadata_ != null) { - output.writeMessage(4, getUserMetadata()); - } - if (!getMetadataTypeBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 5, metadataType_); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int 
getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (value_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getValue()); - } - if (type_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getType()); - } - if (!getShortDescriptionBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, shortDescription_); - } - if (userMetadata_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, getUserMetadata()); - } - if (!getMetadataTypeBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, metadataType_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.ArtifactSpec)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.ArtifactSpec other = (flyteidl.artifact.Artifacts.ArtifactSpec) obj; - - if (hasValue() != other.hasValue()) return false; - if (hasValue()) { - if (!getValue() - .equals(other.getValue())) return false; - } - if (hasType() != other.hasType()) return false; - if (hasType()) { - if (!getType() - .equals(other.getType())) return false; - } - if (!getShortDescription() - .equals(other.getShortDescription())) return false; - if (hasUserMetadata() != other.hasUserMetadata()) return false; - if (hasUserMetadata()) { - if (!getUserMetadata() - .equals(other.getUserMetadata())) return false; - } - if (!getMetadataType() - .equals(other.getMetadataType())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if 
(hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - } - if (hasType()) { - hash = (37 * hash) + TYPE_FIELD_NUMBER; - hash = (53 * hash) + getType().hashCode(); - } - hash = (37 * hash) + SHORT_DESCRIPTION_FIELD_NUMBER; - hash = (53 * hash) + getShortDescription().hashCode(); - if (hasUserMetadata()) { - hash = (37 * hash) + USER_METADATA_FIELD_NUMBER; - hash = (53 * hash) + getUserMetadata().hashCode(); - } - hash = (37 * hash) + METADATA_TYPE_FIELD_NUMBER; - hash = (53 * hash) + getMetadataType().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.ArtifactSpec parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactSpec parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactSpec parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactSpec parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactSpec parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactSpec parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactSpec parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactSpec parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactSpec parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactSpec parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactSpec parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactSpec parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.ArtifactSpec prototype) { - 
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.ArtifactSpec} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.ArtifactSpec) - flyteidl.artifact.Artifacts.ArtifactSpecOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactSpec_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactSpec_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ArtifactSpec.class, flyteidl.artifact.Artifacts.ArtifactSpec.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.ArtifactSpec.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (valueBuilder_ == null) { - value_ = null; - } else { - value_ = null; - valueBuilder_ = null; - } - if (typeBuilder_ == null) { - type_ = null; - } else { - type_ = null; - typeBuilder_ = null; - } - shortDescription_ = ""; - - if 
(userMetadataBuilder_ == null) { - userMetadata_ = null; - } else { - userMetadata_ = null; - userMetadataBuilder_ = null; - } - metadataType_ = ""; - - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactSpec_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactSpec getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.ArtifactSpec.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactSpec build() { - flyteidl.artifact.Artifacts.ArtifactSpec result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactSpec buildPartial() { - flyteidl.artifact.Artifacts.ArtifactSpec result = new flyteidl.artifact.Artifacts.ArtifactSpec(this); - if (valueBuilder_ == null) { - result.value_ = value_; - } else { - result.value_ = valueBuilder_.build(); - } - if (typeBuilder_ == null) { - result.type_ = type_; - } else { - result.type_ = typeBuilder_.build(); - } - result.shortDescription_ = shortDescription_; - if (userMetadataBuilder_ == null) { - result.userMetadata_ = userMetadata_; - } else { - result.userMetadata_ = userMetadataBuilder_.build(); - } - result.metadataType_ = metadataType_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - 
com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.ArtifactSpec) { - return mergeFrom((flyteidl.artifact.Artifacts.ArtifactSpec)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.ArtifactSpec other) { - if (other == flyteidl.artifact.Artifacts.ArtifactSpec.getDefaultInstance()) return this; - if (other.hasValue()) { - mergeValue(other.getValue()); - } - if (other.hasType()) { - mergeType(other.getType()); - } - if (!other.getShortDescription().isEmpty()) { - shortDescription_ = other.shortDescription_; - onChanged(); - } - if (other.hasUserMetadata()) { - mergeUserMetadata(other.getUserMetadata()); - } - if (!other.getMetadataType().isEmpty()) { - metadataType_ = other.metadataType_; - onChanged(); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.ArtifactSpec parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.ArtifactSpec) 
e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private flyteidl.core.Literals.Literal value_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.Literal, flyteidl.core.Literals.Literal.Builder, flyteidl.core.Literals.LiteralOrBuilder> valueBuilder_; - /** - * .flyteidl.core.Literal value = 1; - */ - public boolean hasValue() { - return valueBuilder_ != null || value_ != null; - } - /** - * .flyteidl.core.Literal value = 1; - */ - public flyteidl.core.Literals.Literal getValue() { - if (valueBuilder_ == null) { - return value_ == null ? flyteidl.core.Literals.Literal.getDefaultInstance() : value_; - } else { - return valueBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.Literal value = 1; - */ - public Builder setValue(flyteidl.core.Literals.Literal value) { - if (valueBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - value_ = value; - onChanged(); - } else { - valueBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.Literal value = 1; - */ - public Builder setValue( - flyteidl.core.Literals.Literal.Builder builderForValue) { - if (valueBuilder_ == null) { - value_ = builderForValue.build(); - onChanged(); - } else { - valueBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.Literal value = 1; - */ - public Builder mergeValue(flyteidl.core.Literals.Literal value) { - if (valueBuilder_ == null) { - if (value_ != null) { - value_ = - flyteidl.core.Literals.Literal.newBuilder(value_).mergeFrom(value).buildPartial(); - } else { - value_ = value; - } - onChanged(); - } else { - valueBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.Literal value = 1; - */ - public Builder clearValue() { - if (valueBuilder_ == null) { - value_ = null; - onChanged(); - } else { - value_ = null; - valueBuilder_ = 
null; - } - - return this; - } - /** - * .flyteidl.core.Literal value = 1; - */ - public flyteidl.core.Literals.Literal.Builder getValueBuilder() { - - onChanged(); - return getValueFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.Literal value = 1; - */ - public flyteidl.core.Literals.LiteralOrBuilder getValueOrBuilder() { - if (valueBuilder_ != null) { - return valueBuilder_.getMessageOrBuilder(); - } else { - return value_ == null ? - flyteidl.core.Literals.Literal.getDefaultInstance() : value_; - } - } - /** - * .flyteidl.core.Literal value = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.Literal, flyteidl.core.Literals.Literal.Builder, flyteidl.core.Literals.LiteralOrBuilder> - getValueFieldBuilder() { - if (valueBuilder_ == null) { - valueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.Literal, flyteidl.core.Literals.Literal.Builder, flyteidl.core.Literals.LiteralOrBuilder>( - getValue(), - getParentForChildren(), - isClean()); - value_ = null; - } - return valueBuilder_; - } - - private flyteidl.core.Types.LiteralType type_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Types.LiteralType, flyteidl.core.Types.LiteralType.Builder, flyteidl.core.Types.LiteralTypeOrBuilder> typeBuilder_; - /** - *
-       * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-       * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-       * type to all Literals is a lot of work.
-       * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - public boolean hasType() { - return typeBuilder_ != null || type_ != null; - } - /** - *
-       * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-       * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-       * type to all Literals is a lot of work.
-       * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - public flyteidl.core.Types.LiteralType getType() { - if (typeBuilder_ == null) { - return type_ == null ? flyteidl.core.Types.LiteralType.getDefaultInstance() : type_; - } else { - return typeBuilder_.getMessage(); - } - } - /** - *
-       * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-       * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-       * type to all Literals is a lot of work.
-       * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - public Builder setType(flyteidl.core.Types.LiteralType value) { - if (typeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - type_ = value; - onChanged(); - } else { - typeBuilder_.setMessage(value); - } - - return this; - } - /** - *
-       * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-       * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-       * type to all Literals is a lot of work.
-       * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - public Builder setType( - flyteidl.core.Types.LiteralType.Builder builderForValue) { - if (typeBuilder_ == null) { - type_ = builderForValue.build(); - onChanged(); - } else { - typeBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - *
-       * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-       * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-       * type to all Literals is a lot of work.
-       * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - public Builder mergeType(flyteidl.core.Types.LiteralType value) { - if (typeBuilder_ == null) { - if (type_ != null) { - type_ = - flyteidl.core.Types.LiteralType.newBuilder(type_).mergeFrom(value).buildPartial(); - } else { - type_ = value; - } - onChanged(); - } else { - typeBuilder_.mergeFrom(value); - } - - return this; - } - /** - *
-       * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-       * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-       * type to all Literals is a lot of work.
-       * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - public Builder clearType() { - if (typeBuilder_ == null) { - type_ = null; - onChanged(); - } else { - type_ = null; - typeBuilder_ = null; - } - - return this; - } - /** - *
-       * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-       * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-       * type to all Literals is a lot of work.
-       * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - public flyteidl.core.Types.LiteralType.Builder getTypeBuilder() { - - onChanged(); - return getTypeFieldBuilder().getBuilder(); - } - /** - *
-       * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-       * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-       * type to all Literals is a lot of work.
-       * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - public flyteidl.core.Types.LiteralTypeOrBuilder getTypeOrBuilder() { - if (typeBuilder_ != null) { - return typeBuilder_.getMessageOrBuilder(); - } else { - return type_ == null ? - flyteidl.core.Types.LiteralType.getDefaultInstance() : type_; - } - } - /** - *
-       * This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but
-       * forgets to change the name, that is okay. And the reason why this is a separate field is because adding the
-       * type to all Literals is a lot of work.
-       * 
- * - * .flyteidl.core.LiteralType type = 2; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Types.LiteralType, flyteidl.core.Types.LiteralType.Builder, flyteidl.core.Types.LiteralTypeOrBuilder> - getTypeFieldBuilder() { - if (typeBuilder_ == null) { - typeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Types.LiteralType, flyteidl.core.Types.LiteralType.Builder, flyteidl.core.Types.LiteralTypeOrBuilder>( - getType(), - getParentForChildren(), - isClean()); - type_ = null; - } - return typeBuilder_; - } - - private java.lang.Object shortDescription_ = ""; - /** - * string short_description = 3; - */ - public java.lang.String getShortDescription() { - java.lang.Object ref = shortDescription_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - shortDescription_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string short_description = 3; - */ - public com.google.protobuf.ByteString - getShortDescriptionBytes() { - java.lang.Object ref = shortDescription_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - shortDescription_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string short_description = 3; - */ - public Builder setShortDescription( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - shortDescription_ = value; - onChanged(); - return this; - } - /** - * string short_description = 3; - */ - public Builder clearShortDescription() { - - shortDescription_ = getDefaultInstance().getShortDescription(); - onChanged(); - return this; - } - /** - * string short_description = 3; - */ - public Builder setShortDescriptionBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw 
new NullPointerException(); - } - checkByteStringIsUtf8(value); - - shortDescription_ = value; - onChanged(); - return this; - } - - private com.google.protobuf.Any userMetadata_; - private com.google.protobuf.SingleFieldBuilderV3< - com.google.protobuf.Any, com.google.protobuf.Any.Builder, com.google.protobuf.AnyOrBuilder> userMetadataBuilder_; - /** - *
-       * Additional user metadata
-       * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - public boolean hasUserMetadata() { - return userMetadataBuilder_ != null || userMetadata_ != null; - } - /** - *
-       * Additional user metadata
-       * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - public com.google.protobuf.Any getUserMetadata() { - if (userMetadataBuilder_ == null) { - return userMetadata_ == null ? com.google.protobuf.Any.getDefaultInstance() : userMetadata_; - } else { - return userMetadataBuilder_.getMessage(); - } - } - /** - *
-       * Additional user metadata
-       * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - public Builder setUserMetadata(com.google.protobuf.Any value) { - if (userMetadataBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - userMetadata_ = value; - onChanged(); - } else { - userMetadataBuilder_.setMessage(value); - } - - return this; - } - /** - *
-       * Additional user metadata
-       * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - public Builder setUserMetadata( - com.google.protobuf.Any.Builder builderForValue) { - if (userMetadataBuilder_ == null) { - userMetadata_ = builderForValue.build(); - onChanged(); - } else { - userMetadataBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - *
-       * Additional user metadata
-       * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - public Builder mergeUserMetadata(com.google.protobuf.Any value) { - if (userMetadataBuilder_ == null) { - if (userMetadata_ != null) { - userMetadata_ = - com.google.protobuf.Any.newBuilder(userMetadata_).mergeFrom(value).buildPartial(); - } else { - userMetadata_ = value; - } - onChanged(); - } else { - userMetadataBuilder_.mergeFrom(value); - } - - return this; - } - /** - *
-       * Additional user metadata
-       * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - public Builder clearUserMetadata() { - if (userMetadataBuilder_ == null) { - userMetadata_ = null; - onChanged(); - } else { - userMetadata_ = null; - userMetadataBuilder_ = null; - } - - return this; - } - /** - *
-       * Additional user metadata
-       * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - public com.google.protobuf.Any.Builder getUserMetadataBuilder() { - - onChanged(); - return getUserMetadataFieldBuilder().getBuilder(); - } - /** - *
-       * Additional user metadata
-       * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - public com.google.protobuf.AnyOrBuilder getUserMetadataOrBuilder() { - if (userMetadataBuilder_ != null) { - return userMetadataBuilder_.getMessageOrBuilder(); - } else { - return userMetadata_ == null ? - com.google.protobuf.Any.getDefaultInstance() : userMetadata_; - } - } - /** - *
-       * Additional user metadata
-       * 
- * - * .google.protobuf.Any user_metadata = 4; - */ - private com.google.protobuf.SingleFieldBuilderV3< - com.google.protobuf.Any, com.google.protobuf.Any.Builder, com.google.protobuf.AnyOrBuilder> - getUserMetadataFieldBuilder() { - if (userMetadataBuilder_ == null) { - userMetadataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - com.google.protobuf.Any, com.google.protobuf.Any.Builder, com.google.protobuf.AnyOrBuilder>( - getUserMetadata(), - getParentForChildren(), - isClean()); - userMetadata_ = null; - } - return userMetadataBuilder_; - } - - private java.lang.Object metadataType_ = ""; - /** - * string metadata_type = 5; - */ - public java.lang.String getMetadataType() { - java.lang.Object ref = metadataType_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - metadataType_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string metadata_type = 5; - */ - public com.google.protobuf.ByteString - getMetadataTypeBytes() { - java.lang.Object ref = metadataType_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - metadataType_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string metadata_type = 5; - */ - public Builder setMetadataType( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - metadataType_ = value; - onChanged(); - return this; - } - /** - * string metadata_type = 5; - */ - public Builder clearMetadataType() { - - metadataType_ = getDefaultInstance().getMetadataType(); - onChanged(); - return this; - } - /** - * string metadata_type = 5; - */ - public Builder setMetadataTypeBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - 
metadataType_ = value; - onChanged(); - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.ArtifactSpec) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ArtifactSpec) - private static final flyteidl.artifact.Artifacts.ArtifactSpec DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.ArtifactSpec(); - } - - public static flyteidl.artifact.Artifacts.ArtifactSpec getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public ArtifactSpec parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ArtifactSpec(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactSpec getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface CreateArtifactResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.CreateArtifactResponse) - com.google.protobuf.MessageOrBuilder { - - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - boolean hasArtifact(); - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - flyteidl.artifact.Artifacts.Artifact getArtifact(); - /** - * .flyteidl.artifact.Artifact 
artifact = 1; - */ - flyteidl.artifact.Artifacts.ArtifactOrBuilder getArtifactOrBuilder(); - } - /** - * Protobuf type {@code flyteidl.artifact.CreateArtifactResponse} - */ - public static final class CreateArtifactResponse extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.CreateArtifactResponse) - CreateArtifactResponseOrBuilder { - private static final long serialVersionUID = 0L; - // Use CreateArtifactResponse.newBuilder() to construct. - private CreateArtifactResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private CreateArtifactResponse() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private CreateArtifactResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.artifact.Artifacts.Artifact.Builder subBuilder = null; - if (artifact_ != null) { - subBuilder = artifact_.toBuilder(); - } - artifact_ = input.readMessage(flyteidl.artifact.Artifacts.Artifact.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(artifact_); - artifact_ = subBuilder.buildPartial(); - } - - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch 
(java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateArtifactResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateArtifactResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.CreateArtifactResponse.class, flyteidl.artifact.Artifacts.CreateArtifactResponse.Builder.class); - } - - public static final int ARTIFACT_FIELD_NUMBER = 1; - private flyteidl.artifact.Artifacts.Artifact artifact_; - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public boolean hasArtifact() { - return artifact_ != null; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public flyteidl.artifact.Artifacts.Artifact getArtifact() { - return artifact_ == null ? 
flyteidl.artifact.Artifacts.Artifact.getDefaultInstance() : artifact_; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactOrBuilder getArtifactOrBuilder() { - return getArtifact(); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (artifact_ != null) { - output.writeMessage(1, getArtifact()); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (artifact_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getArtifact()); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.CreateArtifactResponse)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.CreateArtifactResponse other = (flyteidl.artifact.Artifacts.CreateArtifactResponse) obj; - - if (hasArtifact() != other.hasArtifact()) return false; - if (hasArtifact()) { - if (!getArtifact() - .equals(other.getArtifact())) return false; - } - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasArtifact()) { - hash = (37 * hash) + ARTIFACT_FIELD_NUMBER; - hash = (53 * hash) + getArtifact().hashCode(); - } - hash = (29 * hash) 
+ unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return 
com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateArtifactResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.CreateArtifactResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.CreateArtifactResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.CreateArtifactResponse) - flyteidl.artifact.Artifacts.CreateArtifactResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateArtifactResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateArtifactResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.CreateArtifactResponse.class, flyteidl.artifact.Artifacts.CreateArtifactResponse.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.CreateArtifactResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (artifactBuilder_ == null) { - artifact_ = null; - } else { - artifact_ = null; - artifactBuilder_ = null; - } - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateArtifactResponse_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateArtifactResponse getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.CreateArtifactResponse.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateArtifactResponse build() { - flyteidl.artifact.Artifacts.CreateArtifactResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateArtifactResponse buildPartial() { - flyteidl.artifact.Artifacts.CreateArtifactResponse result = new flyteidl.artifact.Artifacts.CreateArtifactResponse(this); - if (artifactBuilder_ == null) { - result.artifact_ = artifact_; - } else { - result.artifact_ = artifactBuilder_.build(); - } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder 
mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.CreateArtifactResponse) { - return mergeFrom((flyteidl.artifact.Artifacts.CreateArtifactResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.CreateArtifactResponse other) { - if (other == flyteidl.artifact.Artifacts.CreateArtifactResponse.getDefaultInstance()) return this; - if (other.hasArtifact()) { - mergeArtifact(other.getArtifact()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.CreateArtifactResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.CreateArtifactResponse) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private flyteidl.artifact.Artifacts.Artifact artifact_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.Artifact, flyteidl.artifact.Artifacts.Artifact.Builder, flyteidl.artifact.Artifacts.ArtifactOrBuilder> artifactBuilder_; - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public boolean hasArtifact() { - return artifactBuilder_ != null || artifact_ != null; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public flyteidl.artifact.Artifacts.Artifact getArtifact() { - if (artifactBuilder_ == null) { - return artifact_ == null ? 
flyteidl.artifact.Artifacts.Artifact.getDefaultInstance() : artifact_; - } else { - return artifactBuilder_.getMessage(); - } - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public Builder setArtifact(flyteidl.artifact.Artifacts.Artifact value) { - if (artifactBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - artifact_ = value; - onChanged(); - } else { - artifactBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public Builder setArtifact( - flyteidl.artifact.Artifacts.Artifact.Builder builderForValue) { - if (artifactBuilder_ == null) { - artifact_ = builderForValue.build(); - onChanged(); - } else { - artifactBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public Builder mergeArtifact(flyteidl.artifact.Artifacts.Artifact value) { - if (artifactBuilder_ == null) { - if (artifact_ != null) { - artifact_ = - flyteidl.artifact.Artifacts.Artifact.newBuilder(artifact_).mergeFrom(value).buildPartial(); - } else { - artifact_ = value; - } - onChanged(); - } else { - artifactBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public Builder clearArtifact() { - if (artifactBuilder_ == null) { - artifact_ = null; - onChanged(); - } else { - artifact_ = null; - artifactBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public flyteidl.artifact.Artifacts.Artifact.Builder getArtifactBuilder() { - - onChanged(); - return getArtifactFieldBuilder().getBuilder(); - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactOrBuilder getArtifactOrBuilder() { - if (artifactBuilder_ != null) { - return artifactBuilder_.getMessageOrBuilder(); - } else { - return artifact_ == null ? 
- flyteidl.artifact.Artifacts.Artifact.getDefaultInstance() : artifact_; - } - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.Artifact, flyteidl.artifact.Artifacts.Artifact.Builder, flyteidl.artifact.Artifacts.ArtifactOrBuilder> - getArtifactFieldBuilder() { - if (artifactBuilder_ == null) { - artifactBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.Artifact, flyteidl.artifact.Artifacts.Artifact.Builder, flyteidl.artifact.Artifacts.ArtifactOrBuilder>( - getArtifact(), - getParentForChildren(), - isClean()); - artifact_ = null; - } - return artifactBuilder_; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.CreateArtifactResponse) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.CreateArtifactResponse) - private static final flyteidl.artifact.Artifacts.CreateArtifactResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.CreateArtifactResponse(); - } - - public static flyteidl.artifact.Artifacts.CreateArtifactResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public CreateArtifactResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CreateArtifactResponse(input, extensionRegistry); - } - }; - - public static 
com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateArtifactResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface GetArtifactRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.GetArtifactRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - boolean hasQuery(); - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - flyteidl.core.ArtifactId.ArtifactQuery getQuery(); - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - flyteidl.core.ArtifactId.ArtifactQueryOrBuilder getQueryOrBuilder(); - - /** - *
-     * If false, then long_description is not returned.
-     * 
- * - * bool details = 2; - */ - boolean getDetails(); - } - /** - * Protobuf type {@code flyteidl.artifact.GetArtifactRequest} - */ - public static final class GetArtifactRequest extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.GetArtifactRequest) - GetArtifactRequestOrBuilder { - private static final long serialVersionUID = 0L; - // Use GetArtifactRequest.newBuilder() to construct. - private GetArtifactRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private GetArtifactRequest() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private GetArtifactRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.ArtifactId.ArtifactQuery.Builder subBuilder = null; - if (query_ != null) { - subBuilder = query_.toBuilder(); - } - query_ = input.readMessage(flyteidl.core.ArtifactId.ArtifactQuery.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(query_); - query_ = subBuilder.buildPartial(); - } - - break; - } - case 16: { - - details_ = input.readBool(); - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) 
{ - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_GetArtifactRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_GetArtifactRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.GetArtifactRequest.class, flyteidl.artifact.Artifacts.GetArtifactRequest.Builder.class); - } - - public static final int QUERY_FIELD_NUMBER = 1; - private flyteidl.core.ArtifactId.ArtifactQuery query_; - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - public boolean hasQuery() { - return query_ != null; - } - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - public flyteidl.core.ArtifactId.ArtifactQuery getQuery() { - return query_ == null ? flyteidl.core.ArtifactId.ArtifactQuery.getDefaultInstance() : query_; - } - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - public flyteidl.core.ArtifactId.ArtifactQueryOrBuilder getQueryOrBuilder() { - return getQuery(); - } - - public static final int DETAILS_FIELD_NUMBER = 2; - private boolean details_; - /** - *
-     * If false, then long_description is not returned.
-     * 
- * - * bool details = 2; - */ - public boolean getDetails() { - return details_; - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (query_ != null) { - output.writeMessage(1, getQuery()); - } - if (details_ != false) { - output.writeBool(2, details_); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (query_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getQuery()); - } - if (details_ != false) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, details_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.GetArtifactRequest)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.GetArtifactRequest other = (flyteidl.artifact.Artifacts.GetArtifactRequest) obj; - - if (hasQuery() != other.hasQuery()) return false; - if (hasQuery()) { - if (!getQuery() - .equals(other.getQuery())) return false; - } - if (getDetails() - != other.getDetails()) return false; - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasQuery()) { - hash = (37 * hash) + QUERY_FIELD_NUMBER; - hash = (53 * hash) + 
getQuery().hashCode(); - } - hash = (37 * hash) + DETAILS_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( - getDetails()); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseFrom( - 
java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.GetArtifactRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.GetArtifactRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.GetArtifactRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.GetArtifactRequest) - flyteidl.artifact.Artifacts.GetArtifactRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_GetArtifactRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_GetArtifactRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.GetArtifactRequest.class, flyteidl.artifact.Artifacts.GetArtifactRequest.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.GetArtifactRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (queryBuilder_ == null) { - query_ = null; - } else { - query_ = null; - queryBuilder_ = null; - } - details_ = false; - - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_GetArtifactRequest_descriptor; - } 
- - @java.lang.Override - public flyteidl.artifact.Artifacts.GetArtifactRequest getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.GetArtifactRequest.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.GetArtifactRequest build() { - flyteidl.artifact.Artifacts.GetArtifactRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.GetArtifactRequest buildPartial() { - flyteidl.artifact.Artifacts.GetArtifactRequest result = new flyteidl.artifact.Artifacts.GetArtifactRequest(this); - if (queryBuilder_ == null) { - result.query_ = query_; - } else { - result.query_ = queryBuilder_.build(); - } - result.details_ = details_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.GetArtifactRequest) { - return 
mergeFrom((flyteidl.artifact.Artifacts.GetArtifactRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.GetArtifactRequest other) { - if (other == flyteidl.artifact.Artifacts.GetArtifactRequest.getDefaultInstance()) return this; - if (other.hasQuery()) { - mergeQuery(other.getQuery()); - } - if (other.getDetails() != false) { - setDetails(other.getDetails()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.GetArtifactRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.GetArtifactRequest) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private flyteidl.core.ArtifactId.ArtifactQuery query_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactQuery, flyteidl.core.ArtifactId.ArtifactQuery.Builder, flyteidl.core.ArtifactId.ArtifactQueryOrBuilder> queryBuilder_; - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - public boolean hasQuery() { - return queryBuilder_ != null || query_ != null; - } - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - public flyteidl.core.ArtifactId.ArtifactQuery getQuery() { - if (queryBuilder_ == null) { - return query_ == null ? 
flyteidl.core.ArtifactId.ArtifactQuery.getDefaultInstance() : query_; - } else { - return queryBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - public Builder setQuery(flyteidl.core.ArtifactId.ArtifactQuery value) { - if (queryBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - query_ = value; - onChanged(); - } else { - queryBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - public Builder setQuery( - flyteidl.core.ArtifactId.ArtifactQuery.Builder builderForValue) { - if (queryBuilder_ == null) { - query_ = builderForValue.build(); - onChanged(); - } else { - queryBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - public Builder mergeQuery(flyteidl.core.ArtifactId.ArtifactQuery value) { - if (queryBuilder_ == null) { - if (query_ != null) { - query_ = - flyteidl.core.ArtifactId.ArtifactQuery.newBuilder(query_).mergeFrom(value).buildPartial(); - } else { - query_ = value; - } - onChanged(); - } else { - queryBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - public Builder clearQuery() { - if (queryBuilder_ == null) { - query_ = null; - onChanged(); - } else { - query_ = null; - queryBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - public flyteidl.core.ArtifactId.ArtifactQuery.Builder getQueryBuilder() { - - onChanged(); - return getQueryFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - public flyteidl.core.ArtifactId.ArtifactQueryOrBuilder getQueryOrBuilder() { - if (queryBuilder_ != null) { - return queryBuilder_.getMessageOrBuilder(); - } else { - return query_ == null ? 
- flyteidl.core.ArtifactId.ArtifactQuery.getDefaultInstance() : query_; - } - } - /** - * .flyteidl.core.ArtifactQuery query = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactQuery, flyteidl.core.ArtifactId.ArtifactQuery.Builder, flyteidl.core.ArtifactId.ArtifactQueryOrBuilder> - getQueryFieldBuilder() { - if (queryBuilder_ == null) { - queryBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactQuery, flyteidl.core.ArtifactId.ArtifactQuery.Builder, flyteidl.core.ArtifactId.ArtifactQueryOrBuilder>( - getQuery(), - getParentForChildren(), - isClean()); - query_ = null; - } - return queryBuilder_; - } - - private boolean details_ ; - /** - *
-       * If false, then long_description is not returned.
-       * 
- * - * bool details = 2; - */ - public boolean getDetails() { - return details_; - } - /** - *
-       * If false, then long_description is not returned.
-       * 
- * - * bool details = 2; - */ - public Builder setDetails(boolean value) { - - details_ = value; - onChanged(); - return this; - } - /** - *
-       * If false, then long_description is not returned.
-       * 
- * - * bool details = 2; - */ - public Builder clearDetails() { - - details_ = false; - onChanged(); - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.GetArtifactRequest) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.GetArtifactRequest) - private static final flyteidl.artifact.Artifacts.GetArtifactRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.GetArtifactRequest(); - } - - public static flyteidl.artifact.Artifacts.GetArtifactRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public GetArtifactRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetArtifactRequest(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.GetArtifactRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface GetArtifactResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.GetArtifactResponse) - com.google.protobuf.MessageOrBuilder { - - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - boolean hasArtifact(); - /** - * .flyteidl.artifact.Artifact 
artifact = 1; - */ - flyteidl.artifact.Artifacts.Artifact getArtifact(); - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - flyteidl.artifact.Artifacts.ArtifactOrBuilder getArtifactOrBuilder(); - } - /** - * Protobuf type {@code flyteidl.artifact.GetArtifactResponse} - */ - public static final class GetArtifactResponse extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.GetArtifactResponse) - GetArtifactResponseOrBuilder { - private static final long serialVersionUID = 0L; - // Use GetArtifactResponse.newBuilder() to construct. - private GetArtifactResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private GetArtifactResponse() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private GetArtifactResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.artifact.Artifacts.Artifact.Builder subBuilder = null; - if (artifact_ != null) { - subBuilder = artifact_.toBuilder(); - } - artifact_ = input.readMessage(flyteidl.artifact.Artifacts.Artifact.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(artifact_); - artifact_ = subBuilder.buildPartial(); - } - - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_GetArtifactResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_GetArtifactResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.GetArtifactResponse.class, flyteidl.artifact.Artifacts.GetArtifactResponse.Builder.class); - } - - public static final int ARTIFACT_FIELD_NUMBER = 1; - private flyteidl.artifact.Artifacts.Artifact artifact_; - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public boolean hasArtifact() { - return artifact_ != null; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public flyteidl.artifact.Artifacts.Artifact getArtifact() { - return artifact_ == null ? 
flyteidl.artifact.Artifacts.Artifact.getDefaultInstance() : artifact_; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactOrBuilder getArtifactOrBuilder() { - return getArtifact(); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (artifact_ != null) { - output.writeMessage(1, getArtifact()); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (artifact_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getArtifact()); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.GetArtifactResponse)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.GetArtifactResponse other = (flyteidl.artifact.Artifacts.GetArtifactResponse) obj; - - if (hasArtifact() != other.hasArtifact()) return false; - if (hasArtifact()) { - if (!getArtifact() - .equals(other.getArtifact())) return false; - } - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasArtifact()) { - hash = (37 * hash) + ARTIFACT_FIELD_NUMBER; - hash = (53 * hash) + getArtifact().hashCode(); - } - hash = (29 * hash) + 
unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - 
.parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.GetArtifactResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.GetArtifactResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.GetArtifactResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.GetArtifactResponse) - flyteidl.artifact.Artifacts.GetArtifactResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_GetArtifactResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_GetArtifactResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.GetArtifactResponse.class, flyteidl.artifact.Artifacts.GetArtifactResponse.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.GetArtifactResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (artifactBuilder_ == null) { - artifact_ = null; - } else { - artifact_ = null; - artifactBuilder_ = null; - } - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_GetArtifactResponse_descriptor; - } - 
- @java.lang.Override - public flyteidl.artifact.Artifacts.GetArtifactResponse getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.GetArtifactResponse.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.GetArtifactResponse build() { - flyteidl.artifact.Artifacts.GetArtifactResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.GetArtifactResponse buildPartial() { - flyteidl.artifact.Artifacts.GetArtifactResponse result = new flyteidl.artifact.Artifacts.GetArtifactResponse(this); - if (artifactBuilder_ == null) { - result.artifact_ = artifact_; - } else { - result.artifact_ = artifactBuilder_.build(); - } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.GetArtifactResponse) { - return 
mergeFrom((flyteidl.artifact.Artifacts.GetArtifactResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.GetArtifactResponse other) { - if (other == flyteidl.artifact.Artifacts.GetArtifactResponse.getDefaultInstance()) return this; - if (other.hasArtifact()) { - mergeArtifact(other.getArtifact()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.GetArtifactResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.GetArtifactResponse) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private flyteidl.artifact.Artifacts.Artifact artifact_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.Artifact, flyteidl.artifact.Artifacts.Artifact.Builder, flyteidl.artifact.Artifacts.ArtifactOrBuilder> artifactBuilder_; - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public boolean hasArtifact() { - return artifactBuilder_ != null || artifact_ != null; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public flyteidl.artifact.Artifacts.Artifact getArtifact() { - if (artifactBuilder_ == null) { - return artifact_ == null ? 
flyteidl.artifact.Artifacts.Artifact.getDefaultInstance() : artifact_; - } else { - return artifactBuilder_.getMessage(); - } - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public Builder setArtifact(flyteidl.artifact.Artifacts.Artifact value) { - if (artifactBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - artifact_ = value; - onChanged(); - } else { - artifactBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public Builder setArtifact( - flyteidl.artifact.Artifacts.Artifact.Builder builderForValue) { - if (artifactBuilder_ == null) { - artifact_ = builderForValue.build(); - onChanged(); - } else { - artifactBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public Builder mergeArtifact(flyteidl.artifact.Artifacts.Artifact value) { - if (artifactBuilder_ == null) { - if (artifact_ != null) { - artifact_ = - flyteidl.artifact.Artifacts.Artifact.newBuilder(artifact_).mergeFrom(value).buildPartial(); - } else { - artifact_ = value; - } - onChanged(); - } else { - artifactBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public Builder clearArtifact() { - if (artifactBuilder_ == null) { - artifact_ = null; - onChanged(); - } else { - artifact_ = null; - artifactBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public flyteidl.artifact.Artifacts.Artifact.Builder getArtifactBuilder() { - - onChanged(); - return getArtifactFieldBuilder().getBuilder(); - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactOrBuilder getArtifactOrBuilder() { - if (artifactBuilder_ != null) { - return artifactBuilder_.getMessageOrBuilder(); - } else { - return artifact_ == null ? 
- flyteidl.artifact.Artifacts.Artifact.getDefaultInstance() : artifact_; - } - } - /** - * .flyteidl.artifact.Artifact artifact = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.Artifact, flyteidl.artifact.Artifacts.Artifact.Builder, flyteidl.artifact.Artifacts.ArtifactOrBuilder> - getArtifactFieldBuilder() { - if (artifactBuilder_ == null) { - artifactBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.Artifact, flyteidl.artifact.Artifacts.Artifact.Builder, flyteidl.artifact.Artifacts.ArtifactOrBuilder>( - getArtifact(), - getParentForChildren(), - isClean()); - artifact_ = null; - } - return artifactBuilder_; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.GetArtifactResponse) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.GetArtifactResponse) - private static final flyteidl.artifact.Artifacts.GetArtifactResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.GetArtifactResponse(); - } - - public static flyteidl.artifact.Artifacts.GetArtifactResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public GetArtifactResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new GetArtifactResponse(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser 
parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.GetArtifactResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface SearchOptionsOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.SearchOptions) - com.google.protobuf.MessageOrBuilder { - - /** - *
-     * If true, this means a strict partition search. meaning if you don't specify the partition
-     * field, that will mean, non-partitioned, rather than any partition.
-     * 
- * - * bool strict_partitions = 1; - */ - boolean getStrictPartitions(); - - /** - *
-     * If true, only one artifact per key will be returned. It will be the latest one by creation time.
-     * 
- * - * bool latest_by_key = 2; - */ - boolean getLatestByKey(); - } - /** - * Protobuf type {@code flyteidl.artifact.SearchOptions} - */ - public static final class SearchOptions extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.SearchOptions) - SearchOptionsOrBuilder { - private static final long serialVersionUID = 0L; - // Use SearchOptions.newBuilder() to construct. - private SearchOptions(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private SearchOptions() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private SearchOptions( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 8: { - - strictPartitions_ = input.readBool(); - break; - } - case 16: { - - latestByKey_ = input.readBool(); - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchOptions_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchOptions_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.SearchOptions.class, flyteidl.artifact.Artifacts.SearchOptions.Builder.class); - } - - public static final int STRICT_PARTITIONS_FIELD_NUMBER = 1; - private boolean strictPartitions_; - /** - *
-     * If true, this means a strict partition search. meaning if you don't specify the partition
-     * field, that will mean, non-partitioned, rather than any partition.
-     * 
- * - * bool strict_partitions = 1; - */ - public boolean getStrictPartitions() { - return strictPartitions_; - } - - public static final int LATEST_BY_KEY_FIELD_NUMBER = 2; - private boolean latestByKey_; - /** - *
-     * If true, only one artifact per key will be returned. It will be the latest one by creation time.
-     * 
- * - * bool latest_by_key = 2; - */ - public boolean getLatestByKey() { - return latestByKey_; - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (strictPartitions_ != false) { - output.writeBool(1, strictPartitions_); - } - if (latestByKey_ != false) { - output.writeBool(2, latestByKey_); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (strictPartitions_ != false) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(1, strictPartitions_); - } - if (latestByKey_ != false) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(2, latestByKey_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.SearchOptions)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.SearchOptions other = (flyteidl.artifact.Artifacts.SearchOptions) obj; - - if (getStrictPartitions() - != other.getStrictPartitions()) return false; - if (getLatestByKey() - != other.getLatestByKey()) return false; - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (37 * hash) + STRICT_PARTITIONS_FIELD_NUMBER; - hash = (53 * hash) + 
com.google.protobuf.Internal.hashBoolean( - getStrictPartitions()); - hash = (37 * hash) + LATEST_BY_KEY_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( - getLatestByKey()); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.SearchOptions parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.SearchOptions parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchOptions parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.SearchOptions parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchOptions parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.SearchOptions parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchOptions parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.SearchOptions 
parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchOptions parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.SearchOptions parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchOptions parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.SearchOptions parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.SearchOptions prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.SearchOptions} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.SearchOptions) - flyteidl.artifact.Artifacts.SearchOptionsOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchOptions_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchOptions_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.SearchOptions.class, flyteidl.artifact.Artifacts.SearchOptions.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.SearchOptions.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - strictPartitions_ = false; - - latestByKey_ = false; - - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchOptions_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchOptions getDefaultInstanceForType() { - return 
flyteidl.artifact.Artifacts.SearchOptions.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchOptions build() { - flyteidl.artifact.Artifacts.SearchOptions result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchOptions buildPartial() { - flyteidl.artifact.Artifacts.SearchOptions result = new flyteidl.artifact.Artifacts.SearchOptions(this); - result.strictPartitions_ = strictPartitions_; - result.latestByKey_ = latestByKey_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.SearchOptions) { - return mergeFrom((flyteidl.artifact.Artifacts.SearchOptions)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.SearchOptions other) { - if (other == 
flyteidl.artifact.Artifacts.SearchOptions.getDefaultInstance()) return this; - if (other.getStrictPartitions() != false) { - setStrictPartitions(other.getStrictPartitions()); - } - if (other.getLatestByKey() != false) { - setLatestByKey(other.getLatestByKey()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.SearchOptions parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.SearchOptions) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private boolean strictPartitions_ ; - /** - *
-       * If true, this means a strict partition search. meaning if you don't specify the partition
-       * field, that will mean, non-partitioned, rather than any partition.
-       * 
- * - * bool strict_partitions = 1; - */ - public boolean getStrictPartitions() { - return strictPartitions_; - } - /** - *
-       * If true, this means a strict partition search. meaning if you don't specify the partition
-       * field, that will mean, non-partitioned, rather than any partition.
-       * 
- * - * bool strict_partitions = 1; - */ - public Builder setStrictPartitions(boolean value) { - - strictPartitions_ = value; - onChanged(); - return this; - } - /** - *
-       * If true, this means a strict partition search. meaning if you don't specify the partition
-       * field, that will mean, non-partitioned, rather than any partition.
-       * 
- * - * bool strict_partitions = 1; - */ - public Builder clearStrictPartitions() { - - strictPartitions_ = false; - onChanged(); - return this; - } - - private boolean latestByKey_ ; - /** - *
-       * If true, only one artifact per key will be returned. It will be the latest one by creation time.
-       * 
- * - * bool latest_by_key = 2; - */ - public boolean getLatestByKey() { - return latestByKey_; - } - /** - *
-       * If true, only one artifact per key will be returned. It will be the latest one by creation time.
-       * 
- * - * bool latest_by_key = 2; - */ - public Builder setLatestByKey(boolean value) { - - latestByKey_ = value; - onChanged(); - return this; - } - /** - *
-       * If true, only one artifact per key will be returned. It will be the latest one by creation time.
-       * 
- * - * bool latest_by_key = 2; - */ - public Builder clearLatestByKey() { - - latestByKey_ = false; - onChanged(); - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.SearchOptions) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.SearchOptions) - private static final flyteidl.artifact.Artifacts.SearchOptions DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.SearchOptions(); - } - - public static flyteidl.artifact.Artifacts.SearchOptions getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public SearchOptions parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SearchOptions(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchOptions getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface SearchArtifactsRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.SearchArtifactsRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - boolean hasArtifactKey(); - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - 
*/ - flyteidl.core.ArtifactId.ArtifactKey getArtifactKey(); - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder(); - - /** - * .flyteidl.core.Partitions partitions = 2; - */ - boolean hasPartitions(); - /** - * .flyteidl.core.Partitions partitions = 2; - */ - flyteidl.core.ArtifactId.Partitions getPartitions(); - /** - * .flyteidl.core.Partitions partitions = 2; - */ - flyteidl.core.ArtifactId.PartitionsOrBuilder getPartitionsOrBuilder(); - - /** - * string principal = 3; - */ - java.lang.String getPrincipal(); - /** - * string principal = 3; - */ - com.google.protobuf.ByteString - getPrincipalBytes(); - - /** - * string version = 4; - */ - java.lang.String getVersion(); - /** - * string version = 4; - */ - com.google.protobuf.ByteString - getVersionBytes(); - - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - boolean hasOptions(); - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - flyteidl.artifact.Artifacts.SearchOptions getOptions(); - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - flyteidl.artifact.Artifacts.SearchOptionsOrBuilder getOptionsOrBuilder(); - - /** - * string token = 6; - */ - java.lang.String getToken(); - /** - * string token = 6; - */ - com.google.protobuf.ByteString - getTokenBytes(); - - /** - * int32 limit = 7; - */ - int getLimit(); - } - /** - * Protobuf type {@code flyteidl.artifact.SearchArtifactsRequest} - */ - public static final class SearchArtifactsRequest extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.SearchArtifactsRequest) - SearchArtifactsRequestOrBuilder { - private static final long serialVersionUID = 0L; - // Use SearchArtifactsRequest.newBuilder() to construct. 
- private SearchArtifactsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private SearchArtifactsRequest() { - principal_ = ""; - version_ = ""; - token_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private SearchArtifactsRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.ArtifactId.ArtifactKey.Builder subBuilder = null; - if (artifactKey_ != null) { - subBuilder = artifactKey_.toBuilder(); - } - artifactKey_ = input.readMessage(flyteidl.core.ArtifactId.ArtifactKey.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(artifactKey_); - artifactKey_ = subBuilder.buildPartial(); - } - - break; - } - case 18: { - flyteidl.core.ArtifactId.Partitions.Builder subBuilder = null; - if (partitions_ != null) { - subBuilder = partitions_.toBuilder(); - } - partitions_ = input.readMessage(flyteidl.core.ArtifactId.Partitions.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(partitions_); - partitions_ = subBuilder.buildPartial(); - } - - break; - } - case 26: { - java.lang.String s = input.readStringRequireUtf8(); - - principal_ = s; - break; - } - case 34: { - java.lang.String s = input.readStringRequireUtf8(); - - version_ = s; - break; - } - case 42: { - flyteidl.artifact.Artifacts.SearchOptions.Builder subBuilder = null; - if (options_ != null) { - subBuilder = 
options_.toBuilder(); - } - options_ = input.readMessage(flyteidl.artifact.Artifacts.SearchOptions.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(options_); - options_ = subBuilder.buildPartial(); - } - - break; - } - case 50: { - java.lang.String s = input.readStringRequireUtf8(); - - token_ = s; - break; - } - case 56: { - - limit_ = input.readInt32(); - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchArtifactsRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchArtifactsRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.SearchArtifactsRequest.class, flyteidl.artifact.Artifacts.SearchArtifactsRequest.Builder.class); - } - - public static final int ARTIFACT_KEY_FIELD_NUMBER = 1; - private flyteidl.core.ArtifactId.ArtifactKey artifactKey_; - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public boolean hasArtifactKey() { - return artifactKey_ != null; - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKey getArtifactKey() { - return artifactKey_ == null ? 
flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder() { - return getArtifactKey(); - } - - public static final int PARTITIONS_FIELD_NUMBER = 2; - private flyteidl.core.ArtifactId.Partitions partitions_; - /** - * .flyteidl.core.Partitions partitions = 2; - */ - public boolean hasPartitions() { - return partitions_ != null; - } - /** - * .flyteidl.core.Partitions partitions = 2; - */ - public flyteidl.core.ArtifactId.Partitions getPartitions() { - return partitions_ == null ? flyteidl.core.ArtifactId.Partitions.getDefaultInstance() : partitions_; - } - /** - * .flyteidl.core.Partitions partitions = 2; - */ - public flyteidl.core.ArtifactId.PartitionsOrBuilder getPartitionsOrBuilder() { - return getPartitions(); - } - - public static final int PRINCIPAL_FIELD_NUMBER = 3; - private volatile java.lang.Object principal_; - /** - * string principal = 3; - */ - public java.lang.String getPrincipal() { - java.lang.Object ref = principal_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - principal_ = s; - return s; - } - } - /** - * string principal = 3; - */ - public com.google.protobuf.ByteString - getPrincipalBytes() { - java.lang.Object ref = principal_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - principal_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int VERSION_FIELD_NUMBER = 4; - private volatile java.lang.Object version_; - /** - * string version = 4; - */ - public java.lang.String getVersion() { - java.lang.Object ref = version_; - if (ref instanceof java.lang.String) { - return 
(java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - version_ = s; - return s; - } - } - /** - * string version = 4; - */ - public com.google.protobuf.ByteString - getVersionBytes() { - java.lang.Object ref = version_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - version_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int OPTIONS_FIELD_NUMBER = 5; - private flyteidl.artifact.Artifacts.SearchOptions options_; - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - public boolean hasOptions() { - return options_ != null; - } - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - public flyteidl.artifact.Artifacts.SearchOptions getOptions() { - return options_ == null ? flyteidl.artifact.Artifacts.SearchOptions.getDefaultInstance() : options_; - } - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - public flyteidl.artifact.Artifacts.SearchOptionsOrBuilder getOptionsOrBuilder() { - return getOptions(); - } - - public static final int TOKEN_FIELD_NUMBER = 6; - private volatile java.lang.Object token_; - /** - * string token = 6; - */ - public java.lang.String getToken() { - java.lang.Object ref = token_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - token_ = s; - return s; - } - } - /** - * string token = 6; - */ - public com.google.protobuf.ByteString - getTokenBytes() { - java.lang.Object ref = token_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - token_ = b; - return b; - } else { - return 
(com.google.protobuf.ByteString) ref; - } - } - - public static final int LIMIT_FIELD_NUMBER = 7; - private int limit_; - /** - * int32 limit = 7; - */ - public int getLimit() { - return limit_; - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (artifactKey_ != null) { - output.writeMessage(1, getArtifactKey()); - } - if (partitions_ != null) { - output.writeMessage(2, getPartitions()); - } - if (!getPrincipalBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, principal_); - } - if (!getVersionBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 4, version_); - } - if (options_ != null) { - output.writeMessage(5, getOptions()); - } - if (!getTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 6, token_); - } - if (limit_ != 0) { - output.writeInt32(7, limit_); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (artifactKey_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getArtifactKey()); - } - if (partitions_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getPartitions()); - } - if (!getPrincipalBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, principal_); - } - if (!getVersionBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, version_); - } - if (options_ != null) { - size += com.google.protobuf.CodedOutputStream - 
.computeMessageSize(5, getOptions()); - } - if (!getTokenBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, token_); - } - if (limit_ != 0) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(7, limit_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.SearchArtifactsRequest)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.SearchArtifactsRequest other = (flyteidl.artifact.Artifacts.SearchArtifactsRequest) obj; - - if (hasArtifactKey() != other.hasArtifactKey()) return false; - if (hasArtifactKey()) { - if (!getArtifactKey() - .equals(other.getArtifactKey())) return false; - } - if (hasPartitions() != other.hasPartitions()) return false; - if (hasPartitions()) { - if (!getPartitions() - .equals(other.getPartitions())) return false; - } - if (!getPrincipal() - .equals(other.getPrincipal())) return false; - if (!getVersion() - .equals(other.getVersion())) return false; - if (hasOptions() != other.hasOptions()) return false; - if (hasOptions()) { - if (!getOptions() - .equals(other.getOptions())) return false; - } - if (!getToken() - .equals(other.getToken())) return false; - if (getLimit() - != other.getLimit()) return false; - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasArtifactKey()) { - hash = (37 * hash) + ARTIFACT_KEY_FIELD_NUMBER; - hash = (53 * hash) + getArtifactKey().hashCode(); - } - if (hasPartitions()) { - hash = (37 * hash) + PARTITIONS_FIELD_NUMBER; - hash = (53 * hash) + getPartitions().hashCode(); - } - hash = (37 * hash) + PRINCIPAL_FIELD_NUMBER; - 
hash = (53 * hash) + getPrincipal().hashCode(); - hash = (37 * hash) + VERSION_FIELD_NUMBER; - hash = (53 * hash) + getVersion().hashCode(); - if (hasOptions()) { - hash = (37 * hash) + OPTIONS_FIELD_NUMBER; - hash = (53 * hash) + getOptions().hashCode(); - } - hash = (37 * hash) + TOKEN_FIELD_NUMBER; - hash = (53 * hash) + getToken().hashCode(); - hash = (37 * hash) + LIMIT_FIELD_NUMBER; - hash = (53 * hash) + getLimit(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - 
public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.SearchArtifactsRequest prototype) { - return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.SearchArtifactsRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.SearchArtifactsRequest) - flyteidl.artifact.Artifacts.SearchArtifactsRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchArtifactsRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchArtifactsRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.SearchArtifactsRequest.class, flyteidl.artifact.Artifacts.SearchArtifactsRequest.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.SearchArtifactsRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (artifactKeyBuilder_ == null) { - artifactKey_ = null; - } else { - artifactKey_ = null; - artifactKeyBuilder_ = null; - } - if (partitionsBuilder_ == null) { - 
partitions_ = null; - } else { - partitions_ = null; - partitionsBuilder_ = null; - } - principal_ = ""; - - version_ = ""; - - if (optionsBuilder_ == null) { - options_ = null; - } else { - options_ = null; - optionsBuilder_ = null; - } - token_ = ""; - - limit_ = 0; - - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchArtifactsRequest_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchArtifactsRequest getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.SearchArtifactsRequest.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchArtifactsRequest build() { - flyteidl.artifact.Artifacts.SearchArtifactsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchArtifactsRequest buildPartial() { - flyteidl.artifact.Artifacts.SearchArtifactsRequest result = new flyteidl.artifact.Artifacts.SearchArtifactsRequest(this); - if (artifactKeyBuilder_ == null) { - result.artifactKey_ = artifactKey_; - } else { - result.artifactKey_ = artifactKeyBuilder_.build(); - } - if (partitionsBuilder_ == null) { - result.partitions_ = partitions_; - } else { - result.partitions_ = partitionsBuilder_.build(); - } - result.principal_ = principal_; - result.version_ = version_; - if (optionsBuilder_ == null) { - result.options_ = options_; - } else { - result.options_ = optionsBuilder_.build(); - } - result.token_ = token_; - result.limit_ = limit_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return 
super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.SearchArtifactsRequest) { - return mergeFrom((flyteidl.artifact.Artifacts.SearchArtifactsRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.SearchArtifactsRequest other) { - if (other == flyteidl.artifact.Artifacts.SearchArtifactsRequest.getDefaultInstance()) return this; - if (other.hasArtifactKey()) { - mergeArtifactKey(other.getArtifactKey()); - } - if (other.hasPartitions()) { - mergePartitions(other.getPartitions()); - } - if (!other.getPrincipal().isEmpty()) { - principal_ = other.principal_; - onChanged(); - } - if (!other.getVersion().isEmpty()) { - version_ = other.version_; - onChanged(); - } - if (other.hasOptions()) { - mergeOptions(other.getOptions()); - } - if (!other.getToken().isEmpty()) { - token_ = other.token_; - onChanged(); - } - if (other.getLimit() != 0) { - setLimit(other.getLimit()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - 
com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.SearchArtifactsRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.SearchArtifactsRequest) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private flyteidl.core.ArtifactId.ArtifactKey artifactKey_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder> artifactKeyBuilder_; - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public boolean hasArtifactKey() { - return artifactKeyBuilder_ != null || artifactKey_ != null; - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKey getArtifactKey() { - if (artifactKeyBuilder_ == null) { - return artifactKey_ == null ? 
flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; - } else { - return artifactKeyBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public Builder setArtifactKey(flyteidl.core.ArtifactId.ArtifactKey value) { - if (artifactKeyBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - artifactKey_ = value; - onChanged(); - } else { - artifactKeyBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public Builder setArtifactKey( - flyteidl.core.ArtifactId.ArtifactKey.Builder builderForValue) { - if (artifactKeyBuilder_ == null) { - artifactKey_ = builderForValue.build(); - onChanged(); - } else { - artifactKeyBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public Builder mergeArtifactKey(flyteidl.core.ArtifactId.ArtifactKey value) { - if (artifactKeyBuilder_ == null) { - if (artifactKey_ != null) { - artifactKey_ = - flyteidl.core.ArtifactId.ArtifactKey.newBuilder(artifactKey_).mergeFrom(value).buildPartial(); - } else { - artifactKey_ = value; - } - onChanged(); - } else { - artifactKeyBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public Builder clearArtifactKey() { - if (artifactKeyBuilder_ == null) { - artifactKey_ = null; - onChanged(); - } else { - artifactKey_ = null; - artifactKeyBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKey.Builder getArtifactKeyBuilder() { - - onChanged(); - return getArtifactKeyFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder() { - if (artifactKeyBuilder_ != null) { - return artifactKeyBuilder_.getMessageOrBuilder(); - } 
else { - return artifactKey_ == null ? - flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; - } - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder> - getArtifactKeyFieldBuilder() { - if (artifactKeyBuilder_ == null) { - artifactKeyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder>( - getArtifactKey(), - getParentForChildren(), - isClean()); - artifactKey_ = null; - } - return artifactKeyBuilder_; - } - - private flyteidl.core.ArtifactId.Partitions partitions_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.Partitions, flyteidl.core.ArtifactId.Partitions.Builder, flyteidl.core.ArtifactId.PartitionsOrBuilder> partitionsBuilder_; - /** - * .flyteidl.core.Partitions partitions = 2; - */ - public boolean hasPartitions() { - return partitionsBuilder_ != null || partitions_ != null; - } - /** - * .flyteidl.core.Partitions partitions = 2; - */ - public flyteidl.core.ArtifactId.Partitions getPartitions() { - if (partitionsBuilder_ == null) { - return partitions_ == null ? 
flyteidl.core.ArtifactId.Partitions.getDefaultInstance() : partitions_; - } else { - return partitionsBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.Partitions partitions = 2; - */ - public Builder setPartitions(flyteidl.core.ArtifactId.Partitions value) { - if (partitionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - partitions_ = value; - onChanged(); - } else { - partitionsBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.Partitions partitions = 2; - */ - public Builder setPartitions( - flyteidl.core.ArtifactId.Partitions.Builder builderForValue) { - if (partitionsBuilder_ == null) { - partitions_ = builderForValue.build(); - onChanged(); - } else { - partitionsBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.Partitions partitions = 2; - */ - public Builder mergePartitions(flyteidl.core.ArtifactId.Partitions value) { - if (partitionsBuilder_ == null) { - if (partitions_ != null) { - partitions_ = - flyteidl.core.ArtifactId.Partitions.newBuilder(partitions_).mergeFrom(value).buildPartial(); - } else { - partitions_ = value; - } - onChanged(); - } else { - partitionsBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.Partitions partitions = 2; - */ - public Builder clearPartitions() { - if (partitionsBuilder_ == null) { - partitions_ = null; - onChanged(); - } else { - partitions_ = null; - partitionsBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.Partitions partitions = 2; - */ - public flyteidl.core.ArtifactId.Partitions.Builder getPartitionsBuilder() { - - onChanged(); - return getPartitionsFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.Partitions partitions = 2; - */ - public flyteidl.core.ArtifactId.PartitionsOrBuilder getPartitionsOrBuilder() { - if (partitionsBuilder_ != null) { - return partitionsBuilder_.getMessageOrBuilder(); - } else { - return partitions_ == null ? 
- flyteidl.core.ArtifactId.Partitions.getDefaultInstance() : partitions_; - } - } - /** - * .flyteidl.core.Partitions partitions = 2; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.Partitions, flyteidl.core.ArtifactId.Partitions.Builder, flyteidl.core.ArtifactId.PartitionsOrBuilder> - getPartitionsFieldBuilder() { - if (partitionsBuilder_ == null) { - partitionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.Partitions, flyteidl.core.ArtifactId.Partitions.Builder, flyteidl.core.ArtifactId.PartitionsOrBuilder>( - getPartitions(), - getParentForChildren(), - isClean()); - partitions_ = null; - } - return partitionsBuilder_; - } - - private java.lang.Object principal_ = ""; - /** - * string principal = 3; - */ - public java.lang.String getPrincipal() { - java.lang.Object ref = principal_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - principal_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string principal = 3; - */ - public com.google.protobuf.ByteString - getPrincipalBytes() { - java.lang.Object ref = principal_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - principal_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string principal = 3; - */ - public Builder setPrincipal( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - principal_ = value; - onChanged(); - return this; - } - /** - * string principal = 3; - */ - public Builder clearPrincipal() { - - principal_ = getDefaultInstance().getPrincipal(); - onChanged(); - return this; - } - /** - * string principal = 3; - */ - public Builder setPrincipalBytes( - com.google.protobuf.ByteString value) { - if (value == 
null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - principal_ = value; - onChanged(); - return this; - } - - private java.lang.Object version_ = ""; - /** - * string version = 4; - */ - public java.lang.String getVersion() { - java.lang.Object ref = version_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - version_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string version = 4; - */ - public com.google.protobuf.ByteString - getVersionBytes() { - java.lang.Object ref = version_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - version_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string version = 4; - */ - public Builder setVersion( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - version_ = value; - onChanged(); - return this; - } - /** - * string version = 4; - */ - public Builder clearVersion() { - - version_ = getDefaultInstance().getVersion(); - onChanged(); - return this; - } - /** - * string version = 4; - */ - public Builder setVersionBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - version_ = value; - onChanged(); - return this; - } - - private flyteidl.artifact.Artifacts.SearchOptions options_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.SearchOptions, flyteidl.artifact.Artifacts.SearchOptions.Builder, flyteidl.artifact.Artifacts.SearchOptionsOrBuilder> optionsBuilder_; - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - public boolean hasOptions() { - return optionsBuilder_ != null || options_ != null; - } - /** - * 
.flyteidl.artifact.SearchOptions options = 5; - */ - public flyteidl.artifact.Artifacts.SearchOptions getOptions() { - if (optionsBuilder_ == null) { - return options_ == null ? flyteidl.artifact.Artifacts.SearchOptions.getDefaultInstance() : options_; - } else { - return optionsBuilder_.getMessage(); - } - } - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - public Builder setOptions(flyteidl.artifact.Artifacts.SearchOptions value) { - if (optionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - options_ = value; - onChanged(); - } else { - optionsBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - public Builder setOptions( - flyteidl.artifact.Artifacts.SearchOptions.Builder builderForValue) { - if (optionsBuilder_ == null) { - options_ = builderForValue.build(); - onChanged(); - } else { - optionsBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - public Builder mergeOptions(flyteidl.artifact.Artifacts.SearchOptions value) { - if (optionsBuilder_ == null) { - if (options_ != null) { - options_ = - flyteidl.artifact.Artifacts.SearchOptions.newBuilder(options_).mergeFrom(value).buildPartial(); - } else { - options_ = value; - } - onChanged(); - } else { - optionsBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - public Builder clearOptions() { - if (optionsBuilder_ == null) { - options_ = null; - onChanged(); - } else { - options_ = null; - optionsBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - public flyteidl.artifact.Artifacts.SearchOptions.Builder getOptionsBuilder() { - - onChanged(); - return getOptionsFieldBuilder().getBuilder(); - } - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - public 
flyteidl.artifact.Artifacts.SearchOptionsOrBuilder getOptionsOrBuilder() { - if (optionsBuilder_ != null) { - return optionsBuilder_.getMessageOrBuilder(); - } else { - return options_ == null ? - flyteidl.artifact.Artifacts.SearchOptions.getDefaultInstance() : options_; - } - } - /** - * .flyteidl.artifact.SearchOptions options = 5; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.SearchOptions, flyteidl.artifact.Artifacts.SearchOptions.Builder, flyteidl.artifact.Artifacts.SearchOptionsOrBuilder> - getOptionsFieldBuilder() { - if (optionsBuilder_ == null) { - optionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.artifact.Artifacts.SearchOptions, flyteidl.artifact.Artifacts.SearchOptions.Builder, flyteidl.artifact.Artifacts.SearchOptionsOrBuilder>( - getOptions(), - getParentForChildren(), - isClean()); - options_ = null; - } - return optionsBuilder_; - } - - private java.lang.Object token_ = ""; - /** - * string token = 6; - */ - public java.lang.String getToken() { - java.lang.Object ref = token_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - token_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string token = 6; - */ - public com.google.protobuf.ByteString - getTokenBytes() { - java.lang.Object ref = token_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - token_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string token = 6; - */ - public Builder setToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - token_ = value; - onChanged(); - return this; - } - /** - * string token = 6; - */ - public Builder clearToken() { - - token_ = getDefaultInstance().getToken(); - 
onChanged(); - return this; - } - /** - * string token = 6; - */ - public Builder setTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - token_ = value; - onChanged(); - return this; - } - - private int limit_ ; - /** - * int32 limit = 7; - */ - public int getLimit() { - return limit_; - } - /** - * int32 limit = 7; - */ - public Builder setLimit(int value) { - - limit_ = value; - onChanged(); - return this; - } - /** - * int32 limit = 7; - */ - public Builder clearLimit() { - - limit_ = 0; - onChanged(); - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.SearchArtifactsRequest) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.SearchArtifactsRequest) - private static final flyteidl.artifact.Artifacts.SearchArtifactsRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.SearchArtifactsRequest(); - } - - public static flyteidl.artifact.Artifacts.SearchArtifactsRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public SearchArtifactsRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SearchArtifactsRequest(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override 
- public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchArtifactsRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface SearchArtifactsResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.SearchArtifactsResponse) - com.google.protobuf.MessageOrBuilder { - - /** - *
-     * If artifact specs are not requested, the resultant artifacts may be empty.
-     * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - java.util.List - getArtifactsList(); - /** - *
-     * If artifact specs are not requested, the resultant artifacts may be empty.
-     * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - flyteidl.artifact.Artifacts.Artifact getArtifacts(int index); - /** - *
-     * If artifact specs are not requested, the resultant artifacts may be empty.
-     * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - int getArtifactsCount(); - /** - *
-     * If artifact specs are not requested, the resultant artifacts may be empty.
-     * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - java.util.List - getArtifactsOrBuilderList(); - /** - *
-     * If artifact specs are not requested, the resultant artifacts may be empty.
-     * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - flyteidl.artifact.Artifacts.ArtifactOrBuilder getArtifactsOrBuilder( - int index); - - /** - *
-     * continuation token if relevant.
-     * 
- * - * string token = 2; - */ - java.lang.String getToken(); - /** - *
-     * continuation token if relevant.
-     * 
- * - * string token = 2; - */ - com.google.protobuf.ByteString - getTokenBytes(); - } - /** - * Protobuf type {@code flyteidl.artifact.SearchArtifactsResponse} - */ - public static final class SearchArtifactsResponse extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.SearchArtifactsResponse) - SearchArtifactsResponseOrBuilder { - private static final long serialVersionUID = 0L; - // Use SearchArtifactsResponse.newBuilder() to construct. - private SearchArtifactsResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private SearchArtifactsResponse() { - artifacts_ = java.util.Collections.emptyList(); - token_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private SearchArtifactsResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - artifacts_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - artifacts_.add( - input.readMessage(flyteidl.artifact.Artifacts.Artifact.parser(), extensionRegistry)); - break; - } - case 18: { - java.lang.String s = input.readStringRequireUtf8(); - - token_ = s; - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException 
e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - artifacts_ = java.util.Collections.unmodifiableList(artifacts_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchArtifactsResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchArtifactsResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.SearchArtifactsResponse.class, flyteidl.artifact.Artifacts.SearchArtifactsResponse.Builder.class); - } - - private int bitField0_; - public static final int ARTIFACTS_FIELD_NUMBER = 1; - private java.util.List artifacts_; - /** - *
-     * If artifact specs are not requested, the resultant artifacts may be empty.
-     * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public java.util.List getArtifactsList() { - return artifacts_; - } - /** - *
-     * If artifact specs are not requested, the resultant artifacts may be empty.
-     * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public java.util.List - getArtifactsOrBuilderList() { - return artifacts_; - } - /** - *
-     * If artifact specs are not requested, the resultant artifacts may be empty.
-     * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public int getArtifactsCount() { - return artifacts_.size(); - } - /** - *
-     * If artifact specs are not requested, the resultant artifacts may be empty.
-     * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public flyteidl.artifact.Artifacts.Artifact getArtifacts(int index) { - return artifacts_.get(index); - } - /** - *
-     * If artifact specs are not requested, the resultant artifacts may be empty.
-     * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactOrBuilder getArtifactsOrBuilder( - int index) { - return artifacts_.get(index); - } - - public static final int TOKEN_FIELD_NUMBER = 2; - private volatile java.lang.Object token_; - /** - *
-     * continuation token if relevant.
-     * 
- * - * string token = 2; - */ - public java.lang.String getToken() { - java.lang.Object ref = token_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - token_ = s; - return s; - } - } - /** - *
-     * continuation token if relevant.
-     * 
- * - * string token = 2; - */ - public com.google.protobuf.ByteString - getTokenBytes() { - java.lang.Object ref = token_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - token_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - for (int i = 0; i < artifacts_.size(); i++) { - output.writeMessage(1, artifacts_.get(i)); - } - if (!getTokenBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 2, token_); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < artifacts_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, artifacts_.get(i)); - } - if (!getTokenBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, token_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.SearchArtifactsResponse)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.SearchArtifactsResponse other = (flyteidl.artifact.Artifacts.SearchArtifactsResponse) obj; - - if (!getArtifactsList() - .equals(other.getArtifactsList())) return false; - if (!getToken() - .equals(other.getToken())) return false; - if 
(!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (getArtifactsCount() > 0) { - hash = (37 * hash) + ARTIFACTS_FIELD_NUMBER; - hash = (53 * hash) + getArtifactsList().hashCode(); - } - hash = (37 * hash) + TOKEN_FIELD_NUMBER; - hash = (53 * hash) + getToken().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException 
{ - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder 
newBuilder(flyteidl.artifact.Artifacts.SearchArtifactsResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.SearchArtifactsResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.SearchArtifactsResponse) - flyteidl.artifact.Artifacts.SearchArtifactsResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchArtifactsResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchArtifactsResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.SearchArtifactsResponse.class, flyteidl.artifact.Artifacts.SearchArtifactsResponse.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.SearchArtifactsResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - getArtifactsFieldBuilder(); - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (artifactsBuilder_ == null) { - artifacts_ = 
java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - artifactsBuilder_.clear(); - } - token_ = ""; - - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_SearchArtifactsResponse_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchArtifactsResponse getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.SearchArtifactsResponse.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchArtifactsResponse build() { - flyteidl.artifact.Artifacts.SearchArtifactsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchArtifactsResponse buildPartial() { - flyteidl.artifact.Artifacts.SearchArtifactsResponse result = new flyteidl.artifact.Artifacts.SearchArtifactsResponse(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (artifactsBuilder_ == null) { - if (((bitField0_ & 0x00000001) != 0)) { - artifacts_ = java.util.Collections.unmodifiableList(artifacts_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.artifacts_ = artifacts_; - } else { - result.artifacts_ = artifactsBuilder_.build(); - } - result.token_ = token_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - 
com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.SearchArtifactsResponse) { - return mergeFrom((flyteidl.artifact.Artifacts.SearchArtifactsResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.SearchArtifactsResponse other) { - if (other == flyteidl.artifact.Artifacts.SearchArtifactsResponse.getDefaultInstance()) return this; - if (artifactsBuilder_ == null) { - if (!other.artifacts_.isEmpty()) { - if (artifacts_.isEmpty()) { - artifacts_ = other.artifacts_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureArtifactsIsMutable(); - artifacts_.addAll(other.artifacts_); - } - onChanged(); - } - } else { - if (!other.artifacts_.isEmpty()) { - if (artifactsBuilder_.isEmpty()) { - artifactsBuilder_.dispose(); - artifactsBuilder_ = null; - artifacts_ = other.artifacts_; - bitField0_ = (bitField0_ & ~0x00000001); - artifactsBuilder_ = - com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
- getArtifactsFieldBuilder() : null; - } else { - artifactsBuilder_.addAllMessages(other.artifacts_); - } - } - } - if (!other.getToken().isEmpty()) { - token_ = other.token_; - onChanged(); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.SearchArtifactsResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.SearchArtifactsResponse) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.util.List artifacts_ = - java.util.Collections.emptyList(); - private void ensureArtifactsIsMutable() { - if (!((bitField0_ & 0x00000001) != 0)) { - artifacts_ = new java.util.ArrayList(artifacts_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.artifact.Artifacts.Artifact, flyteidl.artifact.Artifacts.Artifact.Builder, flyteidl.artifact.Artifacts.ArtifactOrBuilder> artifactsBuilder_; - - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public java.util.List getArtifactsList() { - if (artifactsBuilder_ == null) { - return java.util.Collections.unmodifiableList(artifacts_); - } else { - return artifactsBuilder_.getMessageList(); - } - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public int getArtifactsCount() { - if (artifactsBuilder_ == null) { - return artifacts_.size(); - } else { - return artifactsBuilder_.getCount(); - } - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public flyteidl.artifact.Artifacts.Artifact getArtifacts(int index) { - if (artifactsBuilder_ == null) { - return artifacts_.get(index); - } else { - return artifactsBuilder_.getMessage(index); - } - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public Builder setArtifacts( - int index, flyteidl.artifact.Artifacts.Artifact value) { - if (artifactsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureArtifactsIsMutable(); - artifacts_.set(index, value); - onChanged(); - } else { - artifactsBuilder_.setMessage(index, value); - } - return this; - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public Builder setArtifacts( - int index, flyteidl.artifact.Artifacts.Artifact.Builder builderForValue) { - if (artifactsBuilder_ == null) { - ensureArtifactsIsMutable(); - artifacts_.set(index, builderForValue.build()); - onChanged(); - } else { - artifactsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public Builder addArtifacts(flyteidl.artifact.Artifacts.Artifact value) { - if (artifactsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureArtifactsIsMutable(); - artifacts_.add(value); - onChanged(); - } else { - artifactsBuilder_.addMessage(value); - } - return this; - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public Builder addArtifacts( - int index, flyteidl.artifact.Artifacts.Artifact value) { - if (artifactsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureArtifactsIsMutable(); - artifacts_.add(index, value); - onChanged(); - } else { - artifactsBuilder_.addMessage(index, value); - } - return this; - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public Builder addArtifacts( - flyteidl.artifact.Artifacts.Artifact.Builder builderForValue) { - if (artifactsBuilder_ == null) { - ensureArtifactsIsMutable(); - artifacts_.add(builderForValue.build()); - onChanged(); - } else { - artifactsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public Builder addArtifacts( - int index, flyteidl.artifact.Artifacts.Artifact.Builder builderForValue) { - if (artifactsBuilder_ == null) { - ensureArtifactsIsMutable(); - artifacts_.add(index, builderForValue.build()); - onChanged(); - } else { - artifactsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public Builder addAllArtifacts( - java.lang.Iterable values) { - if (artifactsBuilder_ == null) { - ensureArtifactsIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, artifacts_); - onChanged(); - } else { - artifactsBuilder_.addAllMessages(values); - } - return this; - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public Builder clearArtifacts() { - if (artifactsBuilder_ == null) { - artifacts_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - artifactsBuilder_.clear(); - } - return this; - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public Builder removeArtifacts(int index) { - if (artifactsBuilder_ == null) { - ensureArtifactsIsMutable(); - artifacts_.remove(index); - onChanged(); - } else { - artifactsBuilder_.remove(index); - } - return this; - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public flyteidl.artifact.Artifacts.Artifact.Builder getArtifactsBuilder( - int index) { - return getArtifactsFieldBuilder().getBuilder(index); - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactOrBuilder getArtifactsOrBuilder( - int index) { - if (artifactsBuilder_ == null) { - return artifacts_.get(index); } else { - return artifactsBuilder_.getMessageOrBuilder(index); - } - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public java.util.List - getArtifactsOrBuilderList() { - if (artifactsBuilder_ != null) { - return artifactsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(artifacts_); - } - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public flyteidl.artifact.Artifacts.Artifact.Builder addArtifactsBuilder() { - return getArtifactsFieldBuilder().addBuilder( - flyteidl.artifact.Artifacts.Artifact.getDefaultInstance()); - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public flyteidl.artifact.Artifacts.Artifact.Builder addArtifactsBuilder( - int index) { - return getArtifactsFieldBuilder().addBuilder( - index, flyteidl.artifact.Artifacts.Artifact.getDefaultInstance()); - } - /** - *
-       * If artifact specs are not requested, the resultant artifacts may be empty.
-       * 
- * - * repeated .flyteidl.artifact.Artifact artifacts = 1; - */ - public java.util.List - getArtifactsBuilderList() { - return getArtifactsFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.artifact.Artifacts.Artifact, flyteidl.artifact.Artifacts.Artifact.Builder, flyteidl.artifact.Artifacts.ArtifactOrBuilder> - getArtifactsFieldBuilder() { - if (artifactsBuilder_ == null) { - artifactsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.artifact.Artifacts.Artifact, flyteidl.artifact.Artifacts.Artifact.Builder, flyteidl.artifact.Artifacts.ArtifactOrBuilder>( - artifacts_, - ((bitField0_ & 0x00000001) != 0), - getParentForChildren(), - isClean()); - artifacts_ = null; - } - return artifactsBuilder_; - } - - private java.lang.Object token_ = ""; - /** - *
-       * continuation token if relevant.
-       * 
- * - * string token = 2; - */ - public java.lang.String getToken() { - java.lang.Object ref = token_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - token_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - *
-       * continuation token if relevant.
-       * 
- * - * string token = 2; - */ - public com.google.protobuf.ByteString - getTokenBytes() { - java.lang.Object ref = token_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - token_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - *
-       * continuation token if relevant.
-       * 
- * - * string token = 2; - */ - public Builder setToken( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - token_ = value; - onChanged(); - return this; - } - /** - *
-       * continuation token if relevant.
-       * 
- * - * string token = 2; - */ - public Builder clearToken() { - - token_ = getDefaultInstance().getToken(); - onChanged(); - return this; - } - /** - *
-       * continuation token if relevant.
-       * 
- * - * string token = 2; - */ - public Builder setTokenBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - - token_ = value; - onChanged(); - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.SearchArtifactsResponse) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.SearchArtifactsResponse) - private static final flyteidl.artifact.Artifacts.SearchArtifactsResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.SearchArtifactsResponse(); - } - - public static flyteidl.artifact.Artifacts.SearchArtifactsResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public SearchArtifactsResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new SearchArtifactsResponse(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.SearchArtifactsResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface FindByWorkflowExecRequestOrBuilder extends - // 
@@protoc_insertion_point(interface_extends:flyteidl.artifact.FindByWorkflowExecRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - boolean hasExecId(); - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getExecId(); - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getExecIdOrBuilder(); - - /** - * .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - */ - int getDirectionValue(); - /** - * .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - */ - flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction getDirection(); - } - /** - * Protobuf type {@code flyteidl.artifact.FindByWorkflowExecRequest} - */ - public static final class FindByWorkflowExecRequest extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.FindByWorkflowExecRequest) - FindByWorkflowExecRequestOrBuilder { - private static final long serialVersionUID = 0L; - // Use FindByWorkflowExecRequest.newBuilder() to construct. 
- private FindByWorkflowExecRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private FindByWorkflowExecRequest() { - direction_ = 0; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private FindByWorkflowExecRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder subBuilder = null; - if (execId_ != null) { - subBuilder = execId_.toBuilder(); - } - execId_ = input.readMessage(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(execId_); - execId_ = subBuilder.buildPartial(); - } - - break; - } - case 16: { - int rawValue = input.readEnum(); - - direction_ = rawValue; - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_FindByWorkflowExecRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_FindByWorkflowExecRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.class, flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Builder.class); - } - - /** - * Protobuf enum {@code flyteidl.artifact.FindByWorkflowExecRequest.Direction} - */ - public enum Direction - implements com.google.protobuf.ProtocolMessageEnum { - /** - * INPUTS = 0; - */ - INPUTS(0), - /** - * OUTPUTS = 1; - */ - OUTPUTS(1), - UNRECOGNIZED(-1), - ; - - /** - * INPUTS = 0; - */ - public static final int INPUTS_VALUE = 0; - /** - * OUTPUTS = 1; - */ - public static final int OUTPUTS_VALUE = 1; - - - public final int getNumber() { - if (this == UNRECOGNIZED) { - throw new java.lang.IllegalArgumentException( - "Can't get the number of an unknown enum value."); - } - return value; - } - - /** - * @deprecated Use {@link #forNumber(int)} instead. 
- */ - @java.lang.Deprecated - public static Direction valueOf(int value) { - return forNumber(value); - } - - public static Direction forNumber(int value) { - switch (value) { - case 0: return INPUTS; - case 1: return OUTPUTS; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static final com.google.protobuf.Internal.EnumLiteMap< - Direction> internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public Direction findValueByNumber(int number) { - return Direction.forNumber(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(ordinal()); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.getDescriptor().getEnumTypes().get(0); - } - - private static final Direction[] VALUES = values(); - - public static Direction valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - if (desc.getIndex() == -1) { - return UNRECOGNIZED; - } - return VALUES[desc.getIndex()]; - } - - private final int value; - - private Direction(int value) { - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:flyteidl.artifact.FindByWorkflowExecRequest.Direction) - } - - public static final int EXEC_ID_FIELD_NUMBER = 1; - private flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier execId_; - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - public boolean hasExecId() { - return execId_ != null; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier 
exec_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getExecId() { - return execId_ == null ? flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.getDefaultInstance() : execId_; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getExecIdOrBuilder() { - return getExecId(); - } - - public static final int DIRECTION_FIELD_NUMBER = 2; - private int direction_; - /** - * .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - */ - public int getDirectionValue() { - return direction_; - } - /** - * .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - */ - public flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction getDirection() { - @SuppressWarnings("deprecation") - flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction result = flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction.valueOf(direction_); - return result == null ? 
flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction.UNRECOGNIZED : result; - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (execId_ != null) { - output.writeMessage(1, getExecId()); - } - if (direction_ != flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction.INPUTS.getNumber()) { - output.writeEnum(2, direction_); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (execId_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getExecId()); - } - if (direction_ != flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction.INPUTS.getNumber()) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, direction_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.FindByWorkflowExecRequest)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.FindByWorkflowExecRequest other = (flyteidl.artifact.Artifacts.FindByWorkflowExecRequest) obj; - - if (hasExecId() != other.hasExecId()) return false; - if (hasExecId()) { - if (!getExecId() - .equals(other.getExecId())) return false; - } - if (direction_ != other.direction_) return false; - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return 
memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasExecId()) { - hash = (37 * hash) + EXEC_ID_FIELD_NUMBER; - hash = (53 * hash) + getExecId().hashCode(); - } - hash = (37 * hash) + DIRECTION_FIELD_NUMBER; - hash = (53 * hash) + direction_; - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseFrom(java.io.InputStream input) - throws 
java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.FindByWorkflowExecRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == 
DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.FindByWorkflowExecRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.FindByWorkflowExecRequest) - flyteidl.artifact.Artifacts.FindByWorkflowExecRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_FindByWorkflowExecRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_FindByWorkflowExecRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.class, flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (execIdBuilder_ == null) { - execId_ = null; - } else { - execId_ = null; - execIdBuilder_ = null; - } - direction_ = 0; - - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_FindByWorkflowExecRequest_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.FindByWorkflowExecRequest getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.FindByWorkflowExecRequest build() { - flyteidl.artifact.Artifacts.FindByWorkflowExecRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.FindByWorkflowExecRequest buildPartial() { - flyteidl.artifact.Artifacts.FindByWorkflowExecRequest result = new flyteidl.artifact.Artifacts.FindByWorkflowExecRequest(this); - if (execIdBuilder_ == null) { - result.execId_ = execId_; - } else { - result.execId_ = execIdBuilder_.build(); - } - result.direction_ = direction_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - 
public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.FindByWorkflowExecRequest) { - return mergeFrom((flyteidl.artifact.Artifacts.FindByWorkflowExecRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.FindByWorkflowExecRequest other) { - if (other == flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.getDefaultInstance()) return this; - if (other.hasExecId()) { - mergeExecId(other.getExecId()); - } - if (other.direction_ != 0) { - setDirectionValue(other.getDirectionValue()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.FindByWorkflowExecRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.FindByWorkflowExecRequest) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier execId_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder> execIdBuilder_; - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - public boolean hasExecId() { - return execIdBuilder_ != null || execId_ != null; - } - /** - * 
.flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getExecId() { - if (execIdBuilder_ == null) { - return execId_ == null ? flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.getDefaultInstance() : execId_; - } else { - return execIdBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - public Builder setExecId(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier value) { - if (execIdBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - execId_ = value; - onChanged(); - } else { - execIdBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - public Builder setExecId( - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder builderForValue) { - if (execIdBuilder_ == null) { - execId_ = builderForValue.build(); - onChanged(); - } else { - execIdBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - public Builder mergeExecId(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier value) { - if (execIdBuilder_ == null) { - if (execId_ != null) { - execId_ = - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.newBuilder(execId_).mergeFrom(value).buildPartial(); - } else { - execId_ = value; - } - onChanged(); - } else { - execIdBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - public Builder clearExecId() { - if (execIdBuilder_ == null) { - execId_ = null; - onChanged(); - } else { - execId_ = null; - execIdBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder getExecIdBuilder() { - - 
onChanged(); - return getExecIdFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getExecIdOrBuilder() { - if (execIdBuilder_ != null) { - return execIdBuilder_.getMessageOrBuilder(); - } else { - return execId_ == null ? - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.getDefaultInstance() : execId_; - } - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier exec_id = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder> - getExecIdFieldBuilder() { - if (execIdBuilder_ == null) { - execIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder>( - getExecId(), - getParentForChildren(), - isClean()); - execId_ = null; - } - return execIdBuilder_; - } - - private int direction_ = 0; - /** - * .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - */ - public int getDirectionValue() { - return direction_; - } - /** - * .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - */ - public Builder setDirectionValue(int value) { - direction_ = value; - onChanged(); - return this; - } - /** - * .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - */ - public flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction getDirection() { - @SuppressWarnings("deprecation") - flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction result = flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction.valueOf(direction_); - return result == null ? 
flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction.UNRECOGNIZED : result; - } - /** - * .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - */ - public Builder setDirection(flyteidl.artifact.Artifacts.FindByWorkflowExecRequest.Direction value) { - if (value == null) { - throw new NullPointerException(); - } - - direction_ = value.getNumber(); - onChanged(); - return this; - } - /** - * .flyteidl.artifact.FindByWorkflowExecRequest.Direction direction = 2; - */ - public Builder clearDirection() { - - direction_ = 0; - onChanged(); - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.FindByWorkflowExecRequest) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.FindByWorkflowExecRequest) - private static final flyteidl.artifact.Artifacts.FindByWorkflowExecRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.FindByWorkflowExecRequest(); - } - - public static flyteidl.artifact.Artifacts.FindByWorkflowExecRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public FindByWorkflowExecRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new FindByWorkflowExecRequest(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public 
com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.FindByWorkflowExecRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface AddTagRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.AddTagRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - boolean hasArtifactId(); - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - flyteidl.core.ArtifactId.ArtifactID getArtifactId(); - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder(); - - /** - * string value = 2; - */ - java.lang.String getValue(); - /** - * string value = 2; - */ - com.google.protobuf.ByteString - getValueBytes(); - - /** - *
-     * If true, and another version already has the specified kind/value, set this version instead
-     * 
- * - * bool overwrite = 3; - */ - boolean getOverwrite(); - } - /** - *
-   * Aliases identify a particular version of an artifact. They are different than tags in that they
-   * have to be unique for a given artifact project/domain/name. That is, for a given project/domain/name/kind,
-   * at most one version can have any given value at any point.
-   * 
- * - * Protobuf type {@code flyteidl.artifact.AddTagRequest} - */ - public static final class AddTagRequest extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.AddTagRequest) - AddTagRequestOrBuilder { - private static final long serialVersionUID = 0L; - // Use AddTagRequest.newBuilder() to construct. - private AddTagRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private AddTagRequest() { - value_ = ""; - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private AddTagRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.ArtifactId.ArtifactID.Builder subBuilder = null; - if (artifactId_ != null) { - subBuilder = artifactId_.toBuilder(); - } - artifactId_ = input.readMessage(flyteidl.core.ArtifactId.ArtifactID.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(artifactId_); - artifactId_ = subBuilder.buildPartial(); - } - - break; - } - case 18: { - java.lang.String s = input.readStringRequireUtf8(); - - value_ = s; - break; - } - case 24: { - - overwrite_ = input.readBool(); - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } 
catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_AddTagRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_AddTagRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.AddTagRequest.class, flyteidl.artifact.Artifacts.AddTagRequest.Builder.class); - } - - public static final int ARTIFACT_ID_FIELD_NUMBER = 1; - private flyteidl.core.ArtifactId.ArtifactID artifactId_; - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public boolean hasArtifactId() { - return artifactId_ != null; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactID getArtifactId() { - return artifactId_ == null ? 
flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance() : artifactId_; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder() { - return getArtifactId(); - } - - public static final int VALUE_FIELD_NUMBER = 2; - private volatile java.lang.Object value_; - /** - * string value = 2; - */ - public java.lang.String getValue() { - java.lang.Object ref = value_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - value_ = s; - return s; - } - } - /** - * string value = 2; - */ - public com.google.protobuf.ByteString - getValueBytes() { - java.lang.Object ref = value_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - value_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int OVERWRITE_FIELD_NUMBER = 3; - private boolean overwrite_; - /** - *
-     * If true, and another version already has the specified kind/value, set this version instead
-     * 
- * - * bool overwrite = 3; - */ - public boolean getOverwrite() { - return overwrite_; - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (artifactId_ != null) { - output.writeMessage(1, getArtifactId()); - } - if (!getValueBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 2, value_); - } - if (overwrite_ != false) { - output.writeBool(3, overwrite_); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (artifactId_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getArtifactId()); - } - if (!getValueBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, value_); - } - if (overwrite_ != false) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, overwrite_); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.AddTagRequest)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.AddTagRequest other = (flyteidl.artifact.Artifacts.AddTagRequest) obj; - - if (hasArtifactId() != other.hasArtifactId()) return false; - if (hasArtifactId()) { - if (!getArtifactId() - .equals(other.getArtifactId())) return false; - } - if (!getValue() - .equals(other.getValue())) return false; - if (getOverwrite() - != other.getOverwrite()) return false; - if 
(!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasArtifactId()) { - hash = (37 * hash) + ARTIFACT_ID_FIELD_NUMBER; - hash = (53 * hash) + getArtifactId().hashCode(); - } - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); - hash = (37 * hash) + OVERWRITE_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( - getOverwrite()); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.AddTagRequest parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.AddTagRequest parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.AddTagRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.AddTagRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.AddTagRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.AddTagRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.AddTagRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.AddTagRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.AddTagRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.AddTagRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.AddTagRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.AddTagRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder 
newBuilder(flyteidl.artifact.Artifacts.AddTagRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - *
-     * Aliases identify a particular version of an artifact. They are different than tags in that they
-     * have to be unique for a given artifact project/domain/name. That is, for a given project/domain/name/kind,
-     * at most one version can have any given value at any point.
-     * 
- * - * Protobuf type {@code flyteidl.artifact.AddTagRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.AddTagRequest) - flyteidl.artifact.Artifacts.AddTagRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_AddTagRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_AddTagRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.AddTagRequest.class, flyteidl.artifact.Artifacts.AddTagRequest.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.AddTagRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (artifactIdBuilder_ == null) { - artifactId_ = null; - } else { - artifactId_ = null; - artifactIdBuilder_ = null; - } - value_ = ""; - - overwrite_ = false; - - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_AddTagRequest_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.AddTagRequest getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.AddTagRequest.getDefaultInstance(); - } - - @java.lang.Override - public 
flyteidl.artifact.Artifacts.AddTagRequest build() { - flyteidl.artifact.Artifacts.AddTagRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.AddTagRequest buildPartial() { - flyteidl.artifact.Artifacts.AddTagRequest result = new flyteidl.artifact.Artifacts.AddTagRequest(this); - if (artifactIdBuilder_ == null) { - result.artifactId_ = artifactId_; - } else { - result.artifactId_ = artifactIdBuilder_.build(); - } - result.value_ = value_; - result.overwrite_ = overwrite_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.AddTagRequest) { - return mergeFrom((flyteidl.artifact.Artifacts.AddTagRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.AddTagRequest other) { - if (other == 
flyteidl.artifact.Artifacts.AddTagRequest.getDefaultInstance()) return this; - if (other.hasArtifactId()) { - mergeArtifactId(other.getArtifactId()); - } - if (!other.getValue().isEmpty()) { - value_ = other.value_; - onChanged(); - } - if (other.getOverwrite() != false) { - setOverwrite(other.getOverwrite()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.AddTagRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.AddTagRequest) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private flyteidl.core.ArtifactId.ArtifactID artifactId_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> artifactIdBuilder_; - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public boolean hasArtifactId() { - return artifactIdBuilder_ != null || artifactId_ != null; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactID getArtifactId() { - if (artifactIdBuilder_ == null) { - return artifactId_ == null ? 
flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance() : artifactId_; - } else { - return artifactIdBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public Builder setArtifactId(flyteidl.core.ArtifactId.ArtifactID value) { - if (artifactIdBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - artifactId_ = value; - onChanged(); - } else { - artifactIdBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public Builder setArtifactId( - flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { - if (artifactIdBuilder_ == null) { - artifactId_ = builderForValue.build(); - onChanged(); - } else { - artifactIdBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public Builder mergeArtifactId(flyteidl.core.ArtifactId.ArtifactID value) { - if (artifactIdBuilder_ == null) { - if (artifactId_ != null) { - artifactId_ = - flyteidl.core.ArtifactId.ArtifactID.newBuilder(artifactId_).mergeFrom(value).buildPartial(); - } else { - artifactId_ = value; - } - onChanged(); - } else { - artifactIdBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public Builder clearArtifactId() { - if (artifactIdBuilder_ == null) { - artifactId_ = null; - onChanged(); - } else { - artifactId_ = null; - artifactIdBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactID.Builder getArtifactIdBuilder() { - - onChanged(); - return getArtifactIdFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder() { - if (artifactIdBuilder_ != null) { - return artifactIdBuilder_.getMessageOrBuilder(); - } else { - return artifactId_ == null ? 
- flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance() : artifactId_; - } - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> - getArtifactIdFieldBuilder() { - if (artifactIdBuilder_ == null) { - artifactIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder>( - getArtifactId(), - getParentForChildren(), - isClean()); - artifactId_ = null; - } - return artifactIdBuilder_; - } - - private java.lang.Object value_ = ""; - /** - * string value = 2; - */ - public java.lang.String getValue() { - java.lang.Object ref = value_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - value_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string value = 2; - */ - public com.google.protobuf.ByteString - getValueBytes() { - java.lang.Object ref = value_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - value_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string value = 2; - */ - public Builder setValue( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - value_ = value; - onChanged(); - return this; - } - /** - * string value = 2; - */ - public Builder clearValue() { - - value_ = getDefaultInstance().getValue(); - onChanged(); - return this; - } - /** - * string value = 2; - */ - public Builder setValueBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - 
checkByteStringIsUtf8(value); - - value_ = value; - onChanged(); - return this; - } - - private boolean overwrite_ ; - /** - *
-       * If true, and another version already has the specified kind/value, set this version instead
-       * 
- * - * bool overwrite = 3; - */ - public boolean getOverwrite() { - return overwrite_; - } - /** - *
-       * If true, and another version already has the specified kind/value, set this version instead
-       * 
- * - * bool overwrite = 3; - */ - public Builder setOverwrite(boolean value) { - - overwrite_ = value; - onChanged(); - return this; - } - /** - *
-       * If true, and another version already has the specified kind/value, set this version instead
-       * 
- * - * bool overwrite = 3; - */ - public Builder clearOverwrite() { - - overwrite_ = false; - onChanged(); - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.AddTagRequest) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.AddTagRequest) - private static final flyteidl.artifact.Artifacts.AddTagRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.AddTagRequest(); - } - - public static flyteidl.artifact.Artifacts.AddTagRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public AddTagRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AddTagRequest(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.AddTagRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface AddTagResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.AddTagResponse) - com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code flyteidl.artifact.AddTagResponse} - */ - public static final class AddTagResponse extends - com.google.protobuf.GeneratedMessageV3 
implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.AddTagResponse) - AddTagResponseOrBuilder { - private static final long serialVersionUID = 0L; - // Use AddTagResponse.newBuilder() to construct. - private AddTagResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private AddTagResponse() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private AddTagResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_AddTagResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_AddTagResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - 
flyteidl.artifact.Artifacts.AddTagResponse.class, flyteidl.artifact.Artifacts.AddTagResponse.Builder.class); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.AddTagResponse)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.AddTagResponse other = (flyteidl.artifact.Artifacts.AddTagResponse) obj; - - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.AddTagResponse parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.AddTagResponse parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.AddTagResponse 
parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.AddTagResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.AddTagResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.AddTagResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.AddTagResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.AddTagResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.AddTagResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.AddTagResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static 
flyteidl.artifact.Artifacts.AddTagResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.AddTagResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.AddTagResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.AddTagResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.AddTagResponse) - flyteidl.artifact.Artifacts.AddTagResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_AddTagResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_AddTagResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - 
flyteidl.artifact.Artifacts.AddTagResponse.class, flyteidl.artifact.Artifacts.AddTagResponse.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.AddTagResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_AddTagResponse_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.AddTagResponse getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.AddTagResponse.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.AddTagResponse build() { - flyteidl.artifact.Artifacts.AddTagResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.AddTagResponse buildPartial() { - flyteidl.artifact.Artifacts.AddTagResponse result = new flyteidl.artifact.Artifacts.AddTagResponse(this); - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - 
com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.AddTagResponse) { - return mergeFrom((flyteidl.artifact.Artifacts.AddTagResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.AddTagResponse other) { - if (other == flyteidl.artifact.Artifacts.AddTagResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.AddTagResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.AddTagResponse) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final 
com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.AddTagResponse) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.AddTagResponse) - private static final flyteidl.artifact.Artifacts.AddTagResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.AddTagResponse(); - } - - public static flyteidl.artifact.Artifacts.AddTagResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public AddTagResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new AddTagResponse(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.AddTagResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface CreateTriggerRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.CreateTriggerRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - boolean hasTriggerLaunchPlan(); - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - flyteidl.admin.LaunchPlanOuterClass.LaunchPlan getTriggerLaunchPlan(); - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - flyteidl.admin.LaunchPlanOuterClass.LaunchPlanOrBuilder getTriggerLaunchPlanOrBuilder(); - } - /** - * Protobuf type {@code flyteidl.artifact.CreateTriggerRequest} - */ - public static final class 
CreateTriggerRequest extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.CreateTriggerRequest) - CreateTriggerRequestOrBuilder { - private static final long serialVersionUID = 0L; - // Use CreateTriggerRequest.newBuilder() to construct. - private CreateTriggerRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private CreateTriggerRequest() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private CreateTriggerRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.admin.LaunchPlanOuterClass.LaunchPlan.Builder subBuilder = null; - if (triggerLaunchPlan_ != null) { - subBuilder = triggerLaunchPlan_.toBuilder(); - } - triggerLaunchPlan_ = input.readMessage(flyteidl.admin.LaunchPlanOuterClass.LaunchPlan.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(triggerLaunchPlan_); - triggerLaunchPlan_ = subBuilder.buildPartial(); - } - - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - 
this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateTriggerRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateTriggerRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.CreateTriggerRequest.class, flyteidl.artifact.Artifacts.CreateTriggerRequest.Builder.class); - } - - public static final int TRIGGER_LAUNCH_PLAN_FIELD_NUMBER = 1; - private flyteidl.admin.LaunchPlanOuterClass.LaunchPlan triggerLaunchPlan_; - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - public boolean hasTriggerLaunchPlan() { - return triggerLaunchPlan_ != null; - } - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - public flyteidl.admin.LaunchPlanOuterClass.LaunchPlan getTriggerLaunchPlan() { - return triggerLaunchPlan_ == null ? 
flyteidl.admin.LaunchPlanOuterClass.LaunchPlan.getDefaultInstance() : triggerLaunchPlan_; - } - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - public flyteidl.admin.LaunchPlanOuterClass.LaunchPlanOrBuilder getTriggerLaunchPlanOrBuilder() { - return getTriggerLaunchPlan(); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (triggerLaunchPlan_ != null) { - output.writeMessage(1, getTriggerLaunchPlan()); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (triggerLaunchPlan_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getTriggerLaunchPlan()); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.CreateTriggerRequest)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.CreateTriggerRequest other = (flyteidl.artifact.Artifacts.CreateTriggerRequest) obj; - - if (hasTriggerLaunchPlan() != other.hasTriggerLaunchPlan()) return false; - if (hasTriggerLaunchPlan()) { - if (!getTriggerLaunchPlan() - .equals(other.getTriggerLaunchPlan())) return false; - } - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if 
(hasTriggerLaunchPlan()) { - hash = (37 * hash) + TRIGGER_LAUNCH_PLAN_FIELD_NUMBER; - hash = (53 * hash) + getTriggerLaunchPlan().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.CreateTriggerRequest parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.CreateTriggerRequest parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateTriggerRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.CreateTriggerRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateTriggerRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.CreateTriggerRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateTriggerRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateTriggerRequest 
parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateTriggerRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateTriggerRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateTriggerRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateTriggerRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.CreateTriggerRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.CreateTriggerRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.CreateTriggerRequest) - flyteidl.artifact.Artifacts.CreateTriggerRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateTriggerRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateTriggerRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.CreateTriggerRequest.class, flyteidl.artifact.Artifacts.CreateTriggerRequest.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.CreateTriggerRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (triggerLaunchPlanBuilder_ == null) { - triggerLaunchPlan_ = null; - } else { - triggerLaunchPlan_ = null; - triggerLaunchPlanBuilder_ = null; - } - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateTriggerRequest_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateTriggerRequest getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.CreateTriggerRequest.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateTriggerRequest build() { - flyteidl.artifact.Artifacts.CreateTriggerRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateTriggerRequest buildPartial() { - flyteidl.artifact.Artifacts.CreateTriggerRequest result = new flyteidl.artifact.Artifacts.CreateTriggerRequest(this); - if (triggerLaunchPlanBuilder_ == null) { - result.triggerLaunchPlan_ = triggerLaunchPlan_; - } else { - result.triggerLaunchPlan_ = triggerLaunchPlanBuilder_.build(); - } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder 
mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.CreateTriggerRequest) { - return mergeFrom((flyteidl.artifact.Artifacts.CreateTriggerRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.CreateTriggerRequest other) { - if (other == flyteidl.artifact.Artifacts.CreateTriggerRequest.getDefaultInstance()) return this; - if (other.hasTriggerLaunchPlan()) { - mergeTriggerLaunchPlan(other.getTriggerLaunchPlan()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.CreateTriggerRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.CreateTriggerRequest) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private flyteidl.admin.LaunchPlanOuterClass.LaunchPlan triggerLaunchPlan_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.admin.LaunchPlanOuterClass.LaunchPlan, flyteidl.admin.LaunchPlanOuterClass.LaunchPlan.Builder, flyteidl.admin.LaunchPlanOuterClass.LaunchPlanOrBuilder> triggerLaunchPlanBuilder_; - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - public boolean hasTriggerLaunchPlan() { - return triggerLaunchPlanBuilder_ != null || triggerLaunchPlan_ != null; - } - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - public flyteidl.admin.LaunchPlanOuterClass.LaunchPlan getTriggerLaunchPlan() { 
- if (triggerLaunchPlanBuilder_ == null) { - return triggerLaunchPlan_ == null ? flyteidl.admin.LaunchPlanOuterClass.LaunchPlan.getDefaultInstance() : triggerLaunchPlan_; - } else { - return triggerLaunchPlanBuilder_.getMessage(); - } - } - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - public Builder setTriggerLaunchPlan(flyteidl.admin.LaunchPlanOuterClass.LaunchPlan value) { - if (triggerLaunchPlanBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - triggerLaunchPlan_ = value; - onChanged(); - } else { - triggerLaunchPlanBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - public Builder setTriggerLaunchPlan( - flyteidl.admin.LaunchPlanOuterClass.LaunchPlan.Builder builderForValue) { - if (triggerLaunchPlanBuilder_ == null) { - triggerLaunchPlan_ = builderForValue.build(); - onChanged(); - } else { - triggerLaunchPlanBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - public Builder mergeTriggerLaunchPlan(flyteidl.admin.LaunchPlanOuterClass.LaunchPlan value) { - if (triggerLaunchPlanBuilder_ == null) { - if (triggerLaunchPlan_ != null) { - triggerLaunchPlan_ = - flyteidl.admin.LaunchPlanOuterClass.LaunchPlan.newBuilder(triggerLaunchPlan_).mergeFrom(value).buildPartial(); - } else { - triggerLaunchPlan_ = value; - } - onChanged(); - } else { - triggerLaunchPlanBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - public Builder clearTriggerLaunchPlan() { - if (triggerLaunchPlanBuilder_ == null) { - triggerLaunchPlan_ = null; - onChanged(); - } else { - triggerLaunchPlan_ = null; - triggerLaunchPlanBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - public flyteidl.admin.LaunchPlanOuterClass.LaunchPlan.Builder 
getTriggerLaunchPlanBuilder() { - - onChanged(); - return getTriggerLaunchPlanFieldBuilder().getBuilder(); - } - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - public flyteidl.admin.LaunchPlanOuterClass.LaunchPlanOrBuilder getTriggerLaunchPlanOrBuilder() { - if (triggerLaunchPlanBuilder_ != null) { - return triggerLaunchPlanBuilder_.getMessageOrBuilder(); - } else { - return triggerLaunchPlan_ == null ? - flyteidl.admin.LaunchPlanOuterClass.LaunchPlan.getDefaultInstance() : triggerLaunchPlan_; - } - } - /** - * .flyteidl.admin.LaunchPlan trigger_launch_plan = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.admin.LaunchPlanOuterClass.LaunchPlan, flyteidl.admin.LaunchPlanOuterClass.LaunchPlan.Builder, flyteidl.admin.LaunchPlanOuterClass.LaunchPlanOrBuilder> - getTriggerLaunchPlanFieldBuilder() { - if (triggerLaunchPlanBuilder_ == null) { - triggerLaunchPlanBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.admin.LaunchPlanOuterClass.LaunchPlan, flyteidl.admin.LaunchPlanOuterClass.LaunchPlan.Builder, flyteidl.admin.LaunchPlanOuterClass.LaunchPlanOrBuilder>( - getTriggerLaunchPlan(), - getParentForChildren(), - isClean()); - triggerLaunchPlan_ = null; - } - return triggerLaunchPlanBuilder_; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.CreateTriggerRequest) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.CreateTriggerRequest) - private static final flyteidl.artifact.Artifacts.CreateTriggerRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.CreateTriggerRequest(); - } - - public 
static flyteidl.artifact.Artifacts.CreateTriggerRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public CreateTriggerRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CreateTriggerRequest(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateTriggerRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface CreateTriggerResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.CreateTriggerResponse) - com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code flyteidl.artifact.CreateTriggerResponse} - */ - public static final class CreateTriggerResponse extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.CreateTriggerResponse) - CreateTriggerResponseOrBuilder { - private static final long serialVersionUID = 0L; - // Use CreateTriggerResponse.newBuilder() to construct. 
- private CreateTriggerResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private CreateTriggerResponse() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private CreateTriggerResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateTriggerResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateTriggerResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.CreateTriggerResponse.class, flyteidl.artifact.Artifacts.CreateTriggerResponse.Builder.class); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { 
- byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.CreateTriggerResponse)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.CreateTriggerResponse other = (flyteidl.artifact.Artifacts.CreateTriggerResponse) obj; - - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.CreateTriggerResponse parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.CreateTriggerResponse parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateTriggerResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static 
flyteidl.artifact.Artifacts.CreateTriggerResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateTriggerResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.CreateTriggerResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateTriggerResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateTriggerResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateTriggerResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateTriggerResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.CreateTriggerResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws 
java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.CreateTriggerResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.CreateTriggerResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.CreateTriggerResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.CreateTriggerResponse) - flyteidl.artifact.Artifacts.CreateTriggerResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateTriggerResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateTriggerResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.CreateTriggerResponse.class, 
flyteidl.artifact.Artifacts.CreateTriggerResponse.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.CreateTriggerResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_CreateTriggerResponse_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateTriggerResponse getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.CreateTriggerResponse.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateTriggerResponse build() { - flyteidl.artifact.Artifacts.CreateTriggerResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateTriggerResponse buildPartial() { - flyteidl.artifact.Artifacts.CreateTriggerResponse result = new flyteidl.artifact.Artifacts.CreateTriggerResponse(this); - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - 
com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.CreateTriggerResponse) { - return mergeFrom((flyteidl.artifact.Artifacts.CreateTriggerResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.CreateTriggerResponse other) { - if (other == flyteidl.artifact.Artifacts.CreateTriggerResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.CreateTriggerResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.CreateTriggerResponse) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public 
final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.CreateTriggerResponse) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.CreateTriggerResponse) - private static final flyteidl.artifact.Artifacts.CreateTriggerResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.CreateTriggerResponse(); - } - - public static flyteidl.artifact.Artifacts.CreateTriggerResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public CreateTriggerResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new CreateTriggerResponse(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.CreateTriggerResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface DeleteTriggerRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.DeleteTriggerRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - boolean hasTriggerId(); - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - flyteidl.core.IdentifierOuterClass.Identifier getTriggerId(); - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getTriggerIdOrBuilder(); - } - /** - * Protobuf type {@code flyteidl.artifact.DeleteTriggerRequest} - */ - 
public static final class DeleteTriggerRequest extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.DeleteTriggerRequest) - DeleteTriggerRequestOrBuilder { - private static final long serialVersionUID = 0L; - // Use DeleteTriggerRequest.newBuilder() to construct. - private DeleteTriggerRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private DeleteTriggerRequest() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private DeleteTriggerRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.IdentifierOuterClass.Identifier.Builder subBuilder = null; - if (triggerId_ != null) { - subBuilder = triggerId_.toBuilder(); - } - triggerId_ = input.readMessage(flyteidl.core.IdentifierOuterClass.Identifier.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(triggerId_); - triggerId_ = subBuilder.buildPartial(); - } - - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = 
unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_DeleteTriggerRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_DeleteTriggerRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.DeleteTriggerRequest.class, flyteidl.artifact.Artifacts.DeleteTriggerRequest.Builder.class); - } - - public static final int TRIGGER_ID_FIELD_NUMBER = 1; - private flyteidl.core.IdentifierOuterClass.Identifier triggerId_; - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - public boolean hasTriggerId() { - return triggerId_ != null; - } - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.Identifier getTriggerId() { - return triggerId_ == null ? 
flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : triggerId_; - } - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getTriggerIdOrBuilder() { - return getTriggerId(); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (triggerId_ != null) { - output.writeMessage(1, getTriggerId()); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (triggerId_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getTriggerId()); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.DeleteTriggerRequest)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.DeleteTriggerRequest other = (flyteidl.artifact.Artifacts.DeleteTriggerRequest) obj; - - if (hasTriggerId() != other.hasTriggerId()) return false; - if (hasTriggerId()) { - if (!getTriggerId() - .equals(other.getTriggerId())) return false; - } - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasTriggerId()) { - hash = (37 * hash) + TRIGGER_ID_FIELD_NUMBER; - hash = (53 * hash) + 
getTriggerId().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.DeleteTriggerRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.DeleteTriggerRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.DeleteTriggerRequest) - flyteidl.artifact.Artifacts.DeleteTriggerRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_DeleteTriggerRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_DeleteTriggerRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.DeleteTriggerRequest.class, flyteidl.artifact.Artifacts.DeleteTriggerRequest.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.DeleteTriggerRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (triggerIdBuilder_ == null) { - triggerId_ = null; - } else { - triggerId_ = null; - triggerIdBuilder_ = null; - } - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_DeleteTriggerRequest_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.DeleteTriggerRequest getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.DeleteTriggerRequest.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.DeleteTriggerRequest build() { - flyteidl.artifact.Artifacts.DeleteTriggerRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.DeleteTriggerRequest buildPartial() { - flyteidl.artifact.Artifacts.DeleteTriggerRequest result = new flyteidl.artifact.Artifacts.DeleteTriggerRequest(this); - if (triggerIdBuilder_ == null) { - result.triggerId_ = triggerId_; - } else { - result.triggerId_ = triggerIdBuilder_.build(); - } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { 
- if (other instanceof flyteidl.artifact.Artifacts.DeleteTriggerRequest) { - return mergeFrom((flyteidl.artifact.Artifacts.DeleteTriggerRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.DeleteTriggerRequest other) { - if (other == flyteidl.artifact.Artifacts.DeleteTriggerRequest.getDefaultInstance()) return this; - if (other.hasTriggerId()) { - mergeTriggerId(other.getTriggerId()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.DeleteTriggerRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.DeleteTriggerRequest) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private flyteidl.core.IdentifierOuterClass.Identifier triggerId_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> triggerIdBuilder_; - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - public boolean hasTriggerId() { - return triggerIdBuilder_ != null || triggerId_ != null; - } - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.Identifier getTriggerId() { - if (triggerIdBuilder_ == null) { - return triggerId_ == null ? 
flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : triggerId_; - } else { - return triggerIdBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - public Builder setTriggerId(flyteidl.core.IdentifierOuterClass.Identifier value) { - if (triggerIdBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - triggerId_ = value; - onChanged(); - } else { - triggerIdBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - public Builder setTriggerId( - flyteidl.core.IdentifierOuterClass.Identifier.Builder builderForValue) { - if (triggerIdBuilder_ == null) { - triggerId_ = builderForValue.build(); - onChanged(); - } else { - triggerIdBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - public Builder mergeTriggerId(flyteidl.core.IdentifierOuterClass.Identifier value) { - if (triggerIdBuilder_ == null) { - if (triggerId_ != null) { - triggerId_ = - flyteidl.core.IdentifierOuterClass.Identifier.newBuilder(triggerId_).mergeFrom(value).buildPartial(); - } else { - triggerId_ = value; - } - onChanged(); - } else { - triggerIdBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - public Builder clearTriggerId() { - if (triggerIdBuilder_ == null) { - triggerId_ = null; - onChanged(); - } else { - triggerId_ = null; - triggerIdBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.Identifier.Builder getTriggerIdBuilder() { - - onChanged(); - return getTriggerIdFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getTriggerIdOrBuilder() { - if (triggerIdBuilder_ != null) { - return triggerIdBuilder_.getMessageOrBuilder(); - } else { - 
return triggerId_ == null ? - flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : triggerId_; - } - } - /** - * .flyteidl.core.Identifier trigger_id = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> - getTriggerIdFieldBuilder() { - if (triggerIdBuilder_ == null) { - triggerIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder>( - getTriggerId(), - getParentForChildren(), - isClean()); - triggerId_ = null; - } - return triggerIdBuilder_; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.DeleteTriggerRequest) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.DeleteTriggerRequest) - private static final flyteidl.artifact.Artifacts.DeleteTriggerRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.DeleteTriggerRequest(); - } - - public static flyteidl.artifact.Artifacts.DeleteTriggerRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public DeleteTriggerRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new 
DeleteTriggerRequest(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.DeleteTriggerRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface DeleteTriggerResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.DeleteTriggerResponse) - com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code flyteidl.artifact.DeleteTriggerResponse} - */ - public static final class DeleteTriggerResponse extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.DeleteTriggerResponse) - DeleteTriggerResponseOrBuilder { - private static final long serialVersionUID = 0L; - // Use DeleteTriggerResponse.newBuilder() to construct. - private DeleteTriggerResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private DeleteTriggerResponse() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private DeleteTriggerResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_DeleteTriggerResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_DeleteTriggerResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.DeleteTriggerResponse.class, flyteidl.artifact.Artifacts.DeleteTriggerResponse.Builder.class); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.DeleteTriggerResponse)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.DeleteTriggerResponse other = (flyteidl.artifact.Artifacts.DeleteTriggerResponse) obj; - - if (!unknownFields.equals(other.unknownFields)) return false; - 
return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static 
flyteidl.artifact.Artifacts.DeleteTriggerResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.DeleteTriggerResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.DeleteTriggerResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.DeleteTriggerResponse) - flyteidl.artifact.Artifacts.DeleteTriggerResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_DeleteTriggerResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_DeleteTriggerResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.DeleteTriggerResponse.class, flyteidl.artifact.Artifacts.DeleteTriggerResponse.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.DeleteTriggerResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_DeleteTriggerResponse_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.DeleteTriggerResponse 
getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.DeleteTriggerResponse.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.DeleteTriggerResponse build() { - flyteidl.artifact.Artifacts.DeleteTriggerResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.DeleteTriggerResponse buildPartial() { - flyteidl.artifact.Artifacts.DeleteTriggerResponse result = new flyteidl.artifact.Artifacts.DeleteTriggerResponse(this); - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.DeleteTriggerResponse) { - return mergeFrom((flyteidl.artifact.Artifacts.DeleteTriggerResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.DeleteTriggerResponse other) { - if 
(other == flyteidl.artifact.Artifacts.DeleteTriggerResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.DeleteTriggerResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.DeleteTriggerResponse) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.DeleteTriggerResponse) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.DeleteTriggerResponse) - private static final flyteidl.artifact.Artifacts.DeleteTriggerResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.DeleteTriggerResponse(); - } - - public static flyteidl.artifact.Artifacts.DeleteTriggerResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public DeleteTriggerResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeleteTriggerResponse(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.DeleteTriggerResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface ArtifactProducerOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.ArtifactProducer) - com.google.protobuf.MessageOrBuilder { - - /** - *
-     * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-     * Admin's domain.
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - boolean hasEntityId(); - /** - *
-     * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-     * Admin's domain.
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - flyteidl.core.IdentifierOuterClass.Identifier getEntityId(); - /** - *
-     * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-     * Admin's domain.
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getEntityIdOrBuilder(); - - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - boolean hasOutputs(); - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - flyteidl.core.Interface.VariableMap getOutputs(); - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - flyteidl.core.Interface.VariableMapOrBuilder getOutputsOrBuilder(); - } - /** - * Protobuf type {@code flyteidl.artifact.ArtifactProducer} - */ - public static final class ArtifactProducer extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.ArtifactProducer) - ArtifactProducerOrBuilder { - private static final long serialVersionUID = 0L; - // Use ArtifactProducer.newBuilder() to construct. - private ArtifactProducer(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private ArtifactProducer() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ArtifactProducer( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.IdentifierOuterClass.Identifier.Builder subBuilder = null; - if (entityId_ != null) { - subBuilder = entityId_.toBuilder(); - } - entityId_ = input.readMessage(flyteidl.core.IdentifierOuterClass.Identifier.parser(), extensionRegistry); - if (subBuilder != null) { - 
subBuilder.mergeFrom(entityId_); - entityId_ = subBuilder.buildPartial(); - } - - break; - } - case 18: { - flyteidl.core.Interface.VariableMap.Builder subBuilder = null; - if (outputs_ != null) { - subBuilder = outputs_.toBuilder(); - } - outputs_ = input.readMessage(flyteidl.core.Interface.VariableMap.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(outputs_); - outputs_ = subBuilder.buildPartial(); - } - - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactProducer_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactProducer_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ArtifactProducer.class, flyteidl.artifact.Artifacts.ArtifactProducer.Builder.class); - } - - public static final int ENTITY_ID_FIELD_NUMBER = 1; - private flyteidl.core.IdentifierOuterClass.Identifier entityId_; - /** - *
-     * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-     * Admin's domain.
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public boolean hasEntityId() { - return entityId_ != null; - } - /** - *
-     * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-     * Admin's domain.
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.Identifier getEntityId() { - return entityId_ == null ? flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : entityId_; - } - /** - *
-     * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-     * Admin's domain.
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getEntityIdOrBuilder() { - return getEntityId(); - } - - public static final int OUTPUTS_FIELD_NUMBER = 2; - private flyteidl.core.Interface.VariableMap outputs_; - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - public boolean hasOutputs() { - return outputs_ != null; - } - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - public flyteidl.core.Interface.VariableMap getOutputs() { - return outputs_ == null ? flyteidl.core.Interface.VariableMap.getDefaultInstance() : outputs_; - } - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - public flyteidl.core.Interface.VariableMapOrBuilder getOutputsOrBuilder() { - return getOutputs(); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (entityId_ != null) { - output.writeMessage(1, getEntityId()); - } - if (outputs_ != null) { - output.writeMessage(2, getOutputs()); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (entityId_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getEntityId()); - } - if (outputs_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getOutputs()); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof 
flyteidl.artifact.Artifacts.ArtifactProducer)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.ArtifactProducer other = (flyteidl.artifact.Artifacts.ArtifactProducer) obj; - - if (hasEntityId() != other.hasEntityId()) return false; - if (hasEntityId()) { - if (!getEntityId() - .equals(other.getEntityId())) return false; - } - if (hasOutputs() != other.hasOutputs()) return false; - if (hasOutputs()) { - if (!getOutputs() - .equals(other.getOutputs())) return false; - } - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasEntityId()) { - hash = (37 * hash) + ENTITY_ID_FIELD_NUMBER; - hash = (53 * hash) + getEntityId().hashCode(); - } - if (hasOutputs()) { - hash = (37 * hash) + OUTPUTS_FIELD_NUMBER; - hash = (53 * hash) + getOutputs().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.ArtifactProducer parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactProducer parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactProducer parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactProducer parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactProducer parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactProducer parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactProducer parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactProducer parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactProducer parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactProducer parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactProducer parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactProducer parseFrom( - 
com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.ArtifactProducer prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.ArtifactProducer} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.ArtifactProducer) - flyteidl.artifact.Artifacts.ArtifactProducerOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactProducer_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactProducer_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ArtifactProducer.class, flyteidl.artifact.Artifacts.ArtifactProducer.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.ArtifactProducer.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - 
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (entityIdBuilder_ == null) { - entityId_ = null; - } else { - entityId_ = null; - entityIdBuilder_ = null; - } - if (outputsBuilder_ == null) { - outputs_ = null; - } else { - outputs_ = null; - outputsBuilder_ = null; - } - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactProducer_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactProducer getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.ArtifactProducer.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactProducer build() { - flyteidl.artifact.Artifacts.ArtifactProducer result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactProducer buildPartial() { - flyteidl.artifact.Artifacts.ArtifactProducer result = new flyteidl.artifact.Artifacts.ArtifactProducer(this); - if (entityIdBuilder_ == null) { - result.entityId_ = entityId_; - } else { - result.entityId_ = entityIdBuilder_.build(); - } - if (outputsBuilder_ == null) { - result.outputs_ = outputs_; - } else { - result.outputs_ = outputsBuilder_.build(); - } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override 
- public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.ArtifactProducer) { - return mergeFrom((flyteidl.artifact.Artifacts.ArtifactProducer)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.ArtifactProducer other) { - if (other == flyteidl.artifact.Artifacts.ArtifactProducer.getDefaultInstance()) return this; - if (other.hasEntityId()) { - mergeEntityId(other.getEntityId()); - } - if (other.hasOutputs()) { - mergeOutputs(other.getOutputs()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.ArtifactProducer parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.ArtifactProducer) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if 
(parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private flyteidl.core.IdentifierOuterClass.Identifier entityId_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> entityIdBuilder_; - /** - *
-       * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-       * Admin's domain.
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public boolean hasEntityId() { - return entityIdBuilder_ != null || entityId_ != null; - } - /** - *
-       * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-       * Admin's domain.
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.Identifier getEntityId() { - if (entityIdBuilder_ == null) { - return entityId_ == null ? flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : entityId_; - } else { - return entityIdBuilder_.getMessage(); - } - } - /** - *
-       * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-       * Admin's domain.
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public Builder setEntityId(flyteidl.core.IdentifierOuterClass.Identifier value) { - if (entityIdBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - entityId_ = value; - onChanged(); - } else { - entityIdBuilder_.setMessage(value); - } - - return this; - } - /** - *
-       * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-       * Admin's domain.
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public Builder setEntityId( - flyteidl.core.IdentifierOuterClass.Identifier.Builder builderForValue) { - if (entityIdBuilder_ == null) { - entityId_ = builderForValue.build(); - onChanged(); - } else { - entityIdBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - *
-       * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-       * Admin's domain.
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public Builder mergeEntityId(flyteidl.core.IdentifierOuterClass.Identifier value) { - if (entityIdBuilder_ == null) { - if (entityId_ != null) { - entityId_ = - flyteidl.core.IdentifierOuterClass.Identifier.newBuilder(entityId_).mergeFrom(value).buildPartial(); - } else { - entityId_ = value; - } - onChanged(); - } else { - entityIdBuilder_.mergeFrom(value); - } - - return this; - } - /** - *
-       * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-       * Admin's domain.
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public Builder clearEntityId() { - if (entityIdBuilder_ == null) { - entityId_ = null; - onChanged(); - } else { - entityId_ = null; - entityIdBuilder_ = null; - } - - return this; - } - /** - *
-       * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-       * Admin's domain.
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.Identifier.Builder getEntityIdBuilder() { - - onChanged(); - return getEntityIdFieldBuilder().getBuilder(); - } - /** - *
-       * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-       * Admin's domain.
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getEntityIdOrBuilder() { - if (entityIdBuilder_ != null) { - return entityIdBuilder_.getMessageOrBuilder(); - } else { - return entityId_ == null ? - flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : entityId_; - } - } - /** - *
-       * These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in
-       * Admin's domain.
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> - getEntityIdFieldBuilder() { - if (entityIdBuilder_ == null) { - entityIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder>( - getEntityId(), - getParentForChildren(), - isClean()); - entityId_ = null; - } - return entityIdBuilder_; - } - - private flyteidl.core.Interface.VariableMap outputs_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Interface.VariableMap, flyteidl.core.Interface.VariableMap.Builder, flyteidl.core.Interface.VariableMapOrBuilder> outputsBuilder_; - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - public boolean hasOutputs() { - return outputsBuilder_ != null || outputs_ != null; - } - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - public flyteidl.core.Interface.VariableMap getOutputs() { - if (outputsBuilder_ == null) { - return outputs_ == null ? 
flyteidl.core.Interface.VariableMap.getDefaultInstance() : outputs_; - } else { - return outputsBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - public Builder setOutputs(flyteidl.core.Interface.VariableMap value) { - if (outputsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - outputs_ = value; - onChanged(); - } else { - outputsBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - public Builder setOutputs( - flyteidl.core.Interface.VariableMap.Builder builderForValue) { - if (outputsBuilder_ == null) { - outputs_ = builderForValue.build(); - onChanged(); - } else { - outputsBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - public Builder mergeOutputs(flyteidl.core.Interface.VariableMap value) { - if (outputsBuilder_ == null) { - if (outputs_ != null) { - outputs_ = - flyteidl.core.Interface.VariableMap.newBuilder(outputs_).mergeFrom(value).buildPartial(); - } else { - outputs_ = value; - } - onChanged(); - } else { - outputsBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - public Builder clearOutputs() { - if (outputsBuilder_ == null) { - outputs_ = null; - onChanged(); - } else { - outputs_ = null; - outputsBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - public flyteidl.core.Interface.VariableMap.Builder getOutputsBuilder() { - - onChanged(); - return getOutputsFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - public flyteidl.core.Interface.VariableMapOrBuilder getOutputsOrBuilder() { - if (outputsBuilder_ != null) { - return outputsBuilder_.getMessageOrBuilder(); - } else { - return outputs_ == null ? 
- flyteidl.core.Interface.VariableMap.getDefaultInstance() : outputs_; - } - } - /** - * .flyteidl.core.VariableMap outputs = 2; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Interface.VariableMap, flyteidl.core.Interface.VariableMap.Builder, flyteidl.core.Interface.VariableMapOrBuilder> - getOutputsFieldBuilder() { - if (outputsBuilder_ == null) { - outputsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Interface.VariableMap, flyteidl.core.Interface.VariableMap.Builder, flyteidl.core.Interface.VariableMapOrBuilder>( - getOutputs(), - getParentForChildren(), - isClean()); - outputs_ = null; - } - return outputsBuilder_; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.ArtifactProducer) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ArtifactProducer) - private static final flyteidl.artifact.Artifacts.ArtifactProducer DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.ArtifactProducer(); - } - - public static flyteidl.artifact.Artifacts.ArtifactProducer getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public ArtifactProducer parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ArtifactProducer(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - 
@java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactProducer getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface RegisterProducerRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.RegisterProducerRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - java.util.List - getProducersList(); - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - flyteidl.artifact.Artifacts.ArtifactProducer getProducers(int index); - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - int getProducersCount(); - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - java.util.List - getProducersOrBuilderList(); - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - flyteidl.artifact.Artifacts.ArtifactProducerOrBuilder getProducersOrBuilder( - int index); - } - /** - * Protobuf type {@code flyteidl.artifact.RegisterProducerRequest} - */ - public static final class RegisterProducerRequest extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.RegisterProducerRequest) - RegisterProducerRequestOrBuilder { - private static final long serialVersionUID = 0L; - // Use RegisterProducerRequest.newBuilder() to construct. 
- private RegisterProducerRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private RegisterProducerRequest() { - producers_ = java.util.Collections.emptyList(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private RegisterProducerRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - producers_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - producers_.add( - input.readMessage(flyteidl.artifact.Artifacts.ArtifactProducer.parser(), extensionRegistry)); - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - producers_ = java.util.Collections.unmodifiableList(producers_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterProducerRequest_descriptor; - } - - @java.lang.Override - 
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterProducerRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.RegisterProducerRequest.class, flyteidl.artifact.Artifacts.RegisterProducerRequest.Builder.class); - } - - public static final int PRODUCERS_FIELD_NUMBER = 1; - private java.util.List producers_; - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public java.util.List getProducersList() { - return producers_; - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public java.util.List - getProducersOrBuilderList() { - return producers_; - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public int getProducersCount() { - return producers_.size(); - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactProducer getProducers(int index) { - return producers_.get(index); - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactProducerOrBuilder getProducersOrBuilder( - int index) { - return producers_.get(index); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - for (int i = 0; i < producers_.size(); i++) { - output.writeMessage(1, producers_.get(i)); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - 
for (int i = 0; i < producers_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, producers_.get(i)); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.RegisterProducerRequest)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.RegisterProducerRequest other = (flyteidl.artifact.Artifacts.RegisterProducerRequest) obj; - - if (!getProducersList() - .equals(other.getProducersList())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (getProducersCount() > 0) { - hash = (37 * hash) + PRODUCERS_FIELD_NUMBER; - hash = (53 * hash) + getProducersList().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseFrom( - com.google.protobuf.ByteString data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, 
input); - } - public static flyteidl.artifact.Artifacts.RegisterProducerRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.RegisterProducerRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.RegisterProducerRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.RegisterProducerRequest) - flyteidl.artifact.Artifacts.RegisterProducerRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterProducerRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterProducerRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.RegisterProducerRequest.class, flyteidl.artifact.Artifacts.RegisterProducerRequest.Builder.class); - } - - // Construct using 
flyteidl.artifact.Artifacts.RegisterProducerRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - getProducersFieldBuilder(); - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (producersBuilder_ == null) { - producers_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - producersBuilder_.clear(); - } - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterProducerRequest_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterProducerRequest getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.RegisterProducerRequest.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterProducerRequest build() { - flyteidl.artifact.Artifacts.RegisterProducerRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterProducerRequest buildPartial() { - flyteidl.artifact.Artifacts.RegisterProducerRequest result = new flyteidl.artifact.Artifacts.RegisterProducerRequest(this); - int from_bitField0_ = bitField0_; - if (producersBuilder_ == null) { - if (((bitField0_ & 0x00000001) != 0)) { - producers_ = java.util.Collections.unmodifiableList(producers_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.producers_ = producers_; - } else { - result.producers_ = producersBuilder_.build(); - } - onBuilt(); - return result; - } - - 
@java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.RegisterProducerRequest) { - return mergeFrom((flyteidl.artifact.Artifacts.RegisterProducerRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.RegisterProducerRequest other) { - if (other == flyteidl.artifact.Artifacts.RegisterProducerRequest.getDefaultInstance()) return this; - if (producersBuilder_ == null) { - if (!other.producers_.isEmpty()) { - if (producers_.isEmpty()) { - producers_ = other.producers_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureProducersIsMutable(); - producers_.addAll(other.producers_); - } - onChanged(); - } - } else { - if (!other.producers_.isEmpty()) { - if (producersBuilder_.isEmpty()) { - producersBuilder_.dispose(); - producersBuilder_ = null; - producers_ = other.producers_; - bitField0_ = (bitField0_ & ~0x00000001); - producersBuilder_ = - 
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? - getProducersFieldBuilder() : null; - } else { - producersBuilder_.addAllMessages(other.producers_); - } - } - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.RegisterProducerRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.RegisterProducerRequest) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.util.List producers_ = - java.util.Collections.emptyList(); - private void ensureProducersIsMutable() { - if (!((bitField0_ & 0x00000001) != 0)) { - producers_ = new java.util.ArrayList(producers_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactProducer, flyteidl.artifact.Artifacts.ArtifactProducer.Builder, flyteidl.artifact.Artifacts.ArtifactProducerOrBuilder> producersBuilder_; - - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public java.util.List getProducersList() { - if (producersBuilder_ == null) { - return java.util.Collections.unmodifiableList(producers_); - } else { - return producersBuilder_.getMessageList(); - } - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public int getProducersCount() { - if (producersBuilder_ == null) { - return producers_.size(); - } else { - return producersBuilder_.getCount(); 
- } - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactProducer getProducers(int index) { - if (producersBuilder_ == null) { - return producers_.get(index); - } else { - return producersBuilder_.getMessage(index); - } - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public Builder setProducers( - int index, flyteidl.artifact.Artifacts.ArtifactProducer value) { - if (producersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureProducersIsMutable(); - producers_.set(index, value); - onChanged(); - } else { - producersBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public Builder setProducers( - int index, flyteidl.artifact.Artifacts.ArtifactProducer.Builder builderForValue) { - if (producersBuilder_ == null) { - ensureProducersIsMutable(); - producers_.set(index, builderForValue.build()); - onChanged(); - } else { - producersBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public Builder addProducers(flyteidl.artifact.Artifacts.ArtifactProducer value) { - if (producersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureProducersIsMutable(); - producers_.add(value); - onChanged(); - } else { - producersBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public Builder addProducers( - int index, flyteidl.artifact.Artifacts.ArtifactProducer value) { - if (producersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureProducersIsMutable(); - producers_.add(index, value); - onChanged(); - } else { - producersBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated 
.flyteidl.artifact.ArtifactProducer producers = 1; - */ - public Builder addProducers( - flyteidl.artifact.Artifacts.ArtifactProducer.Builder builderForValue) { - if (producersBuilder_ == null) { - ensureProducersIsMutable(); - producers_.add(builderForValue.build()); - onChanged(); - } else { - producersBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public Builder addProducers( - int index, flyteidl.artifact.Artifacts.ArtifactProducer.Builder builderForValue) { - if (producersBuilder_ == null) { - ensureProducersIsMutable(); - producers_.add(index, builderForValue.build()); - onChanged(); - } else { - producersBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public Builder addAllProducers( - java.lang.Iterable values) { - if (producersBuilder_ == null) { - ensureProducersIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, producers_); - onChanged(); - } else { - producersBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public Builder clearProducers() { - if (producersBuilder_ == null) { - producers_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - producersBuilder_.clear(); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public Builder removeProducers(int index) { - if (producersBuilder_ == null) { - ensureProducersIsMutable(); - producers_.remove(index); - onChanged(); - } else { - producersBuilder_.remove(index); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactProducer.Builder getProducersBuilder( - int index) { - return 
getProducersFieldBuilder().getBuilder(index); - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactProducerOrBuilder getProducersOrBuilder( - int index) { - if (producersBuilder_ == null) { - return producers_.get(index); } else { - return producersBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public java.util.List - getProducersOrBuilderList() { - if (producersBuilder_ != null) { - return producersBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(producers_); - } - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactProducer.Builder addProducersBuilder() { - return getProducersFieldBuilder().addBuilder( - flyteidl.artifact.Artifacts.ArtifactProducer.getDefaultInstance()); - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactProducer.Builder addProducersBuilder( - int index) { - return getProducersFieldBuilder().addBuilder( - index, flyteidl.artifact.Artifacts.ArtifactProducer.getDefaultInstance()); - } - /** - * repeated .flyteidl.artifact.ArtifactProducer producers = 1; - */ - public java.util.List - getProducersBuilderList() { - return getProducersFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactProducer, flyteidl.artifact.Artifacts.ArtifactProducer.Builder, flyteidl.artifact.Artifacts.ArtifactProducerOrBuilder> - getProducersFieldBuilder() { - if (producersBuilder_ == null) { - producersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactProducer, flyteidl.artifact.Artifacts.ArtifactProducer.Builder, flyteidl.artifact.Artifacts.ArtifactProducerOrBuilder>( - producers_, - ((bitField0_ & 0x00000001) != 0), - 
getParentForChildren(), - isClean()); - producers_ = null; - } - return producersBuilder_; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.RegisterProducerRequest) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.RegisterProducerRequest) - private static final flyteidl.artifact.Artifacts.RegisterProducerRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.RegisterProducerRequest(); - } - - public static flyteidl.artifact.Artifacts.RegisterProducerRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public RegisterProducerRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegisterProducerRequest(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterProducerRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface ArtifactConsumerOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.ArtifactConsumer) - com.google.protobuf.MessageOrBuilder { - - /** - *
-     * These should all be launch plan IDs
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - boolean hasEntityId(); - /** - *
-     * These should all be launch plan IDs
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - flyteidl.core.IdentifierOuterClass.Identifier getEntityId(); - /** - *
-     * These should all be launch plan IDs
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getEntityIdOrBuilder(); - - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - boolean hasInputs(); - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - flyteidl.core.Interface.ParameterMap getInputs(); - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - flyteidl.core.Interface.ParameterMapOrBuilder getInputsOrBuilder(); - } - /** - * Protobuf type {@code flyteidl.artifact.ArtifactConsumer} - */ - public static final class ArtifactConsumer extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.ArtifactConsumer) - ArtifactConsumerOrBuilder { - private static final long serialVersionUID = 0L; - // Use ArtifactConsumer.newBuilder() to construct. - private ArtifactConsumer(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private ArtifactConsumer() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ArtifactConsumer( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.IdentifierOuterClass.Identifier.Builder subBuilder = null; - if (entityId_ != null) { - subBuilder = entityId_.toBuilder(); - } - entityId_ = input.readMessage(flyteidl.core.IdentifierOuterClass.Identifier.parser(), extensionRegistry); - if (subBuilder != null) { - 
subBuilder.mergeFrom(entityId_); - entityId_ = subBuilder.buildPartial(); - } - - break; - } - case 18: { - flyteidl.core.Interface.ParameterMap.Builder subBuilder = null; - if (inputs_ != null) { - subBuilder = inputs_.toBuilder(); - } - inputs_ = input.readMessage(flyteidl.core.Interface.ParameterMap.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(inputs_); - inputs_ = subBuilder.buildPartial(); - } - - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactConsumer_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactConsumer_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ArtifactConsumer.class, flyteidl.artifact.Artifacts.ArtifactConsumer.Builder.class); - } - - public static final int ENTITY_ID_FIELD_NUMBER = 1; - private flyteidl.core.IdentifierOuterClass.Identifier entityId_; - /** - *
-     * These should all be launch plan IDs
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public boolean hasEntityId() { - return entityId_ != null; - } - /** - *
-     * These should all be launch plan IDs
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.Identifier getEntityId() { - return entityId_ == null ? flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : entityId_; - } - /** - *
-     * These should all be launch plan IDs
-     * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getEntityIdOrBuilder() { - return getEntityId(); - } - - public static final int INPUTS_FIELD_NUMBER = 2; - private flyteidl.core.Interface.ParameterMap inputs_; - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - public boolean hasInputs() { - return inputs_ != null; - } - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - public flyteidl.core.Interface.ParameterMap getInputs() { - return inputs_ == null ? flyteidl.core.Interface.ParameterMap.getDefaultInstance() : inputs_; - } - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - public flyteidl.core.Interface.ParameterMapOrBuilder getInputsOrBuilder() { - return getInputs(); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (entityId_ != null) { - output.writeMessage(1, getEntityId()); - } - if (inputs_ != null) { - output.writeMessage(2, getInputs()); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (entityId_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getEntityId()); - } - if (inputs_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getInputs()); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.ArtifactConsumer)) { - 
return super.equals(obj); - } - flyteidl.artifact.Artifacts.ArtifactConsumer other = (flyteidl.artifact.Artifacts.ArtifactConsumer) obj; - - if (hasEntityId() != other.hasEntityId()) return false; - if (hasEntityId()) { - if (!getEntityId() - .equals(other.getEntityId())) return false; - } - if (hasInputs() != other.hasInputs()) return false; - if (hasInputs()) { - if (!getInputs() - .equals(other.getInputs())) return false; - } - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasEntityId()) { - hash = (37 * hash) + ENTITY_ID_FIELD_NUMBER; - hash = (53 * hash) + getEntityId().hashCode(); - } - if (hasInputs()) { - hash = (37 * hash) + INPUTS_FIELD_NUMBER; - hash = (53 * hash) + getInputs().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, 
extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ArtifactConsumer parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.ArtifactConsumer prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.ArtifactConsumer} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.ArtifactConsumer) - flyteidl.artifact.Artifacts.ArtifactConsumerOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactConsumer_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactConsumer_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ArtifactConsumer.class, flyteidl.artifact.Artifacts.ArtifactConsumer.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.ArtifactConsumer.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void 
maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (entityIdBuilder_ == null) { - entityId_ = null; - } else { - entityId_ = null; - entityIdBuilder_ = null; - } - if (inputsBuilder_ == null) { - inputs_ = null; - } else { - inputs_ = null; - inputsBuilder_ = null; - } - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ArtifactConsumer_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactConsumer getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.ArtifactConsumer.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactConsumer build() { - flyteidl.artifact.Artifacts.ArtifactConsumer result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactConsumer buildPartial() { - flyteidl.artifact.Artifacts.ArtifactConsumer result = new flyteidl.artifact.Artifacts.ArtifactConsumer(this); - if (entityIdBuilder_ == null) { - result.entityId_ = entityId_; - } else { - result.entityId_ = entityIdBuilder_.build(); - } - if (inputsBuilder_ == null) { - result.inputs_ = inputs_; - } else { - result.inputs_ = inputsBuilder_.build(); - } - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - 
@java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.ArtifactConsumer) { - return mergeFrom((flyteidl.artifact.Artifacts.ArtifactConsumer)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.ArtifactConsumer other) { - if (other == flyteidl.artifact.Artifacts.ArtifactConsumer.getDefaultInstance()) return this; - if (other.hasEntityId()) { - mergeEntityId(other.getEntityId()); - } - if (other.hasInputs()) { - mergeInputs(other.getInputs()); - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.ArtifactConsumer parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.ArtifactConsumer) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - - private 
flyteidl.core.IdentifierOuterClass.Identifier entityId_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> entityIdBuilder_; - /** - *
-       * These should all be launch plan IDs
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public boolean hasEntityId() { - return entityIdBuilder_ != null || entityId_ != null; - } - /** - *
-       * These should all be launch plan IDs
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.Identifier getEntityId() { - if (entityIdBuilder_ == null) { - return entityId_ == null ? flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : entityId_; - } else { - return entityIdBuilder_.getMessage(); - } - } - /** - *
-       * These should all be launch plan IDs
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public Builder setEntityId(flyteidl.core.IdentifierOuterClass.Identifier value) { - if (entityIdBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - entityId_ = value; - onChanged(); - } else { - entityIdBuilder_.setMessage(value); - } - - return this; - } - /** - *
-       * These should all be launch plan IDs
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public Builder setEntityId( - flyteidl.core.IdentifierOuterClass.Identifier.Builder builderForValue) { - if (entityIdBuilder_ == null) { - entityId_ = builderForValue.build(); - onChanged(); - } else { - entityIdBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - *
-       * These should all be launch plan IDs
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public Builder mergeEntityId(flyteidl.core.IdentifierOuterClass.Identifier value) { - if (entityIdBuilder_ == null) { - if (entityId_ != null) { - entityId_ = - flyteidl.core.IdentifierOuterClass.Identifier.newBuilder(entityId_).mergeFrom(value).buildPartial(); - } else { - entityId_ = value; - } - onChanged(); - } else { - entityIdBuilder_.mergeFrom(value); - } - - return this; - } - /** - *
-       * These should all be launch plan IDs
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public Builder clearEntityId() { - if (entityIdBuilder_ == null) { - entityId_ = null; - onChanged(); - } else { - entityId_ = null; - entityIdBuilder_ = null; - } - - return this; - } - /** - *
-       * These should all be launch plan IDs
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.Identifier.Builder getEntityIdBuilder() { - - onChanged(); - return getEntityIdFieldBuilder().getBuilder(); - } - /** - *
-       * These should all be launch plan IDs
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getEntityIdOrBuilder() { - if (entityIdBuilder_ != null) { - return entityIdBuilder_.getMessageOrBuilder(); - } else { - return entityId_ == null ? - flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : entityId_; - } - } - /** - *
-       * These should all be launch plan IDs
-       * 
- * - * .flyteidl.core.Identifier entity_id = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> - getEntityIdFieldBuilder() { - if (entityIdBuilder_ == null) { - entityIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder>( - getEntityId(), - getParentForChildren(), - isClean()); - entityId_ = null; - } - return entityIdBuilder_; - } - - private flyteidl.core.Interface.ParameterMap inputs_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Interface.ParameterMap, flyteidl.core.Interface.ParameterMap.Builder, flyteidl.core.Interface.ParameterMapOrBuilder> inputsBuilder_; - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - public boolean hasInputs() { - return inputsBuilder_ != null || inputs_ != null; - } - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - public flyteidl.core.Interface.ParameterMap getInputs() { - if (inputsBuilder_ == null) { - return inputs_ == null ? 
flyteidl.core.Interface.ParameterMap.getDefaultInstance() : inputs_; - } else { - return inputsBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - public Builder setInputs(flyteidl.core.Interface.ParameterMap value) { - if (inputsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - inputs_ = value; - onChanged(); - } else { - inputsBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - public Builder setInputs( - flyteidl.core.Interface.ParameterMap.Builder builderForValue) { - if (inputsBuilder_ == null) { - inputs_ = builderForValue.build(); - onChanged(); - } else { - inputsBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - public Builder mergeInputs(flyteidl.core.Interface.ParameterMap value) { - if (inputsBuilder_ == null) { - if (inputs_ != null) { - inputs_ = - flyteidl.core.Interface.ParameterMap.newBuilder(inputs_).mergeFrom(value).buildPartial(); - } else { - inputs_ = value; - } - onChanged(); - } else { - inputsBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - public Builder clearInputs() { - if (inputsBuilder_ == null) { - inputs_ = null; - onChanged(); - } else { - inputs_ = null; - inputsBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - public flyteidl.core.Interface.ParameterMap.Builder getInputsBuilder() { - - onChanged(); - return getInputsFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - public flyteidl.core.Interface.ParameterMapOrBuilder getInputsOrBuilder() { - if (inputsBuilder_ != null) { - return inputsBuilder_.getMessageOrBuilder(); - } else { - return inputs_ == null ? 
- flyteidl.core.Interface.ParameterMap.getDefaultInstance() : inputs_; - } - } - /** - * .flyteidl.core.ParameterMap inputs = 2; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Interface.ParameterMap, flyteidl.core.Interface.ParameterMap.Builder, flyteidl.core.Interface.ParameterMapOrBuilder> - getInputsFieldBuilder() { - if (inputsBuilder_ == null) { - inputsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Interface.ParameterMap, flyteidl.core.Interface.ParameterMap.Builder, flyteidl.core.Interface.ParameterMapOrBuilder>( - getInputs(), - getParentForChildren(), - isClean()); - inputs_ = null; - } - return inputsBuilder_; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.ArtifactConsumer) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ArtifactConsumer) - private static final flyteidl.artifact.Artifacts.ArtifactConsumer DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.ArtifactConsumer(); - } - - public static flyteidl.artifact.Artifacts.ArtifactConsumer getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public ArtifactConsumer parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ArtifactConsumer(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - 
@java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ArtifactConsumer getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface RegisterConsumerRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.RegisterConsumerRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - java.util.List - getConsumersList(); - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - flyteidl.artifact.Artifacts.ArtifactConsumer getConsumers(int index); - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - int getConsumersCount(); - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - java.util.List - getConsumersOrBuilderList(); - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - flyteidl.artifact.Artifacts.ArtifactConsumerOrBuilder getConsumersOrBuilder( - int index); - } - /** - * Protobuf type {@code flyteidl.artifact.RegisterConsumerRequest} - */ - public static final class RegisterConsumerRequest extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.RegisterConsumerRequest) - RegisterConsumerRequestOrBuilder { - private static final long serialVersionUID = 0L; - // Use RegisterConsumerRequest.newBuilder() to construct. 
- private RegisterConsumerRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private RegisterConsumerRequest() { - consumers_ = java.util.Collections.emptyList(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private RegisterConsumerRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - if (!((mutable_bitField0_ & 0x00000001) != 0)) { - consumers_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - consumers_.add( - input.readMessage(flyteidl.artifact.Artifacts.ArtifactConsumer.parser(), extensionRegistry)); - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) != 0)) { - consumers_ = java.util.Collections.unmodifiableList(consumers_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterConsumerRequest_descriptor; - } - - @java.lang.Override - 
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterConsumerRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.RegisterConsumerRequest.class, flyteidl.artifact.Artifacts.RegisterConsumerRequest.Builder.class); - } - - public static final int CONSUMERS_FIELD_NUMBER = 1; - private java.util.List consumers_; - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public java.util.List getConsumersList() { - return consumers_; - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public java.util.List - getConsumersOrBuilderList() { - return consumers_; - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public int getConsumersCount() { - return consumers_.size(); - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactConsumer getConsumers(int index) { - return consumers_.get(index); - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactConsumerOrBuilder getConsumersOrBuilder( - int index) { - return consumers_.get(index); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - for (int i = 0; i < consumers_.size(); i++) { - output.writeMessage(1, consumers_.get(i)); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - 
for (int i = 0; i < consumers_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, consumers_.get(i)); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.RegisterConsumerRequest)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.RegisterConsumerRequest other = (flyteidl.artifact.Artifacts.RegisterConsumerRequest) obj; - - if (!getConsumersList() - .equals(other.getConsumersList())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (getConsumersCount() > 0) { - hash = (37 * hash) + CONSUMERS_FIELD_NUMBER; - hash = (53 * hash) + getConsumersList().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseFrom( - com.google.protobuf.ByteString data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, 
input); - } - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.RegisterConsumerRequest prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.RegisterConsumerRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.RegisterConsumerRequest) - flyteidl.artifact.Artifacts.RegisterConsumerRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterConsumerRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterConsumerRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.RegisterConsumerRequest.class, flyteidl.artifact.Artifacts.RegisterConsumerRequest.Builder.class); - } - - // Construct using 
flyteidl.artifact.Artifacts.RegisterConsumerRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - getConsumersFieldBuilder(); - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (consumersBuilder_ == null) { - consumers_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - consumersBuilder_.clear(); - } - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterConsumerRequest_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterConsumerRequest getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.RegisterConsumerRequest.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterConsumerRequest build() { - flyteidl.artifact.Artifacts.RegisterConsumerRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterConsumerRequest buildPartial() { - flyteidl.artifact.Artifacts.RegisterConsumerRequest result = new flyteidl.artifact.Artifacts.RegisterConsumerRequest(this); - int from_bitField0_ = bitField0_; - if (consumersBuilder_ == null) { - if (((bitField0_ & 0x00000001) != 0)) { - consumers_ = java.util.Collections.unmodifiableList(consumers_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.consumers_ = consumers_; - } else { - result.consumers_ = consumersBuilder_.build(); - } - onBuilt(); - return result; - } - - 
@java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.RegisterConsumerRequest) { - return mergeFrom((flyteidl.artifact.Artifacts.RegisterConsumerRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.RegisterConsumerRequest other) { - if (other == flyteidl.artifact.Artifacts.RegisterConsumerRequest.getDefaultInstance()) return this; - if (consumersBuilder_ == null) { - if (!other.consumers_.isEmpty()) { - if (consumers_.isEmpty()) { - consumers_ = other.consumers_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureConsumersIsMutable(); - consumers_.addAll(other.consumers_); - } - onChanged(); - } - } else { - if (!other.consumers_.isEmpty()) { - if (consumersBuilder_.isEmpty()) { - consumersBuilder_.dispose(); - consumersBuilder_ = null; - consumers_ = other.consumers_; - bitField0_ = (bitField0_ & ~0x00000001); - consumersBuilder_ = - 
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? - getConsumersFieldBuilder() : null; - } else { - consumersBuilder_.addAllMessages(other.consumers_); - } - } - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.RegisterConsumerRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.RegisterConsumerRequest) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private java.util.List consumers_ = - java.util.Collections.emptyList(); - private void ensureConsumersIsMutable() { - if (!((bitField0_ & 0x00000001) != 0)) { - consumers_ = new java.util.ArrayList(consumers_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactConsumer, flyteidl.artifact.Artifacts.ArtifactConsumer.Builder, flyteidl.artifact.Artifacts.ArtifactConsumerOrBuilder> consumersBuilder_; - - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public java.util.List getConsumersList() { - if (consumersBuilder_ == null) { - return java.util.Collections.unmodifiableList(consumers_); - } else { - return consumersBuilder_.getMessageList(); - } - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public int getConsumersCount() { - if (consumersBuilder_ == null) { - return consumers_.size(); - } else { - return consumersBuilder_.getCount(); 
- } - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactConsumer getConsumers(int index) { - if (consumersBuilder_ == null) { - return consumers_.get(index); - } else { - return consumersBuilder_.getMessage(index); - } - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public Builder setConsumers( - int index, flyteidl.artifact.Artifacts.ArtifactConsumer value) { - if (consumersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureConsumersIsMutable(); - consumers_.set(index, value); - onChanged(); - } else { - consumersBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public Builder setConsumers( - int index, flyteidl.artifact.Artifacts.ArtifactConsumer.Builder builderForValue) { - if (consumersBuilder_ == null) { - ensureConsumersIsMutable(); - consumers_.set(index, builderForValue.build()); - onChanged(); - } else { - consumersBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public Builder addConsumers(flyteidl.artifact.Artifacts.ArtifactConsumer value) { - if (consumersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureConsumersIsMutable(); - consumers_.add(value); - onChanged(); - } else { - consumersBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public Builder addConsumers( - int index, flyteidl.artifact.Artifacts.ArtifactConsumer value) { - if (consumersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureConsumersIsMutable(); - consumers_.add(index, value); - onChanged(); - } else { - consumersBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated 
.flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public Builder addConsumers( - flyteidl.artifact.Artifacts.ArtifactConsumer.Builder builderForValue) { - if (consumersBuilder_ == null) { - ensureConsumersIsMutable(); - consumers_.add(builderForValue.build()); - onChanged(); - } else { - consumersBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public Builder addConsumers( - int index, flyteidl.artifact.Artifacts.ArtifactConsumer.Builder builderForValue) { - if (consumersBuilder_ == null) { - ensureConsumersIsMutable(); - consumers_.add(index, builderForValue.build()); - onChanged(); - } else { - consumersBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public Builder addAllConsumers( - java.lang.Iterable values) { - if (consumersBuilder_ == null) { - ensureConsumersIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, consumers_); - onChanged(); - } else { - consumersBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public Builder clearConsumers() { - if (consumersBuilder_ == null) { - consumers_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - consumersBuilder_.clear(); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public Builder removeConsumers(int index) { - if (consumersBuilder_ == null) { - ensureConsumersIsMutable(); - consumers_.remove(index); - onChanged(); - } else { - consumersBuilder_.remove(index); - } - return this; - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactConsumer.Builder getConsumersBuilder( - int index) { - return 
getConsumersFieldBuilder().getBuilder(index); - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactConsumerOrBuilder getConsumersOrBuilder( - int index) { - if (consumersBuilder_ == null) { - return consumers_.get(index); } else { - return consumersBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public java.util.List - getConsumersOrBuilderList() { - if (consumersBuilder_ != null) { - return consumersBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(consumers_); - } - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactConsumer.Builder addConsumersBuilder() { - return getConsumersFieldBuilder().addBuilder( - flyteidl.artifact.Artifacts.ArtifactConsumer.getDefaultInstance()); - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public flyteidl.artifact.Artifacts.ArtifactConsumer.Builder addConsumersBuilder( - int index) { - return getConsumersFieldBuilder().addBuilder( - index, flyteidl.artifact.Artifacts.ArtifactConsumer.getDefaultInstance()); - } - /** - * repeated .flyteidl.artifact.ArtifactConsumer consumers = 1; - */ - public java.util.List - getConsumersBuilderList() { - return getConsumersFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactConsumer, flyteidl.artifact.Artifacts.ArtifactConsumer.Builder, flyteidl.artifact.Artifacts.ArtifactConsumerOrBuilder> - getConsumersFieldBuilder() { - if (consumersBuilder_ == null) { - consumersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.artifact.Artifacts.ArtifactConsumer, flyteidl.artifact.Artifacts.ArtifactConsumer.Builder, flyteidl.artifact.Artifacts.ArtifactConsumerOrBuilder>( - consumers_, - ((bitField0_ & 0x00000001) != 0), - 
getParentForChildren(), - isClean()); - consumers_ = null; - } - return consumersBuilder_; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.RegisterConsumerRequest) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.RegisterConsumerRequest) - private static final flyteidl.artifact.Artifacts.RegisterConsumerRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.RegisterConsumerRequest(); - } - - public static flyteidl.artifact.Artifacts.RegisterConsumerRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public RegisterConsumerRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegisterConsumerRequest(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterConsumerRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface RegisterResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.RegisterResponse) - com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code flyteidl.artifact.RegisterResponse} - */ - public static final class 
RegisterResponse extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.RegisterResponse) - RegisterResponseOrBuilder { - private static final long serialVersionUID = 0L; - // Use RegisterResponse.newBuilder() to construct. - private RegisterResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private RegisterResponse() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private RegisterResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterResponse_fieldAccessorTable - 
.ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.RegisterResponse.class, flyteidl.artifact.Artifacts.RegisterResponse.Builder.class); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.RegisterResponse)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.RegisterResponse other = (flyteidl.artifact.Artifacts.RegisterResponse) obj; - - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.RegisterResponse parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.RegisterResponse parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public 
static flyteidl.artifact.Artifacts.RegisterResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.RegisterResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.RegisterResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.RegisterResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.RegisterResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - 
.parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.RegisterResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.RegisterResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.RegisterResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.RegisterResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.RegisterResponse) - flyteidl.artifact.Artifacts.RegisterResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.RegisterResponse.class, flyteidl.artifact.Artifacts.RegisterResponse.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.RegisterResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_RegisterResponse_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterResponse getDefaultInstanceForType() { - return 
flyteidl.artifact.Artifacts.RegisterResponse.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterResponse build() { - flyteidl.artifact.Artifacts.RegisterResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterResponse buildPartial() { - flyteidl.artifact.Artifacts.RegisterResponse result = new flyteidl.artifact.Artifacts.RegisterResponse(this); - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.RegisterResponse) { - return mergeFrom((flyteidl.artifact.Artifacts.RegisterResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.RegisterResponse other) { - if (other == flyteidl.artifact.Artifacts.RegisterResponse.getDefaultInstance()) return 
this; - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.RegisterResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.RegisterResponse) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.RegisterResponse) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.RegisterResponse) - private static final flyteidl.artifact.Artifacts.RegisterResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.RegisterResponse(); - } - - public static flyteidl.artifact.Artifacts.RegisterResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public RegisterResponse parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegisterResponse(input, 
extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.RegisterResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface ExecutionInputsRequestOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.ExecutionInputsRequest) - com.google.protobuf.MessageOrBuilder { - - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - boolean hasExecutionId(); - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getExecutionId(); - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getExecutionIdOrBuilder(); - - /** - *
-     * can make this a map in the future, currently no need.
-     * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - java.util.List - getInputsList(); - /** - *
-     * can make this a map in the future, currently no need.
-     * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - flyteidl.core.ArtifactId.ArtifactID getInputs(int index); - /** - *
-     * can make this a map in the future, currently no need.
-     * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - int getInputsCount(); - /** - *
-     * can make this a map in the future, currently no need.
-     * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - java.util.List - getInputsOrBuilderList(); - /** - *
-     * can make this a map in the future, currently no need.
-     * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - flyteidl.core.ArtifactId.ArtifactIDOrBuilder getInputsOrBuilder( - int index); - } - /** - * Protobuf type {@code flyteidl.artifact.ExecutionInputsRequest} - */ - public static final class ExecutionInputsRequest extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.ExecutionInputsRequest) - ExecutionInputsRequestOrBuilder { - private static final long serialVersionUID = 0L; - // Use ExecutionInputsRequest.newBuilder() to construct. - private ExecutionInputsRequest(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private ExecutionInputsRequest() { - inputs_ = java.util.Collections.emptyList(); - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ExecutionInputsRequest( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 10: { - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder subBuilder = null; - if (executionId_ != null) { - subBuilder = executionId_.toBuilder(); - } - executionId_ = input.readMessage(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(executionId_); - executionId_ = subBuilder.buildPartial(); - } - - break; - } - case 18: { - if (!((mutable_bitField0_ & 0x00000002) != 0)) { - inputs_ = new 
java.util.ArrayList(); - mutable_bitField0_ |= 0x00000002; - } - inputs_.add( - input.readMessage(flyteidl.core.ArtifactId.ArtifactID.parser(), extensionRegistry)); - break; - } - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000002) != 0)) { - inputs_ = java.util.Collections.unmodifiableList(inputs_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ExecutionInputsRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ExecutionInputsRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ExecutionInputsRequest.class, flyteidl.artifact.Artifacts.ExecutionInputsRequest.Builder.class); - } - - private int bitField0_; - public static final int EXECUTION_ID_FIELD_NUMBER = 1; - private flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier executionId_; - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - public boolean hasExecutionId() { - return executionId_ != null; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getExecutionId() { - return executionId_ == null ? 
flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.getDefaultInstance() : executionId_; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getExecutionIdOrBuilder() { - return getExecutionId(); - } - - public static final int INPUTS_FIELD_NUMBER = 2; - private java.util.List inputs_; - /** - *
-     * can make this a map in the future, currently no need.
-     * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public java.util.List getInputsList() { - return inputs_; - } - /** - *
-     * can make this a map in the future, currently no need.
-     * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public java.util.List - getInputsOrBuilderList() { - return inputs_; - } - /** - *
-     * can make this a map in the future, currently no need.
-     * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public int getInputsCount() { - return inputs_.size(); - } - /** - *
-     * can make this a map in the future, currently no need.
-     * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public flyteidl.core.ArtifactId.ArtifactID getInputs(int index) { - return inputs_.get(index); - } - /** - *
-     * can make this a map in the future, currently no need.
-     * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getInputsOrBuilder( - int index) { - return inputs_.get(index); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (executionId_ != null) { - output.writeMessage(1, getExecutionId()); - } - for (int i = 0; i < inputs_.size(); i++) { - output.writeMessage(2, inputs_.get(i)); - } - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (executionId_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getExecutionId()); - } - for (int i = 0; i < inputs_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, inputs_.get(i)); - } - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.ExecutionInputsRequest)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.ExecutionInputsRequest other = (flyteidl.artifact.Artifacts.ExecutionInputsRequest) obj; - - if (hasExecutionId() != other.hasExecutionId()) return false; - if (hasExecutionId()) { - if (!getExecutionId() - .equals(other.getExecutionId())) return false; - } - if (!getInputsList() - .equals(other.getInputsList())) return false; - if (!unknownFields.equals(other.unknownFields)) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - 
if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - if (hasExecutionId()) { - hash = (37 * hash) + EXECUTION_ID_FIELD_NUMBER; - hash = (53 * hash) + getExecutionId().hashCode(); - } - if (getInputsCount() > 0) { - hash = (37 * hash) + INPUTS_FIELD_NUMBER; - hash = (53 * hash) + getInputsList().hashCode(); - } - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static 
flyteidl.artifact.Artifacts.ExecutionInputsRequest parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.ExecutionInputsRequest prototype) { - return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.ExecutionInputsRequest} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.ExecutionInputsRequest) - flyteidl.artifact.Artifacts.ExecutionInputsRequestOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ExecutionInputsRequest_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ExecutionInputsRequest_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ExecutionInputsRequest.class, flyteidl.artifact.Artifacts.ExecutionInputsRequest.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.ExecutionInputsRequest.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - getInputsFieldBuilder(); - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - if (executionIdBuilder_ == null) { - executionId_ = null; - } else { - executionId_ = null; - executionIdBuilder_ = null; - } - if (inputsBuilder_ 
== null) { - inputs_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - inputsBuilder_.clear(); - } - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ExecutionInputsRequest_descriptor; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ExecutionInputsRequest getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.ExecutionInputsRequest.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ExecutionInputsRequest build() { - flyteidl.artifact.Artifacts.ExecutionInputsRequest result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ExecutionInputsRequest buildPartial() { - flyteidl.artifact.Artifacts.ExecutionInputsRequest result = new flyteidl.artifact.Artifacts.ExecutionInputsRequest(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (executionIdBuilder_ == null) { - result.executionId_ = executionId_; - } else { - result.executionId_ = executionIdBuilder_.build(); - } - if (inputsBuilder_ == null) { - if (((bitField0_ & 0x00000002) != 0)) { - inputs_ = java.util.Collections.unmodifiableList(inputs_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.inputs_ = inputs_; - } else { - result.inputs_ = inputsBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return 
super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.ExecutionInputsRequest) { - return mergeFrom((flyteidl.artifact.Artifacts.ExecutionInputsRequest)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(flyteidl.artifact.Artifacts.ExecutionInputsRequest other) { - if (other == flyteidl.artifact.Artifacts.ExecutionInputsRequest.getDefaultInstance()) return this; - if (other.hasExecutionId()) { - mergeExecutionId(other.getExecutionId()); - } - if (inputsBuilder_ == null) { - if (!other.inputs_.isEmpty()) { - if (inputs_.isEmpty()) { - inputs_ = other.inputs_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureInputsIsMutable(); - inputs_.addAll(other.inputs_); - } - onChanged(); - } - } else { - if (!other.inputs_.isEmpty()) { - if (inputsBuilder_.isEmpty()) { - inputsBuilder_.dispose(); - inputsBuilder_ = null; - inputs_ = other.inputs_; - bitField0_ = (bitField0_ & ~0x00000002); - inputsBuilder_ = - com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
- getInputsFieldBuilder() : null; - } else { - inputsBuilder_.addAllMessages(other.inputs_); - } - } - } - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.ExecutionInputsRequest parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.ExecutionInputsRequest) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - private flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier executionId_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder> executionIdBuilder_; - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - public boolean hasExecutionId() { - return executionIdBuilder_ != null || executionId_ != null; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getExecutionId() { - if (executionIdBuilder_ == null) { - return executionId_ == null ? 
flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.getDefaultInstance() : executionId_; - } else { - return executionIdBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - public Builder setExecutionId(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier value) { - if (executionIdBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - executionId_ = value; - onChanged(); - } else { - executionIdBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - public Builder setExecutionId( - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder builderForValue) { - if (executionIdBuilder_ == null) { - executionId_ = builderForValue.build(); - onChanged(); - } else { - executionIdBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - public Builder mergeExecutionId(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier value) { - if (executionIdBuilder_ == null) { - if (executionId_ != null) { - executionId_ = - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.newBuilder(executionId_).mergeFrom(value).buildPartial(); - } else { - executionId_ = value; - } - onChanged(); - } else { - executionIdBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - public Builder clearExecutionId() { - if (executionIdBuilder_ == null) { - executionId_ = null; - onChanged(); - } else { - executionId_ = null; - executionIdBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder getExecutionIdBuilder() { - - onChanged(); - return getExecutionIdFieldBuilder().getBuilder(); - } 
- /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getExecutionIdOrBuilder() { - if (executionIdBuilder_ != null) { - return executionIdBuilder_.getMessageOrBuilder(); - } else { - return executionId_ == null ? - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.getDefaultInstance() : executionId_; - } - } - /** - * .flyteidl.core.WorkflowExecutionIdentifier execution_id = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder> - getExecutionIdFieldBuilder() { - if (executionIdBuilder_ == null) { - executionIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder>( - getExecutionId(), - getParentForChildren(), - isClean()); - executionId_ = null; - } - return executionIdBuilder_; - } - - private java.util.List inputs_ = - java.util.Collections.emptyList(); - private void ensureInputsIsMutable() { - if (!((bitField0_ & 0x00000002) != 0)) { - inputs_ = new java.util.ArrayList(inputs_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> inputsBuilder_; - - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public java.util.List getInputsList() { - if (inputsBuilder_ == null) { - return java.util.Collections.unmodifiableList(inputs_); - } else { - return inputsBuilder_.getMessageList(); - } - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public int getInputsCount() { - if (inputsBuilder_ == null) { - return inputs_.size(); - } else { - return inputsBuilder_.getCount(); - } - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public flyteidl.core.ArtifactId.ArtifactID getInputs(int index) { - if (inputsBuilder_ == null) { - return inputs_.get(index); - } else { - return inputsBuilder_.getMessage(index); - } - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public Builder setInputs( - int index, flyteidl.core.ArtifactId.ArtifactID value) { - if (inputsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureInputsIsMutable(); - inputs_.set(index, value); - onChanged(); - } else { - inputsBuilder_.setMessage(index, value); - } - return this; - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public Builder setInputs( - int index, flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { - if (inputsBuilder_ == null) { - ensureInputsIsMutable(); - inputs_.set(index, builderForValue.build()); - onChanged(); - } else { - inputsBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public Builder addInputs(flyteidl.core.ArtifactId.ArtifactID value) { - if (inputsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureInputsIsMutable(); - inputs_.add(value); - onChanged(); - } else { - inputsBuilder_.addMessage(value); - } - return this; - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public Builder addInputs( - int index, flyteidl.core.ArtifactId.ArtifactID value) { - if (inputsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureInputsIsMutable(); - inputs_.add(index, value); - onChanged(); - } else { - inputsBuilder_.addMessage(index, value); - } - return this; - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public Builder addInputs( - flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { - if (inputsBuilder_ == null) { - ensureInputsIsMutable(); - inputs_.add(builderForValue.build()); - onChanged(); - } else { - inputsBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public Builder addInputs( - int index, flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { - if (inputsBuilder_ == null) { - ensureInputsIsMutable(); - inputs_.add(index, builderForValue.build()); - onChanged(); - } else { - inputsBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public Builder addAllInputs( - java.lang.Iterable values) { - if (inputsBuilder_ == null) { - ensureInputsIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, inputs_); - onChanged(); - } else { - inputsBuilder_.addAllMessages(values); - } - return this; - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public Builder clearInputs() { - if (inputsBuilder_ == null) { - inputs_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - inputsBuilder_.clear(); - } - return this; - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public Builder removeInputs(int index) { - if (inputsBuilder_ == null) { - ensureInputsIsMutable(); - inputs_.remove(index); - onChanged(); - } else { - inputsBuilder_.remove(index); - } - return this; - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public flyteidl.core.ArtifactId.ArtifactID.Builder getInputsBuilder( - int index) { - return getInputsFieldBuilder().getBuilder(index); - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getInputsOrBuilder( - int index) { - if (inputsBuilder_ == null) { - return inputs_.get(index); } else { - return inputsBuilder_.getMessageOrBuilder(index); - } - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public java.util.List - getInputsOrBuilderList() { - if (inputsBuilder_ != null) { - return inputsBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(inputs_); - } - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public flyteidl.core.ArtifactId.ArtifactID.Builder addInputsBuilder() { - return getInputsFieldBuilder().addBuilder( - flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance()); - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public flyteidl.core.ArtifactId.ArtifactID.Builder addInputsBuilder( - int index) { - return getInputsFieldBuilder().addBuilder( - index, flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance()); - } - /** - *
-       * can make this a map in the future, currently no need.
-       * 
- * - * repeated .flyteidl.core.ArtifactID inputs = 2; - */ - public java.util.List - getInputsBuilderList() { - return getInputsFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> - getInputsFieldBuilder() { - if (inputsBuilder_ == null) { - inputsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder>( - inputs_, - ((bitField0_ & 0x00000002) != 0), - getParentForChildren(), - isClean()); - inputs_ = null; - } - return inputsBuilder_; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.ExecutionInputsRequest) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ExecutionInputsRequest) - private static final flyteidl.artifact.Artifacts.ExecutionInputsRequest DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.ExecutionInputsRequest(); - } - - public static flyteidl.artifact.Artifacts.ExecutionInputsRequest getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public ExecutionInputsRequest parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ExecutionInputsRequest(input, 
extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ExecutionInputsRequest getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - public interface ExecutionInputsResponseOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.artifact.ExecutionInputsResponse) - com.google.protobuf.MessageOrBuilder { - } - /** - * Protobuf type {@code flyteidl.artifact.ExecutionInputsResponse} - */ - public static final class ExecutionInputsResponse extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.artifact.ExecutionInputsResponse) - ExecutionInputsResponseOrBuilder { - private static final long serialVersionUID = 0L; - // Use ExecutionInputsResponse.newBuilder() to construct. - private ExecutionInputsResponse(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); - } - private ExecutionInputsResponse() { - } - - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ExecutionInputsResponse( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - this(); - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField( - input, unknownFields, extensionRegistry, tag)) { - done = true; - } - break; - } - } - } - } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ExecutionInputsResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ExecutionInputsResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ExecutionInputsResponse.class, flyteidl.artifact.Artifacts.ExecutionInputsResponse.Builder.class); - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - unknownFields.writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - size += unknownFields.getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof flyteidl.artifact.Artifacts.ExecutionInputsResponse)) { - return super.equals(obj); - } - flyteidl.artifact.Artifacts.ExecutionInputsResponse other = (flyteidl.artifact.Artifacts.ExecutionInputsResponse) obj; - - if (!unknownFields.equals(other.unknownFields)) 
return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (29 * hash) + unknownFields.hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - 
.parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input); - } - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessageV3 - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(flyteidl.artifact.Artifacts.ExecutionInputsResponse prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code flyteidl.artifact.ExecutionInputsResponse} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.artifact.ExecutionInputsResponse) - flyteidl.artifact.Artifacts.ExecutionInputsResponseOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ExecutionInputsResponse_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internalGetFieldAccessorTable() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ExecutionInputsResponse_fieldAccessorTable - .ensureFieldAccessorsInitialized( - flyteidl.artifact.Artifacts.ExecutionInputsResponse.class, flyteidl.artifact.Artifacts.ExecutionInputsResponse.Builder.class); - } - - // Construct using flyteidl.artifact.Artifacts.ExecutionInputsResponse.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessageV3 - .alwaysUseFieldBuilders) { - } - } - @java.lang.Override - public Builder clear() { - super.clear(); - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return flyteidl.artifact.Artifacts.internal_static_flyteidl_artifact_ExecutionInputsResponse_descriptor; - } - - @java.lang.Override - public 
flyteidl.artifact.Artifacts.ExecutionInputsResponse getDefaultInstanceForType() { - return flyteidl.artifact.Artifacts.ExecutionInputsResponse.getDefaultInstance(); - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ExecutionInputsResponse build() { - flyteidl.artifact.Artifacts.ExecutionInputsResponse result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ExecutionInputsResponse buildPartial() { - flyteidl.artifact.Artifacts.ExecutionInputsResponse result = new flyteidl.artifact.Artifacts.ExecutionInputsResponse(this); - onBuilt(); - return result; - } - - @java.lang.Override - public Builder clone() { - return super.clone(); - } - @java.lang.Override - public Builder setField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.setField(field, value); - } - @java.lang.Override - public Builder clearField( - com.google.protobuf.Descriptors.FieldDescriptor field) { - return super.clearField(field); - } - @java.lang.Override - public Builder clearOneof( - com.google.protobuf.Descriptors.OneofDescriptor oneof) { - return super.clearOneof(oneof); - } - @java.lang.Override - public Builder setRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - int index, java.lang.Object value) { - return super.setRepeatedField(field, index, value); - } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); - } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.artifact.Artifacts.ExecutionInputsResponse) { - return mergeFrom((flyteidl.artifact.Artifacts.ExecutionInputsResponse)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder 
mergeFrom(flyteidl.artifact.Artifacts.ExecutionInputsResponse other) { - if (other == flyteidl.artifact.Artifacts.ExecutionInputsResponse.getDefaultInstance()) return this; - this.mergeUnknownFields(other.unknownFields); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.artifact.Artifacts.ExecutionInputsResponse parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.artifact.Artifacts.ExecutionInputsResponse) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); - } - - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); - } - - - // @@protoc_insertion_point(builder_scope:flyteidl.artifact.ExecutionInputsResponse) - } - - // @@protoc_insertion_point(class_scope:flyteidl.artifact.ExecutionInputsResponse) - private static final flyteidl.artifact.Artifacts.ExecutionInputsResponse DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new flyteidl.artifact.Artifacts.ExecutionInputsResponse(); - } - - public static flyteidl.artifact.Artifacts.ExecutionInputsResponse getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public ExecutionInputsResponse 
parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ExecutionInputsResponse(input, extensionRegistry); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public flyteidl.artifact.Artifacts.ExecutionInputsResponse getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - - } - - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_Artifact_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_Artifact_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_CreateArtifactRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_CreateArtifactRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_CreateArtifactRequest_PartitionsEntry_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_CreateArtifactRequest_PartitionsEntry_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_ArtifactSource_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_ArtifactSource_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_ArtifactSpec_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - 
internal_static_flyteidl_artifact_ArtifactSpec_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_CreateArtifactResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_CreateArtifactResponse_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_GetArtifactRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_GetArtifactRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_GetArtifactResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_GetArtifactResponse_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_SearchOptions_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_SearchOptions_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_SearchArtifactsRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_SearchArtifactsRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_SearchArtifactsResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_SearchArtifactsResponse_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_FindByWorkflowExecRequest_descriptor; - private static final - 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_FindByWorkflowExecRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_AddTagRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_AddTagRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_AddTagResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_AddTagResponse_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_CreateTriggerRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_CreateTriggerRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_CreateTriggerResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_CreateTriggerResponse_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_DeleteTriggerRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_DeleteTriggerRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_DeleteTriggerResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_DeleteTriggerResponse_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - 
internal_static_flyteidl_artifact_ArtifactProducer_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_ArtifactProducer_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_RegisterProducerRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_RegisterProducerRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_ArtifactConsumer_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_ArtifactConsumer_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_RegisterConsumerRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_RegisterConsumerRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_RegisterResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_RegisterResponse_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_ExecutionInputsRequest_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_ExecutionInputsRequest_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_artifact_ExecutionInputsResponse_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_artifact_ExecutionInputsResponse_fieldAccessorTable; - - 
public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n!flyteidl/artifact/artifacts.proto\022\021fly" + - "teidl.artifact\032\031google/protobuf/any.prot" + - "o\032\034google/api/annotations.proto\032 flyteid" + - "l/admin/launch_plan.proto\032\034flyteidl/core" + - "/literals.proto\032\031flyteidl/core/types.pro" + - "to\032\036flyteidl/core/identifier.proto\032\037flyt" + - "eidl/core/artifact_id.proto\032\035flyteidl/co" + - "re/interface.proto\032 flyteidl/event/cloud" + - "events.proto\"\252\001\n\010Artifact\022.\n\013artifact_id" + - "\030\001 \001(\0132\031.flyteidl.core.ArtifactID\022-\n\004spe" + - "c\030\002 \001(\0132\037.flyteidl.artifact.ArtifactSpec" + - "\022\014\n\004tags\030\003 \003(\t\0221\n\006source\030\004 \001(\0132!.flyteid" + - "l.artifact.ArtifactSource\"\312\002\n\025CreateArti" + - "factRequest\0220\n\014artifact_key\030\001 \001(\0132\032.flyt" + - "eidl.core.ArtifactKey\022\017\n\007version\030\003 \001(\t\022-" + - "\n\004spec\030\002 \001(\0132\037.flyteidl.artifact.Artifac" + - "tSpec\022L\n\npartitions\030\004 \003(\01328.flyteidl.art" + - "ifact.CreateArtifactRequest.PartitionsEn" + - "try\022\013\n\003tag\030\005 \001(\t\0221\n\006source\030\006 \001(\0132!.flyte" + - "idl.artifact.ArtifactSource\0321\n\017Partition" + - "sEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\277" + - "\001\n\016ArtifactSource\022F\n\022workflow_execution\030" + - "\001 \001(\0132*.flyteidl.core.WorkflowExecutionI" + - "dentifier\022\017\n\007node_id\030\002 \001(\t\022*\n\007task_id\030\003 " + - "\001(\0132\031.flyteidl.core.Identifier\022\025\n\rretry_" + - "attempt\030\004 \001(\r\022\021\n\tprincipal\030\005 \001(\t\"\276\001\n\014Art" + - "ifactSpec\022%\n\005value\030\001 \001(\0132\026.flyteidl.core" + - 
".Literal\022(\n\004type\030\002 \001(\0132\032.flyteidl.core.L" + - "iteralType\022\031\n\021short_description\030\003 \001(\t\022+\n" + - "\ruser_metadata\030\004 \001(\0132\024.google.protobuf.A" + - "ny\022\025\n\rmetadata_type\030\005 \001(\t\"G\n\026CreateArtif" + - "actResponse\022-\n\010artifact\030\001 \001(\0132\033.flyteidl" + - ".artifact.Artifact\"R\n\022GetArtifactRequest" + - "\022+\n\005query\030\001 \001(\0132\034.flyteidl.core.Artifact" + - "Query\022\017\n\007details\030\002 \001(\010\"D\n\023GetArtifactRes" + - "ponse\022-\n\010artifact\030\001 \001(\0132\033.flyteidl.artif" + - "act.Artifact\"A\n\rSearchOptions\022\031\n\021strict_" + - "partitions\030\001 \001(\010\022\025\n\rlatest_by_key\030\002 \001(\010\"" + - "\356\001\n\026SearchArtifactsRequest\0220\n\014artifact_k" + - "ey\030\001 \001(\0132\032.flyteidl.core.ArtifactKey\022-\n\n" + - "partitions\030\002 \001(\0132\031.flyteidl.core.Partiti" + - "ons\022\021\n\tprincipal\030\003 \001(\t\022\017\n\007version\030\004 \001(\t\022" + - "1\n\007options\030\005 \001(\0132 .flyteidl.artifact.Sea" + - "rchOptions\022\r\n\005token\030\006 \001(\t\022\r\n\005limit\030\007 \001(\005" + - "\"X\n\027SearchArtifactsResponse\022.\n\tartifacts" + - "\030\001 \003(\0132\033.flyteidl.artifact.Artifact\022\r\n\005t" + - "oken\030\002 \001(\t\"\311\001\n\031FindByWorkflowExecRequest" + - "\022;\n\007exec_id\030\001 \001(\0132*.flyteidl.core.Workfl" + - "owExecutionIdentifier\022I\n\tdirection\030\002 \001(\016" + - "26.flyteidl.artifact.FindByWorkflowExecR" + - "equest.Direction\"$\n\tDirection\022\n\n\006INPUTS\020" + - "\000\022\013\n\007OUTPUTS\020\001\"a\n\rAddTagRequest\022.\n\013artif" + - "act_id\030\001 \001(\0132\031.flyteidl.core.ArtifactID\022" + - "\r\n\005value\030\002 \001(\t\022\021\n\toverwrite\030\003 \001(\010\"\020\n\016Add" + - "TagResponse\"O\n\024CreateTriggerRequest\0227\n\023t" + - "rigger_launch_plan\030\001 \001(\0132\032.flyteidl.admi" + - 
"n.LaunchPlan\"\027\n\025CreateTriggerResponse\"E\n" + - "\024DeleteTriggerRequest\022-\n\ntrigger_id\030\001 \001(" + - "\0132\031.flyteidl.core.Identifier\"\027\n\025DeleteTr" + - "iggerResponse\"m\n\020ArtifactProducer\022,\n\tent" + - "ity_id\030\001 \001(\0132\031.flyteidl.core.Identifier\022" + - "+\n\007outputs\030\002 \001(\0132\032.flyteidl.core.Variabl" + - "eMap\"Q\n\027RegisterProducerRequest\0226\n\tprodu" + - "cers\030\001 \003(\0132#.flyteidl.artifact.ArtifactP" + - "roducer\"m\n\020ArtifactConsumer\022,\n\tentity_id" + - "\030\001 \001(\0132\031.flyteidl.core.Identifier\022+\n\006inp" + - "uts\030\002 \001(\0132\033.flyteidl.core.ParameterMap\"Q" + - "\n\027RegisterConsumerRequest\0226\n\tconsumers\030\001" + - " \003(\0132#.flyteidl.artifact.ArtifactConsume" + - "r\"\022\n\020RegisterResponse\"\205\001\n\026ExecutionInput" + - "sRequest\022@\n\014execution_id\030\001 \001(\0132*.flyteid" + - "l.core.WorkflowExecutionIdentifier\022)\n\006in" + - "puts\030\002 \003(\0132\031.flyteidl.core.ArtifactID\"\031\n" + - "\027ExecutionInputsResponse2\327\016\n\020ArtifactReg" + - "istry\022g\n\016CreateArtifact\022(.flyteidl.artif" + - "act.CreateArtifactRequest\032).flyteidl.art" + - "ifact.CreateArtifactResponse\"\000\022\205\005\n\013GetAr" + - "tifact\022%.flyteidl.artifact.GetArtifactRe" + - "quest\032&.flyteidl.artifact.GetArtifactRes" + - "ponse\"\246\004\202\323\344\223\002\237\004\022 /artifacts/api/v1/data/" + - "artifactsZ\270\001\022\265\001/artifacts/api/v1/data/ar" + - "tifact/id/{query.artifact_id.artifact_ke" + - "y.project}/{query.artifact_id.artifact_k" + - "ey.domain}/{query.artifact_id.artifact_k" + - "ey.name}/{query.artifact_id.version}Z\234\001\022" + - "\231\001/artifacts/api/v1/data/artifact/id/{qu" + - "ery.artifact_id.artifact_key.project}/{q" + - "uery.artifact_id.artifact_key.domain}/{q" + - "uery.artifact_id.artifact_key.name}Z\240\001\022\235" + - "\001/artifacts/api/v1/data/artifact/tag/{qu" + - 
"ery.artifact_tag.artifact_key.project}/{" + - "query.artifact_tag.artifact_key.domain}/" + - "{query.artifact_tag.artifact_key.name}\022\240" + - "\002\n\017SearchArtifacts\022).flyteidl.artifact.S" + - "earchArtifactsRequest\032*.flyteidl.artifac" + - "t.SearchArtifactsResponse\"\265\001\202\323\344\223\002\256\001\022_/ar" + - "tifacts/api/v1/data/query/s/{artifact_ke" + - "y.project}/{artifact_key.domain}/{artifa" + - "ct_key.name}ZK\022I/artifacts/api/v1/data/q" + - "uery/{artifact_key.project}/{artifact_ke" + - "y.domain}\022d\n\rCreateTrigger\022\'.flyteidl.ar" + - "tifact.CreateTriggerRequest\032(.flyteidl.a" + - "rtifact.CreateTriggerResponse\"\000\022d\n\rDelet" + - "eTrigger\022\'.flyteidl.artifact.DeleteTrigg" + - "erRequest\032(.flyteidl.artifact.DeleteTrig" + - "gerResponse\"\000\022O\n\006AddTag\022 .flyteidl.artif" + - "act.AddTagRequest\032!.flyteidl.artifact.Ad" + - "dTagResponse\"\000\022e\n\020RegisterProducer\022*.fly" + - "teidl.artifact.RegisterProducerRequest\032#" + - ".flyteidl.artifact.RegisterResponse\"\000\022e\n" + - "\020RegisterConsumer\022*.flyteidl.artifact.Re" + - "gisterConsumerRequest\032#.flyteidl.artifac" + - "t.RegisterResponse\"\000\022m\n\022SetExecutionInpu" + - "ts\022).flyteidl.artifact.ExecutionInputsRe" + - "quest\032*.flyteidl.artifact.ExecutionInput" + - "sResponse\"\000\022\324\001\n\022FindByWorkflowExec\022,.fly" + - "teidl.artifact.FindByWorkflowExecRequest" + - "\032*.flyteidl.artifact.SearchArtifactsResp" + - "onse\"d\202\323\344\223\002^\022\\/artifacts/api/v1/data/que" + - "ry/e/{exec_id.project}/{exec_id.domain}/" + - "{exec_id.name}/{direction}B@Z>github.com" + - "/flyteorg/flyte/flyteidl/gen/pb-go/flyte" + - "idl/artifactb\006proto3" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - com.google.protobuf.AnyProto.getDescriptor(), - com.google.api.AnnotationsProto.getDescriptor(), - flyteidl.admin.LaunchPlanOuterClass.getDescriptor(), - flyteidl.core.Literals.getDescriptor(), - flyteidl.core.Types.getDescriptor(), - flyteidl.core.IdentifierOuterClass.getDescriptor(), - flyteidl.core.ArtifactId.getDescriptor(), - flyteidl.core.Interface.getDescriptor(), - flyteidl.event.Cloudevents.getDescriptor(), - }, assigner); - internal_static_flyteidl_artifact_Artifact_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_flyteidl_artifact_Artifact_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_Artifact_descriptor, - new java.lang.String[] { "ArtifactId", "Spec", "Tags", "Source", }); - internal_static_flyteidl_artifact_CreateArtifactRequest_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_flyteidl_artifact_CreateArtifactRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_CreateArtifactRequest_descriptor, - new java.lang.String[] { "ArtifactKey", "Version", "Spec", "Partitions", "Tag", "Source", }); - internal_static_flyteidl_artifact_CreateArtifactRequest_PartitionsEntry_descriptor = - internal_static_flyteidl_artifact_CreateArtifactRequest_descriptor.getNestedTypes().get(0); - internal_static_flyteidl_artifact_CreateArtifactRequest_PartitionsEntry_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - 
internal_static_flyteidl_artifact_CreateArtifactRequest_PartitionsEntry_descriptor, - new java.lang.String[] { "Key", "Value", }); - internal_static_flyteidl_artifact_ArtifactSource_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_flyteidl_artifact_ArtifactSource_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_ArtifactSource_descriptor, - new java.lang.String[] { "WorkflowExecution", "NodeId", "TaskId", "RetryAttempt", "Principal", }); - internal_static_flyteidl_artifact_ArtifactSpec_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_flyteidl_artifact_ArtifactSpec_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_ArtifactSpec_descriptor, - new java.lang.String[] { "Value", "Type", "ShortDescription", "UserMetadata", "MetadataType", }); - internal_static_flyteidl_artifact_CreateArtifactResponse_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_flyteidl_artifact_CreateArtifactResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_CreateArtifactResponse_descriptor, - new java.lang.String[] { "Artifact", }); - internal_static_flyteidl_artifact_GetArtifactRequest_descriptor = - getDescriptor().getMessageTypes().get(5); - internal_static_flyteidl_artifact_GetArtifactRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_GetArtifactRequest_descriptor, - new java.lang.String[] { "Query", "Details", }); - internal_static_flyteidl_artifact_GetArtifactResponse_descriptor = - getDescriptor().getMessageTypes().get(6); - internal_static_flyteidl_artifact_GetArtifactResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - 
internal_static_flyteidl_artifact_GetArtifactResponse_descriptor, - new java.lang.String[] { "Artifact", }); - internal_static_flyteidl_artifact_SearchOptions_descriptor = - getDescriptor().getMessageTypes().get(7); - internal_static_flyteidl_artifact_SearchOptions_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_SearchOptions_descriptor, - new java.lang.String[] { "StrictPartitions", "LatestByKey", }); - internal_static_flyteidl_artifact_SearchArtifactsRequest_descriptor = - getDescriptor().getMessageTypes().get(8); - internal_static_flyteidl_artifact_SearchArtifactsRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_SearchArtifactsRequest_descriptor, - new java.lang.String[] { "ArtifactKey", "Partitions", "Principal", "Version", "Options", "Token", "Limit", }); - internal_static_flyteidl_artifact_SearchArtifactsResponse_descriptor = - getDescriptor().getMessageTypes().get(9); - internal_static_flyteidl_artifact_SearchArtifactsResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_SearchArtifactsResponse_descriptor, - new java.lang.String[] { "Artifacts", "Token", }); - internal_static_flyteidl_artifact_FindByWorkflowExecRequest_descriptor = - getDescriptor().getMessageTypes().get(10); - internal_static_flyteidl_artifact_FindByWorkflowExecRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_FindByWorkflowExecRequest_descriptor, - new java.lang.String[] { "ExecId", "Direction", }); - internal_static_flyteidl_artifact_AddTagRequest_descriptor = - getDescriptor().getMessageTypes().get(11); - internal_static_flyteidl_artifact_AddTagRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - 
internal_static_flyteidl_artifact_AddTagRequest_descriptor, - new java.lang.String[] { "ArtifactId", "Value", "Overwrite", }); - internal_static_flyteidl_artifact_AddTagResponse_descriptor = - getDescriptor().getMessageTypes().get(12); - internal_static_flyteidl_artifact_AddTagResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_AddTagResponse_descriptor, - new java.lang.String[] { }); - internal_static_flyteidl_artifact_CreateTriggerRequest_descriptor = - getDescriptor().getMessageTypes().get(13); - internal_static_flyteidl_artifact_CreateTriggerRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_CreateTriggerRequest_descriptor, - new java.lang.String[] { "TriggerLaunchPlan", }); - internal_static_flyteidl_artifact_CreateTriggerResponse_descriptor = - getDescriptor().getMessageTypes().get(14); - internal_static_flyteidl_artifact_CreateTriggerResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_CreateTriggerResponse_descriptor, - new java.lang.String[] { }); - internal_static_flyteidl_artifact_DeleteTriggerRequest_descriptor = - getDescriptor().getMessageTypes().get(15); - internal_static_flyteidl_artifact_DeleteTriggerRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_DeleteTriggerRequest_descriptor, - new java.lang.String[] { "TriggerId", }); - internal_static_flyteidl_artifact_DeleteTriggerResponse_descriptor = - getDescriptor().getMessageTypes().get(16); - internal_static_flyteidl_artifact_DeleteTriggerResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_DeleteTriggerResponse_descriptor, - new java.lang.String[] { }); - 
internal_static_flyteidl_artifact_ArtifactProducer_descriptor = - getDescriptor().getMessageTypes().get(17); - internal_static_flyteidl_artifact_ArtifactProducer_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_ArtifactProducer_descriptor, - new java.lang.String[] { "EntityId", "Outputs", }); - internal_static_flyteidl_artifact_RegisterProducerRequest_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_flyteidl_artifact_RegisterProducerRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_RegisterProducerRequest_descriptor, - new java.lang.String[] { "Producers", }); - internal_static_flyteidl_artifact_ArtifactConsumer_descriptor = - getDescriptor().getMessageTypes().get(19); - internal_static_flyteidl_artifact_ArtifactConsumer_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_ArtifactConsumer_descriptor, - new java.lang.String[] { "EntityId", "Inputs", }); - internal_static_flyteidl_artifact_RegisterConsumerRequest_descriptor = - getDescriptor().getMessageTypes().get(20); - internal_static_flyteidl_artifact_RegisterConsumerRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_RegisterConsumerRequest_descriptor, - new java.lang.String[] { "Consumers", }); - internal_static_flyteidl_artifact_RegisterResponse_descriptor = - getDescriptor().getMessageTypes().get(21); - internal_static_flyteidl_artifact_RegisterResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_RegisterResponse_descriptor, - new java.lang.String[] { }); - internal_static_flyteidl_artifact_ExecutionInputsRequest_descriptor = - getDescriptor().getMessageTypes().get(22); - 
internal_static_flyteidl_artifact_ExecutionInputsRequest_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_ExecutionInputsRequest_descriptor, - new java.lang.String[] { "ExecutionId", "Inputs", }); - internal_static_flyteidl_artifact_ExecutionInputsResponse_descriptor = - getDescriptor().getMessageTypes().get(23); - internal_static_flyteidl_artifact_ExecutionInputsResponse_fieldAccessorTable = new - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( - internal_static_flyteidl_artifact_ExecutionInputsResponse_descriptor, - new java.lang.String[] { }); - com.google.protobuf.ExtensionRegistry registry = - com.google.protobuf.ExtensionRegistry.newInstance(); - registry.add(com.google.api.AnnotationsProto.http); - com.google.protobuf.Descriptors.FileDescriptor - .internalUpdateFileDescriptor(descriptor, registry); - com.google.protobuf.AnyProto.getDescriptor(); - com.google.api.AnnotationsProto.getDescriptor(); - flyteidl.admin.LaunchPlanOuterClass.getDescriptor(); - flyteidl.core.Literals.getDescriptor(); - flyteidl.core.Types.getDescriptor(); - flyteidl.core.IdentifierOuterClass.getDescriptor(); - flyteidl.core.ArtifactId.getDescriptor(); - flyteidl.core.Interface.getDescriptor(); - flyteidl.event.Cloudevents.getDescriptor(); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/flyteidl/gen/pb-java/flyteidl/core/ArtifactId.java b/flyteidl/gen/pb-java/flyteidl/core/ArtifactId.java index 3f9d3c7616..86fd69c612 100644 --- a/flyteidl/gen/pb-java/flyteidl/core/ArtifactId.java +++ b/flyteidl/gen/pb-java/flyteidl/core/ArtifactId.java @@ -883,32 +883,39 @@ public interface ArtifactBindingDataOrBuilder extends int getIndex(); /** - *
-     * These two fields are only relevant in the partition value case
-     * 
- * * string partition_key = 2; */ java.lang.String getPartitionKey(); /** - *
-     * These two fields are only relevant in the partition value case
-     * 
- * * string partition_key = 2; */ com.google.protobuf.ByteString getPartitionKeyBytes(); /** - * string transform = 3; + * bool bind_to_time_partition = 3; + */ + boolean getBindToTimePartition(); + + /** + *
+     * This is only relevant in the time partition case
+     * 
+ * + * string transform = 4; */ java.lang.String getTransform(); /** - * string transform = 3; + *
+     * This is only relevant in the time partition case
+     * 
+ * + * string transform = 4; */ com.google.protobuf.ByteString getTransformBytes(); + + public flyteidl.core.ArtifactId.ArtifactBindingData.PartitionDataCase getPartitionDataCase(); } /** *
@@ -927,7 +934,6 @@ private ArtifactBindingData(com.google.protobuf.GeneratedMessageV3.Builder bu
       super(builder);
     }
     private ArtifactBindingData() {
-      partitionKey_ = "";
       transform_ = "";
     }
 
@@ -962,11 +968,16 @@ private ArtifactBindingData(
             }
             case 18: {
               java.lang.String s = input.readStringRequireUtf8();
-
-              partitionKey_ = s;
+              partitionDataCase_ = 2;
+              partitionData_ = s;
               break;
             }
-            case 26: {
+            case 24: {
+              partitionDataCase_ = 3;
+              partitionData_ = input.readBool();
+              break;
+            }
+            case 34: {
               java.lang.String s = input.readStringRequireUtf8();
 
               transform_ = s;
@@ -1004,6 +1015,44 @@ private ArtifactBindingData(
               flyteidl.core.ArtifactId.ArtifactBindingData.class, flyteidl.core.ArtifactId.ArtifactBindingData.Builder.class);
     }
 
+    private int partitionDataCase_ = 0;
+    private java.lang.Object partitionData_;
+    public enum PartitionDataCase
+        implements com.google.protobuf.Internal.EnumLite {
+      PARTITION_KEY(2),
+      BIND_TO_TIME_PARTITION(3),
+      PARTITIONDATA_NOT_SET(0);
+      private final int value;
+      private PartitionDataCase(int value) {
+        this.value = value;
+      }
+      /**
+       * @deprecated Use {@link #forNumber(int)} instead.
+       */
+      @java.lang.Deprecated
+      public static PartitionDataCase valueOf(int value) {
+        return forNumber(value);
+      }
+
+      public static PartitionDataCase forNumber(int value) {
+        switch (value) {
+          case 2: return PARTITION_KEY;
+          case 3: return BIND_TO_TIME_PARTITION;
+          case 0: return PARTITIONDATA_NOT_SET;
+          default: return null;
+        }
+      }
+      public int getNumber() {
+        return this.value;
+      }
+    };
+
+    public PartitionDataCase
+    getPartitionDataCase() {
+      return PartitionDataCase.forNumber(
+          partitionDataCase_);
+    }
+
     public static final int INDEX_FIELD_NUMBER = 1;
     private int index_;
     /**
@@ -1014,51 +1063,67 @@ public int getIndex() {
     }
 
     public static final int PARTITION_KEY_FIELD_NUMBER = 2;
-    private volatile java.lang.Object partitionKey_;
     /**
-     * 
-     * These two fields are only relevant in the partition value case
-     * 
- * * string partition_key = 2; */ public java.lang.String getPartitionKey() { - java.lang.Object ref = partitionKey_; + java.lang.Object ref = ""; + if (partitionDataCase_ == 2) { + ref = partitionData_; + } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); - partitionKey_ = s; + if (partitionDataCase_ == 2) { + partitionData_ = s; + } return s; } } /** - *
-     * These two fields are only relevant in the partition value case
-     * 
- * * string partition_key = 2; */ public com.google.protobuf.ByteString getPartitionKeyBytes() { - java.lang.Object ref = partitionKey_; + java.lang.Object ref = ""; + if (partitionDataCase_ == 2) { + ref = partitionData_; + } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - partitionKey_ = b; + if (partitionDataCase_ == 2) { + partitionData_ = b; + } return b; } else { return (com.google.protobuf.ByteString) ref; } } - public static final int TRANSFORM_FIELD_NUMBER = 3; + public static final int BIND_TO_TIME_PARTITION_FIELD_NUMBER = 3; + /** + * bool bind_to_time_partition = 3; + */ + public boolean getBindToTimePartition() { + if (partitionDataCase_ == 3) { + return (java.lang.Boolean) partitionData_; + } + return false; + } + + public static final int TRANSFORM_FIELD_NUMBER = 4; private volatile java.lang.Object transform_; /** - * string transform = 3; + *
+     * This is only relevant in the time partition case
+     * 
+ * + * string transform = 4; */ public java.lang.String getTransform() { java.lang.Object ref = transform_; @@ -1073,7 +1138,11 @@ public java.lang.String getTransform() { } } /** - * string transform = 3; + *
+     * This is only relevant in the time partition case
+     * 
+ * + * string transform = 4; */ public com.google.protobuf.ByteString getTransformBytes() { @@ -1106,11 +1175,15 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (index_ != 0) { output.writeUInt32(1, index_); } - if (!getPartitionKeyBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 2, partitionKey_); + if (partitionDataCase_ == 2) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, partitionData_); + } + if (partitionDataCase_ == 3) { + output.writeBool( + 3, (boolean)((java.lang.Boolean) partitionData_)); } if (!getTransformBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, transform_); + com.google.protobuf.GeneratedMessageV3.writeString(output, 4, transform_); } unknownFields.writeTo(output); } @@ -1125,11 +1198,16 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeUInt32Size(1, index_); } - if (!getPartitionKeyBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, partitionKey_); + if (partitionDataCase_ == 2) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, partitionData_); + } + if (partitionDataCase_ == 3) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize( + 3, (boolean)((java.lang.Boolean) partitionData_)); } if (!getTransformBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, transform_); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, transform_); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -1148,10 +1226,21 @@ public boolean equals(final java.lang.Object obj) { if (getIndex() != other.getIndex()) return false; - if (!getPartitionKey() - .equals(other.getPartitionKey())) return false; if (!getTransform() .equals(other.getTransform())) return false; + if (!getPartitionDataCase().equals(other.getPartitionDataCase())) return false; + switch 
(partitionDataCase_) { + case 2: + if (!getPartitionKey() + .equals(other.getPartitionKey())) return false; + break; + case 3: + if (getBindToTimePartition() + != other.getBindToTimePartition()) return false; + break; + case 0: + default: + } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -1165,10 +1254,21 @@ public int hashCode() { hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + INDEX_FIELD_NUMBER; hash = (53 * hash) + getIndex(); - hash = (37 * hash) + PARTITION_KEY_FIELD_NUMBER; - hash = (53 * hash) + getPartitionKey().hashCode(); hash = (37 * hash) + TRANSFORM_FIELD_NUMBER; hash = (53 * hash) + getTransform().hashCode(); + switch (partitionDataCase_) { + case 2: + hash = (37 * hash) + PARTITION_KEY_FIELD_NUMBER; + hash = (53 * hash) + getPartitionKey().hashCode(); + break; + case 3: + hash = (37 * hash) + BIND_TO_TIME_PARTITION_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean( + getBindToTimePartition()); + break; + case 0: + default: + } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1308,10 +1408,10 @@ public Builder clear() { super.clear(); index_ = 0; - partitionKey_ = ""; - transform_ = ""; + partitionDataCase_ = 0; + partitionData_ = null; return this; } @@ -1339,8 +1439,14 @@ public flyteidl.core.ArtifactId.ArtifactBindingData build() { public flyteidl.core.ArtifactId.ArtifactBindingData buildPartial() { flyteidl.core.ArtifactId.ArtifactBindingData result = new flyteidl.core.ArtifactId.ArtifactBindingData(this); result.index_ = index_; - result.partitionKey_ = partitionKey_; + if (partitionDataCase_ == 2) { + result.partitionData_ = partitionData_; + } + if (partitionDataCase_ == 3) { + result.partitionData_ = partitionData_; + } result.transform_ = transform_; + result.partitionDataCase_ = partitionDataCase_; onBuilt(); return result; } @@ -1392,14 +1498,25 @@ public Builder mergeFrom(flyteidl.core.ArtifactId.ArtifactBindingData 
other) { if (other.getIndex() != 0) { setIndex(other.getIndex()); } - if (!other.getPartitionKey().isEmpty()) { - partitionKey_ = other.partitionKey_; - onChanged(); - } if (!other.getTransform().isEmpty()) { transform_ = other.transform_; onChanged(); } + switch (other.getPartitionDataCase()) { + case PARTITION_KEY: { + partitionDataCase_ = 2; + partitionData_ = other.partitionData_; + onChanged(); + break; + } + case BIND_TO_TIME_PARTITION: { + setBindToTimePartition(other.getBindToTimePartition()); + break; + } + case PARTITIONDATA_NOT_SET: { + break; + } + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -1428,6 +1545,21 @@ public Builder mergeFrom( } return this; } + private int partitionDataCase_ = 0; + private java.lang.Object partitionData_; + public PartitionDataCase + getPartitionDataCase() { + return PartitionDataCase.forNumber( + partitionDataCase_); + } + + public Builder clearPartitionData() { + partitionDataCase_ = 0; + partitionData_ = null; + onChanged(); + return this; + } + private int index_ ; /** @@ -1455,51 +1587,48 @@ public Builder clearIndex() { return this; } - private java.lang.Object partitionKey_ = ""; /** - *
-       * These two fields are only relevant in the partition value case
-       * 
- * * string partition_key = 2; */ public java.lang.String getPartitionKey() { - java.lang.Object ref = partitionKey_; + java.lang.Object ref = ""; + if (partitionDataCase_ == 2) { + ref = partitionData_; + } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); - partitionKey_ = s; + if (partitionDataCase_ == 2) { + partitionData_ = s; + } return s; } else { return (java.lang.String) ref; } } /** - *
-       * These two fields are only relevant in the partition value case
-       * 
- * * string partition_key = 2; */ public com.google.protobuf.ByteString getPartitionKeyBytes() { - java.lang.Object ref = partitionKey_; + java.lang.Object ref = ""; + if (partitionDataCase_ == 2) { + ref = partitionData_; + } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - partitionKey_ = b; + if (partitionDataCase_ == 2) { + partitionData_ = b; + } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** - *
-       * These two fields are only relevant in the partition value case
-       * 
- * * string partition_key = 2; */ public Builder setPartitionKey( @@ -1507,29 +1636,23 @@ public Builder setPartitionKey( if (value == null) { throw new NullPointerException(); } - - partitionKey_ = value; + partitionDataCase_ = 2; + partitionData_ = value; onChanged(); return this; } /** - *
-       * These two fields are only relevant in the partition value case
-       * 
- * * string partition_key = 2; */ public Builder clearPartitionKey() { - - partitionKey_ = getDefaultInstance().getPartitionKey(); - onChanged(); + if (partitionDataCase_ == 2) { + partitionDataCase_ = 0; + partitionData_ = null; + onChanged(); + } return this; } /** - *
-       * These two fields are only relevant in the partition value case
-       * 
- * * string partition_key = 2; */ public Builder setPartitionKeyBytes( @@ -1538,15 +1661,49 @@ public Builder setPartitionKeyBytes( throw new NullPointerException(); } checkByteStringIsUtf8(value); - - partitionKey_ = value; + partitionDataCase_ = 2; + partitionData_ = value; + onChanged(); + return this; + } + + /** + * bool bind_to_time_partition = 3; + */ + public boolean getBindToTimePartition() { + if (partitionDataCase_ == 3) { + return (java.lang.Boolean) partitionData_; + } + return false; + } + /** + * bool bind_to_time_partition = 3; + */ + public Builder setBindToTimePartition(boolean value) { + partitionDataCase_ = 3; + partitionData_ = value; onChanged(); return this; } + /** + * bool bind_to_time_partition = 3; + */ + public Builder clearBindToTimePartition() { + if (partitionDataCase_ == 3) { + partitionDataCase_ = 0; + partitionData_ = null; + onChanged(); + } + return this; + } private java.lang.Object transform_ = ""; /** - * string transform = 3; + *
+       * This is only relevant in the time partition case
+       * 
+ * + * string transform = 4; */ public java.lang.String getTransform() { java.lang.Object ref = transform_; @@ -1561,7 +1718,11 @@ public java.lang.String getTransform() { } } /** - * string transform = 3; + *
+       * This is only relevant in the time partition case
+       * 
+ * + * string transform = 4; */ public com.google.protobuf.ByteString getTransformBytes() { @@ -1577,7 +1738,11 @@ public java.lang.String getTransform() { } } /** - * string transform = 3; + *
+       * This is only relevant in the time partition case
+       * 
+ * + * string transform = 4; */ public Builder setTransform( java.lang.String value) { @@ -1590,7 +1755,11 @@ public Builder setTransform( return this; } /** - * string transform = 3; + *
+       * This is only relevant in the time partition case
+       * 
+ * + * string transform = 4; */ public Builder clearTransform() { @@ -1599,7 +1768,11 @@ public Builder clearTransform() { return this; } /** - * string transform = 3; + *
+       * This is only relevant in the time partition case
+       * 
+ * + * string transform = 4; */ public Builder setTransformBytes( com.google.protobuf.ByteString value) { @@ -2219,38 +2392,71 @@ public interface LabelValueOrBuilder extends com.google.protobuf.MessageOrBuilder { /** + *
+     * The string static value is for use in the Partitions object
+     * 
+ * * string static_value = 1; */ java.lang.String getStaticValue(); /** + *
+     * The string static value is for use in the Partitions object
+     * 
+ * * string static_value = 1; */ com.google.protobuf.ByteString getStaticValueBytes(); /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + *
+     * The time value is for use in the TimePartition case
+     * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + boolean hasTimeValue(); + /** + *
+     * The time value is for use in the TimePartition case
+     * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + com.google.protobuf.Timestamp getTimeValue(); + /** + *
+     * The time value is for use in the TimePartition case
+     * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + com.google.protobuf.TimestampOrBuilder getTimeValueOrBuilder(); + + /** + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ boolean hasTriggeredBinding(); /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ flyteidl.core.ArtifactId.ArtifactBindingData getTriggeredBinding(); /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder getTriggeredBindingOrBuilder(); /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ boolean hasInputBinding(); /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ flyteidl.core.ArtifactId.InputBindingData getInputBinding(); /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ flyteidl.core.ArtifactId.InputBindingDataOrBuilder getInputBindingOrBuilder(); @@ -2302,8 +2508,22 @@ private LabelValue( break; } case 18: { - flyteidl.core.ArtifactId.ArtifactBindingData.Builder subBuilder = null; + com.google.protobuf.Timestamp.Builder subBuilder = null; if (valueCase_ == 2) { + subBuilder = ((com.google.protobuf.Timestamp) value_).toBuilder(); + } + value_ = + input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((com.google.protobuf.Timestamp) value_); + value_ = subBuilder.buildPartial(); + } + valueCase_ = 2; + break; + } + case 26: { + flyteidl.core.ArtifactId.ArtifactBindingData.Builder subBuilder = null; + if (valueCase_ == 3) { subBuilder = ((flyteidl.core.ArtifactId.ArtifactBindingData) value_).toBuilder(); } value_ = @@ -2312,12 +2532,12 @@ private LabelValue( 
subBuilder.mergeFrom((flyteidl.core.ArtifactId.ArtifactBindingData) value_); value_ = subBuilder.buildPartial(); } - valueCase_ = 2; + valueCase_ = 3; break; } - case 26: { + case 34: { flyteidl.core.ArtifactId.InputBindingData.Builder subBuilder = null; - if (valueCase_ == 3) { + if (valueCase_ == 4) { subBuilder = ((flyteidl.core.ArtifactId.InputBindingData) value_).toBuilder(); } value_ = @@ -2326,7 +2546,7 @@ private LabelValue( subBuilder.mergeFrom((flyteidl.core.ArtifactId.InputBindingData) value_); value_ = subBuilder.buildPartial(); } - valueCase_ = 3; + valueCase_ = 4; break; } default: { @@ -2366,8 +2586,9 @@ private LabelValue( public enum ValueCase implements com.google.protobuf.Internal.EnumLite { STATIC_VALUE(1), - TRIGGERED_BINDING(2), - INPUT_BINDING(3), + TIME_VALUE(2), + TRIGGERED_BINDING(3), + INPUT_BINDING(4), VALUE_NOT_SET(0); private final int value; private ValueCase(int value) { @@ -2384,8 +2605,9 @@ public static ValueCase valueOf(int value) { public static ValueCase forNumber(int value) { switch (value) { case 1: return STATIC_VALUE; - case 2: return TRIGGERED_BINDING; - case 3: return INPUT_BINDING; + case 2: return TIME_VALUE; + case 3: return TRIGGERED_BINDING; + case 4: return INPUT_BINDING; case 0: return VALUE_NOT_SET; default: return null; } @@ -2403,6 +2625,10 @@ public int getNumber() { public static final int STATIC_VALUE_FIELD_NUMBER = 1; /** + *
+     * The string static value is for use in the Partitions object
+     * 
+ * * string static_value = 1; */ public java.lang.String getStaticValue() { @@ -2423,6 +2649,10 @@ public java.lang.String getStaticValue() { } } /** + *
+     * The string static value is for use in the Partitions object
+     * 
+ * * string static_value = 1; */ public com.google.protobuf.ByteString @@ -2444,53 +2674,91 @@ public java.lang.String getStaticValue() { } } - public static final int TRIGGERED_BINDING_FIELD_NUMBER = 2; + public static final int TIME_VALUE_FIELD_NUMBER = 2; /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + *
+     * The time value is for use in the TimePartition case
+     * 
+ * + * .google.protobuf.Timestamp time_value = 2; */ - public boolean hasTriggeredBinding() { + public boolean hasTimeValue() { return valueCase_ == 2; } /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + *
+     * The time value is for use in the TimePartition case
+     * 
+ * + * .google.protobuf.Timestamp time_value = 2; */ - public flyteidl.core.ArtifactId.ArtifactBindingData getTriggeredBinding() { + public com.google.protobuf.Timestamp getTimeValue() { + if (valueCase_ == 2) { + return (com.google.protobuf.Timestamp) value_; + } + return com.google.protobuf.Timestamp.getDefaultInstance(); + } + /** + *
+     * The time value is for use in the TimePartition case
+     * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + public com.google.protobuf.TimestampOrBuilder getTimeValueOrBuilder() { if (valueCase_ == 2) { + return (com.google.protobuf.Timestamp) value_; + } + return com.google.protobuf.Timestamp.getDefaultInstance(); + } + + public static final int TRIGGERED_BINDING_FIELD_NUMBER = 3; + /** + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; + */ + public boolean hasTriggeredBinding() { + return valueCase_ == 3; + } + /** + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; + */ + public flyteidl.core.ArtifactId.ArtifactBindingData getTriggeredBinding() { + if (valueCase_ == 3) { return (flyteidl.core.ArtifactId.ArtifactBindingData) value_; } return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); } /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ public flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder getTriggeredBindingOrBuilder() { - if (valueCase_ == 2) { + if (valueCase_ == 3) { return (flyteidl.core.ArtifactId.ArtifactBindingData) value_; } return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); } - public static final int INPUT_BINDING_FIELD_NUMBER = 3; + public static final int INPUT_BINDING_FIELD_NUMBER = 4; /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ public boolean hasInputBinding() { - return valueCase_ == 3; + return valueCase_ == 4; } /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ public flyteidl.core.ArtifactId.InputBindingData getInputBinding() { - if (valueCase_ == 3) { + if (valueCase_ == 4) { return (flyteidl.core.ArtifactId.InputBindingData) value_; } return flyteidl.core.ArtifactId.InputBindingData.getDefaultInstance(); } /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData 
input_binding = 4; */ public flyteidl.core.ArtifactId.InputBindingDataOrBuilder getInputBindingOrBuilder() { - if (valueCase_ == 3) { + if (valueCase_ == 4) { return (flyteidl.core.ArtifactId.InputBindingData) value_; } return flyteidl.core.ArtifactId.InputBindingData.getDefaultInstance(); @@ -2514,10 +2782,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) com.google.protobuf.GeneratedMessageV3.writeString(output, 1, value_); } if (valueCase_ == 2) { - output.writeMessage(2, (flyteidl.core.ArtifactId.ArtifactBindingData) value_); + output.writeMessage(2, (com.google.protobuf.Timestamp) value_); } if (valueCase_ == 3) { - output.writeMessage(3, (flyteidl.core.ArtifactId.InputBindingData) value_); + output.writeMessage(3, (flyteidl.core.ArtifactId.ArtifactBindingData) value_); + } + if (valueCase_ == 4) { + output.writeMessage(4, (flyteidl.core.ArtifactId.InputBindingData) value_); } unknownFields.writeTo(output); } @@ -2533,11 +2804,15 @@ public int getSerializedSize() { } if (valueCase_ == 2) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, (flyteidl.core.ArtifactId.ArtifactBindingData) value_); + .computeMessageSize(2, (com.google.protobuf.Timestamp) value_); } if (valueCase_ == 3) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, (flyteidl.core.ArtifactId.InputBindingData) value_); + .computeMessageSize(3, (flyteidl.core.ArtifactId.ArtifactBindingData) value_); + } + if (valueCase_ == 4) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, (flyteidl.core.ArtifactId.InputBindingData) value_); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -2561,10 +2836,14 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getStaticValue())) return false; break; case 2: + if (!getTimeValue() + .equals(other.getTimeValue())) return false; + break; + case 3: if (!getTriggeredBinding() .equals(other.getTriggeredBinding())) return false; break; - case 3: + 
case 4: if (!getInputBinding() .equals(other.getInputBinding())) return false; break; @@ -2588,10 +2867,14 @@ public int hashCode() { hash = (53 * hash) + getStaticValue().hashCode(); break; case 2: + hash = (37 * hash) + TIME_VALUE_FIELD_NUMBER; + hash = (53 * hash) + getTimeValue().hashCode(); + break; + case 3: hash = (37 * hash) + TRIGGERED_BINDING_FIELD_NUMBER; hash = (53 * hash) + getTriggeredBinding().hashCode(); break; - case 3: + case 4: hash = (37 * hash) + INPUT_BINDING_FIELD_NUMBER; hash = (53 * hash) + getInputBinding().hashCode(); break; @@ -2763,13 +3046,20 @@ public flyteidl.core.ArtifactId.LabelValue buildPartial() { result.value_ = value_; } if (valueCase_ == 2) { + if (timeValueBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = timeValueBuilder_.build(); + } + } + if (valueCase_ == 3) { if (triggeredBindingBuilder_ == null) { result.value_ = value_; } else { result.value_ = triggeredBindingBuilder_.build(); } } - if (valueCase_ == 3) { + if (valueCase_ == 4) { if (inputBindingBuilder_ == null) { result.value_ = value_; } else { @@ -2832,6 +3122,10 @@ public Builder mergeFrom(flyteidl.core.ArtifactId.LabelValue other) { onChanged(); break; } + case TIME_VALUE: { + mergeTimeValue(other.getTimeValue()); + break; + } case TRIGGERED_BINDING: { mergeTriggeredBinding(other.getTriggeredBinding()); break; @@ -2889,6 +3183,10 @@ public Builder clearValue() { /** + *
+       * The string static value is for use in the Partitions object
+       * 
+ * * string static_value = 1; */ public java.lang.String getStaticValue() { @@ -2909,6 +3207,10 @@ public java.lang.String getStaticValue() { } } /** + *
+       * The string static value is for use in the Partitions object
+       * 
+ * * string static_value = 1; */ public com.google.protobuf.ByteString @@ -2930,6 +3232,10 @@ public java.lang.String getStaticValue() { } } /** + *
+       * The string static value is for use in the Partitions object
+       * 
+ * * string static_value = 1; */ public Builder setStaticValue( @@ -2943,6 +3249,10 @@ public Builder setStaticValue( return this; } /** + *
+       * The string static value is for use in the Partitions object
+       * 
+ * * string static_value = 1; */ public Builder clearStaticValue() { @@ -2954,6 +3264,10 @@ public Builder clearStaticValue() { return this; } /** + *
+       * The string static value is for use in the Partitions object
+       * 
+ * * string static_value = 1; */ public Builder setStaticValueBytes( @@ -2968,32 +3282,204 @@ public Builder setStaticValueBytes( return this; } + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> timeValueBuilder_; + /** + *
+       * The time value is for use in the TimePartition case
+       * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + public boolean hasTimeValue() { + return valueCase_ == 2; + } + /** + *
+       * The time value is for use in the TimePartition case
+       * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + public com.google.protobuf.Timestamp getTimeValue() { + if (timeValueBuilder_ == null) { + if (valueCase_ == 2) { + return (com.google.protobuf.Timestamp) value_; + } + return com.google.protobuf.Timestamp.getDefaultInstance(); + } else { + if (valueCase_ == 2) { + return timeValueBuilder_.getMessage(); + } + return com.google.protobuf.Timestamp.getDefaultInstance(); + } + } + /** + *
+       * The time value is for use in the TimePartition case
+       * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + public Builder setTimeValue(com.google.protobuf.Timestamp value) { + if (timeValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + timeValueBuilder_.setMessage(value); + } + valueCase_ = 2; + return this; + } + /** + *
+       * The time value is for use in the TimePartition case
+       * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + public Builder setTimeValue( + com.google.protobuf.Timestamp.Builder builderForValue) { + if (timeValueBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + timeValueBuilder_.setMessage(builderForValue.build()); + } + valueCase_ = 2; + return this; + } + /** + *
+       * The time value is for use in the TimePartition case
+       * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + public Builder mergeTimeValue(com.google.protobuf.Timestamp value) { + if (timeValueBuilder_ == null) { + if (valueCase_ == 2 && + value_ != com.google.protobuf.Timestamp.getDefaultInstance()) { + value_ = com.google.protobuf.Timestamp.newBuilder((com.google.protobuf.Timestamp) value_) + .mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + if (valueCase_ == 2) { + timeValueBuilder_.mergeFrom(value); + } + timeValueBuilder_.setMessage(value); + } + valueCase_ = 2; + return this; + } + /** + *
+       * The time value is for use in the TimePartition case
+       * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + public Builder clearTimeValue() { + if (timeValueBuilder_ == null) { + if (valueCase_ == 2) { + valueCase_ = 0; + value_ = null; + onChanged(); + } + } else { + if (valueCase_ == 2) { + valueCase_ = 0; + value_ = null; + } + timeValueBuilder_.clear(); + } + return this; + } + /** + *
+       * The time value is for use in the TimePartition case
+       * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + public com.google.protobuf.Timestamp.Builder getTimeValueBuilder() { + return getTimeValueFieldBuilder().getBuilder(); + } + /** + *
+       * The time value is for use in the TimePartition case
+       * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + public com.google.protobuf.TimestampOrBuilder getTimeValueOrBuilder() { + if ((valueCase_ == 2) && (timeValueBuilder_ != null)) { + return timeValueBuilder_.getMessageOrBuilder(); + } else { + if (valueCase_ == 2) { + return (com.google.protobuf.Timestamp) value_; + } + return com.google.protobuf.Timestamp.getDefaultInstance(); + } + } + /** + *
+       * The time value is for use in the TimePartition case
+       * 
+ * + * .google.protobuf.Timestamp time_value = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> + getTimeValueFieldBuilder() { + if (timeValueBuilder_ == null) { + if (!(valueCase_ == 2)) { + value_ = com.google.protobuf.Timestamp.getDefaultInstance(); + } + timeValueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( + (com.google.protobuf.Timestamp) value_, + getParentForChildren(), + isClean()); + value_ = null; + } + valueCase_ = 2; + onChanged();; + return timeValueBuilder_; + } + private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.ArtifactId.ArtifactBindingData, flyteidl.core.ArtifactId.ArtifactBindingData.Builder, flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder> triggeredBindingBuilder_; /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ public boolean hasTriggeredBinding() { - return valueCase_ == 2; + return valueCase_ == 3; } /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ public flyteidl.core.ArtifactId.ArtifactBindingData getTriggeredBinding() { if (triggeredBindingBuilder_ == null) { - if (valueCase_ == 2) { + if (valueCase_ == 3) { return (flyteidl.core.ArtifactId.ArtifactBindingData) value_; } return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); } else { - if (valueCase_ == 2) { + if (valueCase_ == 3) { return triggeredBindingBuilder_.getMessage(); } return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); } } /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ public Builder 
setTriggeredBinding(flyteidl.core.ArtifactId.ArtifactBindingData value) { if (triggeredBindingBuilder_ == null) { @@ -3005,11 +3491,11 @@ public Builder setTriggeredBinding(flyteidl.core.ArtifactId.ArtifactBindingData } else { triggeredBindingBuilder_.setMessage(value); } - valueCase_ = 2; + valueCase_ = 3; return this; } /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ public Builder setTriggeredBinding( flyteidl.core.ArtifactId.ArtifactBindingData.Builder builderForValue) { @@ -3019,15 +3505,15 @@ public Builder setTriggeredBinding( } else { triggeredBindingBuilder_.setMessage(builderForValue.build()); } - valueCase_ = 2; + valueCase_ = 3; return this; } /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ public Builder mergeTriggeredBinding(flyteidl.core.ArtifactId.ArtifactBindingData value) { if (triggeredBindingBuilder_ == null) { - if (valueCase_ == 2 && + if (valueCase_ == 3 && value_ != flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance()) { value_ = flyteidl.core.ArtifactId.ArtifactBindingData.newBuilder((flyteidl.core.ArtifactId.ArtifactBindingData) value_) .mergeFrom(value).buildPartial(); @@ -3036,26 +3522,26 @@ public Builder mergeTriggeredBinding(flyteidl.core.ArtifactId.ArtifactBindingDat } onChanged(); } else { - if (valueCase_ == 2) { + if (valueCase_ == 3) { triggeredBindingBuilder_.mergeFrom(value); } triggeredBindingBuilder_.setMessage(value); } - valueCase_ = 2; + valueCase_ = 3; return this; } /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ public Builder clearTriggeredBinding() { if (triggeredBindingBuilder_ == null) { - if (valueCase_ == 2) { + if (valueCase_ == 3) { valueCase_ = 0; value_ = null; onChanged(); } } else { - if (valueCase_ == 2) { + if (valueCase_ == 3) { valueCase_ = 0; 
value_ = null; } @@ -3064,32 +3550,32 @@ public Builder clearTriggeredBinding() { return this; } /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ public flyteidl.core.ArtifactId.ArtifactBindingData.Builder getTriggeredBindingBuilder() { return getTriggeredBindingFieldBuilder().getBuilder(); } /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ public flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder getTriggeredBindingOrBuilder() { - if ((valueCase_ == 2) && (triggeredBindingBuilder_ != null)) { + if ((valueCase_ == 3) && (triggeredBindingBuilder_ != null)) { return triggeredBindingBuilder_.getMessageOrBuilder(); } else { - if (valueCase_ == 2) { + if (valueCase_ == 3) { return (flyteidl.core.ArtifactId.ArtifactBindingData) value_; } return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); } } /** - * .flyteidl.core.ArtifactBindingData triggered_binding = 2; + * .flyteidl.core.ArtifactBindingData triggered_binding = 3; */ private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.ArtifactId.ArtifactBindingData, flyteidl.core.ArtifactId.ArtifactBindingData.Builder, flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder> getTriggeredBindingFieldBuilder() { if (triggeredBindingBuilder_ == null) { - if (!(valueCase_ == 2)) { + if (!(valueCase_ == 3)) { value_ = flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); } triggeredBindingBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< @@ -3099,7 +3585,7 @@ public flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder getTriggeredBinding isClean()); value_ = null; } - valueCase_ = 2; + valueCase_ = 3; onChanged();; return triggeredBindingBuilder_; } @@ -3107,29 +3593,29 @@ public flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder getTriggeredBinding private com.google.protobuf.SingleFieldBuilderV3< 
flyteidl.core.ArtifactId.InputBindingData, flyteidl.core.ArtifactId.InputBindingData.Builder, flyteidl.core.ArtifactId.InputBindingDataOrBuilder> inputBindingBuilder_; /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ public boolean hasInputBinding() { - return valueCase_ == 3; + return valueCase_ == 4; } /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ public flyteidl.core.ArtifactId.InputBindingData getInputBinding() { if (inputBindingBuilder_ == null) { - if (valueCase_ == 3) { + if (valueCase_ == 4) { return (flyteidl.core.ArtifactId.InputBindingData) value_; } return flyteidl.core.ArtifactId.InputBindingData.getDefaultInstance(); } else { - if (valueCase_ == 3) { + if (valueCase_ == 4) { return inputBindingBuilder_.getMessage(); } return flyteidl.core.ArtifactId.InputBindingData.getDefaultInstance(); } } /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ public Builder setInputBinding(flyteidl.core.ArtifactId.InputBindingData value) { if (inputBindingBuilder_ == null) { @@ -3141,11 +3627,11 @@ public Builder setInputBinding(flyteidl.core.ArtifactId.InputBindingData value) } else { inputBindingBuilder_.setMessage(value); } - valueCase_ = 3; + valueCase_ = 4; return this; } /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ public Builder setInputBinding( flyteidl.core.ArtifactId.InputBindingData.Builder builderForValue) { @@ -3155,15 +3641,15 @@ public Builder setInputBinding( } else { inputBindingBuilder_.setMessage(builderForValue.build()); } - valueCase_ = 3; + valueCase_ = 4; return this; } /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ public Builder mergeInputBinding(flyteidl.core.ArtifactId.InputBindingData value) { if 
(inputBindingBuilder_ == null) { - if (valueCase_ == 3 && + if (valueCase_ == 4 && value_ != flyteidl.core.ArtifactId.InputBindingData.getDefaultInstance()) { value_ = flyteidl.core.ArtifactId.InputBindingData.newBuilder((flyteidl.core.ArtifactId.InputBindingData) value_) .mergeFrom(value).buildPartial(); @@ -3172,26 +3658,26 @@ public Builder mergeInputBinding(flyteidl.core.ArtifactId.InputBindingData value } onChanged(); } else { - if (valueCase_ == 3) { + if (valueCase_ == 4) { inputBindingBuilder_.mergeFrom(value); } inputBindingBuilder_.setMessage(value); } - valueCase_ = 3; + valueCase_ = 4; return this; } /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ public Builder clearInputBinding() { if (inputBindingBuilder_ == null) { - if (valueCase_ == 3) { + if (valueCase_ == 4) { valueCase_ = 0; value_ = null; onChanged(); } } else { - if (valueCase_ == 3) { + if (valueCase_ == 4) { valueCase_ = 0; value_ = null; } @@ -3200,32 +3686,32 @@ public Builder clearInputBinding() { return this; } /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ public flyteidl.core.ArtifactId.InputBindingData.Builder getInputBindingBuilder() { return getInputBindingFieldBuilder().getBuilder(); } /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData input_binding = 4; */ public flyteidl.core.ArtifactId.InputBindingDataOrBuilder getInputBindingOrBuilder() { - if ((valueCase_ == 3) && (inputBindingBuilder_ != null)) { + if ((valueCase_ == 4) && (inputBindingBuilder_ != null)) { return inputBindingBuilder_.getMessageOrBuilder(); } else { - if (valueCase_ == 3) { + if (valueCase_ == 4) { return (flyteidl.core.ArtifactId.InputBindingData) value_; } return flyteidl.core.ArtifactId.InputBindingData.getDefaultInstance(); } } /** - * .flyteidl.core.InputBindingData input_binding = 3; + * .flyteidl.core.InputBindingData 
input_binding = 4; */ private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.ArtifactId.InputBindingData, flyteidl.core.ArtifactId.InputBindingData.Builder, flyteidl.core.ArtifactId.InputBindingDataOrBuilder> getInputBindingFieldBuilder() { if (inputBindingBuilder_ == null) { - if (!(valueCase_ == 3)) { + if (!(valueCase_ == 4)) { value_ = flyteidl.core.ArtifactId.InputBindingData.getDefaultInstance(); } inputBindingBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< @@ -3235,7 +3721,7 @@ public flyteidl.core.ArtifactId.InputBindingDataOrBuilder getInputBindingOrBuild isClean()); value_ = null; } - valueCase_ = 3; + valueCase_ = 4; onChanged();; return inputBindingBuilder_; } @@ -4013,62 +4499,36 @@ public flyteidl.core.ArtifactId.Partitions getDefaultInstanceForType() { } - public interface ArtifactIDOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.core.ArtifactID) + public interface TimePartitionOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.core.TimePartition) com.google.protobuf.MessageOrBuilder { /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - boolean hasArtifactKey(); - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - flyteidl.core.ArtifactId.ArtifactKey getArtifactKey(); - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder(); - - /** - * string version = 2; - */ - java.lang.String getVersion(); - /** - * string version = 2; - */ - com.google.protobuf.ByteString - getVersionBytes(); - - /** - * .flyteidl.core.Partitions partitions = 3; + * .flyteidl.core.LabelValue value = 1; */ - boolean hasPartitions(); + boolean hasValue(); /** - * .flyteidl.core.Partitions partitions = 3; + * .flyteidl.core.LabelValue value = 1; */ - flyteidl.core.ArtifactId.Partitions getPartitions(); + flyteidl.core.ArtifactId.LabelValue getValue(); /** - * .flyteidl.core.Partitions partitions = 3; + * 
.flyteidl.core.LabelValue value = 1; */ - flyteidl.core.ArtifactId.PartitionsOrBuilder getPartitionsOrBuilder(); - - public flyteidl.core.ArtifactId.ArtifactID.DimensionsCase getDimensionsCase(); + flyteidl.core.ArtifactId.LabelValueOrBuilder getValueOrBuilder(); } /** - * Protobuf type {@code flyteidl.core.ArtifactID} + * Protobuf type {@code flyteidl.core.TimePartition} */ - public static final class ArtifactID extends + public static final class TimePartition extends com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.core.ArtifactID) - ArtifactIDOrBuilder { + // @@protoc_insertion_point(message_implements:flyteidl.core.TimePartition) + TimePartitionOrBuilder { private static final long serialVersionUID = 0L; - // Use ArtifactID.newBuilder() to construct. - private ArtifactID(com.google.protobuf.GeneratedMessageV3.Builder builder) { + // Use TimePartition.newBuilder() to construct. + private TimePartition(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } - private ArtifactID() { - version_ = ""; + private TimePartition() { } @java.lang.Override @@ -4076,7 +4536,7 @@ private ArtifactID() { getUnknownFields() { return this.unknownFields; } - private ArtifactID( + private TimePartition( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -4096,38 +4556,18 @@ private ArtifactID( done = true; break; case 10: { - flyteidl.core.ArtifactId.ArtifactKey.Builder subBuilder = null; - if (artifactKey_ != null) { - subBuilder = artifactKey_.toBuilder(); + flyteidl.core.ArtifactId.LabelValue.Builder subBuilder = null; + if (value_ != null) { + subBuilder = value_.toBuilder(); } - artifactKey_ = input.readMessage(flyteidl.core.ArtifactId.ArtifactKey.parser(), extensionRegistry); + value_ = input.readMessage(flyteidl.core.ArtifactId.LabelValue.parser(), extensionRegistry); if 
(subBuilder != null) { - subBuilder.mergeFrom(artifactKey_); - artifactKey_ = subBuilder.buildPartial(); + subBuilder.mergeFrom(value_); + value_ = subBuilder.buildPartial(); } break; } - case 18: { - java.lang.String s = input.readStringRequireUtf8(); - - version_ = s; - break; - } - case 26: { - flyteidl.core.ArtifactId.Partitions.Builder subBuilder = null; - if (dimensionsCase_ == 3) { - subBuilder = ((flyteidl.core.ArtifactId.Partitions) dimensions_).toBuilder(); - } - dimensions_ = - input.readMessage(flyteidl.core.ArtifactId.Partitions.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom((flyteidl.core.ArtifactId.Partitions) dimensions_); - dimensions_ = subBuilder.buildPartial(); - } - dimensionsCase_ = 3; - break; - } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { @@ -4149,132 +4589,36 @@ private ArtifactID( } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactID_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_TimePartition_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactID_fieldAccessorTable + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_TimePartition_fieldAccessorTable .ensureFieldAccessorsInitialized( - flyteidl.core.ArtifactId.ArtifactID.class, flyteidl.core.ArtifactId.ArtifactID.Builder.class); + flyteidl.core.ArtifactId.TimePartition.class, flyteidl.core.ArtifactId.TimePartition.Builder.class); } - private int dimensionsCase_ = 0; - private java.lang.Object dimensions_; - public enum DimensionsCase - implements com.google.protobuf.Internal.EnumLite { - PARTITIONS(3), - DIMENSIONS_NOT_SET(0); - private final int value; - private DimensionsCase(int value) { - this.value = value; - } - /** 
- * @deprecated Use {@link #forNumber(int)} instead. - */ - @java.lang.Deprecated - public static DimensionsCase valueOf(int value) { - return forNumber(value); - } - - public static DimensionsCase forNumber(int value) { - switch (value) { - case 3: return PARTITIONS; - case 0: return DIMENSIONS_NOT_SET; - default: return null; - } - } - public int getNumber() { - return this.value; - } - }; - - public DimensionsCase - getDimensionsCase() { - return DimensionsCase.forNumber( - dimensionsCase_); - } - - public static final int ARTIFACT_KEY_FIELD_NUMBER = 1; - private flyteidl.core.ArtifactId.ArtifactKey artifactKey_; - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public boolean hasArtifactKey() { - return artifactKey_ != null; - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKey getArtifactKey() { - return artifactKey_ == null ? flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder() { - return getArtifactKey(); - } - - public static final int VERSION_FIELD_NUMBER = 2; - private volatile java.lang.Object version_; - /** - * string version = 2; - */ - public java.lang.String getVersion() { - java.lang.Object ref = version_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - version_ = s; - return s; - } - } - /** - * string version = 2; - */ - public com.google.protobuf.ByteString - getVersionBytes() { - java.lang.Object ref = version_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - version_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - 
public static final int PARTITIONS_FIELD_NUMBER = 3; + public static final int VALUE_FIELD_NUMBER = 1; + private flyteidl.core.ArtifactId.LabelValue value_; /** - * .flyteidl.core.Partitions partitions = 3; + * .flyteidl.core.LabelValue value = 1; */ - public boolean hasPartitions() { - return dimensionsCase_ == 3; + public boolean hasValue() { + return value_ != null; } /** - * .flyteidl.core.Partitions partitions = 3; + * .flyteidl.core.LabelValue value = 1; */ - public flyteidl.core.ArtifactId.Partitions getPartitions() { - if (dimensionsCase_ == 3) { - return (flyteidl.core.ArtifactId.Partitions) dimensions_; - } - return flyteidl.core.ArtifactId.Partitions.getDefaultInstance(); + public flyteidl.core.ArtifactId.LabelValue getValue() { + return value_ == null ? flyteidl.core.ArtifactId.LabelValue.getDefaultInstance() : value_; } /** - * .flyteidl.core.Partitions partitions = 3; + * .flyteidl.core.LabelValue value = 1; */ - public flyteidl.core.ArtifactId.PartitionsOrBuilder getPartitionsOrBuilder() { - if (dimensionsCase_ == 3) { - return (flyteidl.core.ArtifactId.Partitions) dimensions_; - } - return flyteidl.core.ArtifactId.Partitions.getDefaultInstance(); + public flyteidl.core.ArtifactId.LabelValueOrBuilder getValueOrBuilder() { + return getValue(); } private byte memoizedIsInitialized = -1; @@ -4291,14 +4635,8 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (artifactKey_ != null) { - output.writeMessage(1, getArtifactKey()); - } - if (!getVersionBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 2, version_); - } - if (dimensionsCase_ == 3) { - output.writeMessage(3, (flyteidl.core.ArtifactId.Partitions) dimensions_); + if (value_ != null) { + output.writeMessage(1, getValue()); } unknownFields.writeTo(output); } @@ -4309,16 +4647,9 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if 
(artifactKey_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getArtifactKey()); - } - if (!getVersionBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, version_); - } - if (dimensionsCase_ == 3) { + if (value_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, (flyteidl.core.ArtifactId.Partitions) dimensions_); + .computeMessageSize(1, getValue()); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -4330,26 +4661,15 @@ public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } - if (!(obj instanceof flyteidl.core.ArtifactId.ArtifactID)) { + if (!(obj instanceof flyteidl.core.ArtifactId.TimePartition)) { return super.equals(obj); } - flyteidl.core.ArtifactId.ArtifactID other = (flyteidl.core.ArtifactId.ArtifactID) obj; + flyteidl.core.ArtifactId.TimePartition other = (flyteidl.core.ArtifactId.TimePartition) obj; - if (hasArtifactKey() != other.hasArtifactKey()) return false; - if (hasArtifactKey()) { - if (!getArtifactKey() - .equals(other.getArtifactKey())) return false; - } - if (!getVersion() - .equals(other.getVersion())) return false; - if (!getDimensionsCase().equals(other.getDimensionsCase())) return false; - switch (dimensionsCase_) { - case 3: - if (!getPartitions() - .equals(other.getPartitions())) return false; - break; - case 0: - default: + if (hasValue() != other.hasValue()) return false; + if (hasValue()) { + if (!getValue() + .equals(other.getValue())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; @@ -4362,88 +4682,78 @@ public int hashCode() { } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); - if (hasArtifactKey()) { - hash = (37 * hash) + ARTIFACT_KEY_FIELD_NUMBER; - hash = (53 * hash) + getArtifactKey().hashCode(); - } - hash = (37 * hash) + VERSION_FIELD_NUMBER; - hash = (53 * hash) + getVersion().hashCode(); - switch (dimensionsCase_) 
{ - case 3: - hash = (37 * hash) + PARTITIONS_FIELD_NUMBER; - hash = (53 * hash) + getPartitions().hashCode(); - break; - case 0: - default: + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } - public static flyteidl.core.ArtifactId.ArtifactID parseFrom( + public static flyteidl.core.ArtifactId.TimePartition parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.ArtifactID parseFrom( + public static flyteidl.core.ArtifactId.TimePartition parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactID parseFrom( + public static flyteidl.core.ArtifactId.TimePartition parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.ArtifactID parseFrom( + public static flyteidl.core.ArtifactId.TimePartition parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactID parseFrom(byte[] data) + public static flyteidl.core.ArtifactId.TimePartition parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.ArtifactID parseFrom( + public static flyteidl.core.ArtifactId.TimePartition parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactID parseFrom(java.io.InputStream input) + public static flyteidl.core.ArtifactId.TimePartition parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static flyteidl.core.ArtifactId.ArtifactID parseFrom( + public static flyteidl.core.ArtifactId.TimePartition parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactID parseDelimitedFrom(java.io.InputStream input) + public static flyteidl.core.ArtifactId.TimePartition parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } - public static flyteidl.core.ArtifactId.ArtifactID parseDelimitedFrom( + public static flyteidl.core.ArtifactId.TimePartition parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactID parseFrom( + public static flyteidl.core.ArtifactId.TimePartition parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static flyteidl.core.ArtifactId.ArtifactID parseFrom( + public static flyteidl.core.ArtifactId.TimePartition parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { @@ -4456,7 +4766,7 @@ public static flyteidl.core.ArtifactId.ArtifactID parseFrom( public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(flyteidl.core.ArtifactId.ArtifactID prototype) { + public static Builder newBuilder(flyteidl.core.ArtifactId.TimePartition prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override @@ -4472,26 +4782,26 @@ protected Builder newBuilderForType( return builder; } /** - * Protobuf type {@code flyteidl.core.ArtifactID} + * Protobuf type {@code flyteidl.core.TimePartition} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.core.ArtifactID) - flyteidl.core.ArtifactId.ArtifactIDOrBuilder { + // @@protoc_insertion_point(builder_implements:flyteidl.core.TimePartition) + flyteidl.core.ArtifactId.TimePartitionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactID_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_TimePartition_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactID_fieldAccessorTable + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_TimePartition_fieldAccessorTable .ensureFieldAccessorsInitialized( - flyteidl.core.ArtifactId.ArtifactID.class, flyteidl.core.ArtifactId.ArtifactID.Builder.class); + flyteidl.core.ArtifactId.TimePartition.class, flyteidl.core.ArtifactId.TimePartition.Builder.class); } - // Construct using flyteidl.core.ArtifactId.ArtifactID.newBuilder() + // Construct using flyteidl.core.ArtifactId.TimePartition.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -4509,33 
+4819,29 @@ private void maybeForceBuilderInitialization() { @java.lang.Override public Builder clear() { super.clear(); - if (artifactKeyBuilder_ == null) { - artifactKey_ = null; + if (valueBuilder_ == null) { + value_ = null; } else { - artifactKey_ = null; - artifactKeyBuilder_ = null; + value_ = null; + valueBuilder_ = null; } - version_ = ""; - - dimensionsCase_ = 0; - dimensions_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactID_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_TimePartition_descriptor; } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactID getDefaultInstanceForType() { - return flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); + public flyteidl.core.ArtifactId.TimePartition getDefaultInstanceForType() { + return flyteidl.core.ArtifactId.TimePartition.getDefaultInstance(); } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactID build() { - flyteidl.core.ArtifactId.ArtifactID result = buildPartial(); + public flyteidl.core.ArtifactId.TimePartition build() { + flyteidl.core.ArtifactId.TimePartition result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } @@ -4543,22 +4849,13 @@ public flyteidl.core.ArtifactId.ArtifactID build() { } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactID buildPartial() { - flyteidl.core.ArtifactId.ArtifactID result = new flyteidl.core.ArtifactId.ArtifactID(this); - if (artifactKeyBuilder_ == null) { - result.artifactKey_ = artifactKey_; + public flyteidl.core.ArtifactId.TimePartition buildPartial() { + flyteidl.core.ArtifactId.TimePartition result = new flyteidl.core.ArtifactId.TimePartition(this); + if (valueBuilder_ == null) { + result.value_ = value_; } else { - result.artifactKey_ = artifactKeyBuilder_.build(); - } - result.version_ = version_; - if 
(dimensionsCase_ == 3) { - if (partitionsBuilder_ == null) { - result.dimensions_ = dimensions_; - } else { - result.dimensions_ = partitionsBuilder_.build(); - } + result.value_ = valueBuilder_.build(); } - result.dimensionsCase_ = dimensionsCase_; onBuilt(); return result; } @@ -4597,31 +4894,18 @@ public Builder addRepeatedField( } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.core.ArtifactId.ArtifactID) { - return mergeFrom((flyteidl.core.ArtifactId.ArtifactID)other); + if (other instanceof flyteidl.core.ArtifactId.TimePartition) { + return mergeFrom((flyteidl.core.ArtifactId.TimePartition)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(flyteidl.core.ArtifactId.ArtifactID other) { - if (other == flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance()) return this; - if (other.hasArtifactKey()) { - mergeArtifactKey(other.getArtifactKey()); - } - if (!other.getVersion().isEmpty()) { - version_ = other.version_; - onChanged(); - } - switch (other.getDimensionsCase()) { - case PARTITIONS: { - mergePartitions(other.getPartitions()); - break; - } - case DIMENSIONS_NOT_SET: { - break; - } + public Builder mergeFrom(flyteidl.core.ArtifactId.TimePartition other) { + if (other == flyteidl.core.ArtifactId.TimePartition.getDefaultInstance()) return this; + if (other.hasValue()) { + mergeValue(other.getValue()); } this.mergeUnknownFields(other.unknownFields); onChanged(); @@ -4638,11 +4922,11 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - flyteidl.core.ArtifactId.ArtifactID parsedMessage = null; + flyteidl.core.ArtifactId.TimePartition parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.core.ArtifactId.ArtifactID) 
e.getUnfinishedMessage(); + parsedMessage = (flyteidl.core.ArtifactId.TimePartition) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -4651,342 +4935,122 @@ public Builder mergeFrom( } return this; } - private int dimensionsCase_ = 0; - private java.lang.Object dimensions_; - public DimensionsCase - getDimensionsCase() { - return DimensionsCase.forNumber( - dimensionsCase_); - } - - public Builder clearDimensions() { - dimensionsCase_ = 0; - dimensions_ = null; - onChanged(); - return this; - } - - private flyteidl.core.ArtifactId.ArtifactKey artifactKey_; + private flyteidl.core.ArtifactId.LabelValue value_; private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder> artifactKeyBuilder_; + flyteidl.core.ArtifactId.LabelValue, flyteidl.core.ArtifactId.LabelValue.Builder, flyteidl.core.ArtifactId.LabelValueOrBuilder> valueBuilder_; /** - * .flyteidl.core.ArtifactKey artifact_key = 1; + * .flyteidl.core.LabelValue value = 1; */ - public boolean hasArtifactKey() { - return artifactKeyBuilder_ != null || artifactKey_ != null; + public boolean hasValue() { + return valueBuilder_ != null || value_ != null; } /** - * .flyteidl.core.ArtifactKey artifact_key = 1; + * .flyteidl.core.LabelValue value = 1; */ - public flyteidl.core.ArtifactId.ArtifactKey getArtifactKey() { - if (artifactKeyBuilder_ == null) { - return artifactKey_ == null ? flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; + public flyteidl.core.ArtifactId.LabelValue getValue() { + if (valueBuilder_ == null) { + return value_ == null ? 
flyteidl.core.ArtifactId.LabelValue.getDefaultInstance() : value_; } else { - return artifactKeyBuilder_.getMessage(); + return valueBuilder_.getMessage(); } } /** - * .flyteidl.core.ArtifactKey artifact_key = 1; + * .flyteidl.core.LabelValue value = 1; */ - public Builder setArtifactKey(flyteidl.core.ArtifactId.ArtifactKey value) { - if (artifactKeyBuilder_ == null) { + public Builder setValue(flyteidl.core.ArtifactId.LabelValue value) { + if (valueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - artifactKey_ = value; + value_ = value; onChanged(); } else { - artifactKeyBuilder_.setMessage(value); + valueBuilder_.setMessage(value); } return this; } /** - * .flyteidl.core.ArtifactKey artifact_key = 1; + * .flyteidl.core.LabelValue value = 1; */ - public Builder setArtifactKey( - flyteidl.core.ArtifactId.ArtifactKey.Builder builderForValue) { - if (artifactKeyBuilder_ == null) { - artifactKey_ = builderForValue.build(); + public Builder setValue( + flyteidl.core.ArtifactId.LabelValue.Builder builderForValue) { + if (valueBuilder_ == null) { + value_ = builderForValue.build(); onChanged(); } else { - artifactKeyBuilder_.setMessage(builderForValue.build()); + valueBuilder_.setMessage(builderForValue.build()); } return this; } /** - * .flyteidl.core.ArtifactKey artifact_key = 1; + * .flyteidl.core.LabelValue value = 1; */ - public Builder mergeArtifactKey(flyteidl.core.ArtifactId.ArtifactKey value) { - if (artifactKeyBuilder_ == null) { - if (artifactKey_ != null) { - artifactKey_ = - flyteidl.core.ArtifactId.ArtifactKey.newBuilder(artifactKey_).mergeFrom(value).buildPartial(); + public Builder mergeValue(flyteidl.core.ArtifactId.LabelValue value) { + if (valueBuilder_ == null) { + if (value_ != null) { + value_ = + flyteidl.core.ArtifactId.LabelValue.newBuilder(value_).mergeFrom(value).buildPartial(); } else { - artifactKey_ = value; + value_ = value; } onChanged(); } else { - artifactKeyBuilder_.mergeFrom(value); + 
valueBuilder_.mergeFrom(value); } return this; } /** - * .flyteidl.core.ArtifactKey artifact_key = 1; + * .flyteidl.core.LabelValue value = 1; */ - public Builder clearArtifactKey() { - if (artifactKeyBuilder_ == null) { - artifactKey_ = null; - onChanged(); - } else { - artifactKey_ = null; - artifactKeyBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKey.Builder getArtifactKeyBuilder() { - - onChanged(); - return getArtifactKeyFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - public flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder() { - if (artifactKeyBuilder_ != null) { - return artifactKeyBuilder_.getMessageOrBuilder(); + public Builder clearValue() { + if (valueBuilder_ == null) { + value_ = null; + onChanged(); } else { - return artifactKey_ == null ? - flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; - } - } - /** - * .flyteidl.core.ArtifactKey artifact_key = 1; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder> - getArtifactKeyFieldBuilder() { - if (artifactKeyBuilder_ == null) { - artifactKeyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder>( - getArtifactKey(), - getParentForChildren(), - isClean()); - artifactKey_ = null; + value_ = null; + valueBuilder_ = null; } - return artifactKeyBuilder_; - } - private java.lang.Object version_ = ""; - /** - * string version = 2; - */ - public java.lang.String getVersion() { - java.lang.Object ref = version_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = 
bs.toStringUtf8(); - version_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string version = 2; - */ - public com.google.protobuf.ByteString - getVersionBytes() { - java.lang.Object ref = version_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - version_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string version = 2; - */ - public Builder setVersion( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - - version_ = value; - onChanged(); - return this; - } - /** - * string version = 2; - */ - public Builder clearVersion() { - - version_ = getDefaultInstance().getVersion(); - onChanged(); return this; } /** - * string version = 2; + * .flyteidl.core.LabelValue value = 1; */ - public Builder setVersionBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); + public flyteidl.core.ArtifactId.LabelValue.Builder getValueBuilder() { - version_ = value; onChanged(); - return this; - } - - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.Partitions, flyteidl.core.ArtifactId.Partitions.Builder, flyteidl.core.ArtifactId.PartitionsOrBuilder> partitionsBuilder_; - /** - * .flyteidl.core.Partitions partitions = 3; - */ - public boolean hasPartitions() { - return dimensionsCase_ == 3; - } - /** - * .flyteidl.core.Partitions partitions = 3; - */ - public flyteidl.core.ArtifactId.Partitions getPartitions() { - if (partitionsBuilder_ == null) { - if (dimensionsCase_ == 3) { - return (flyteidl.core.ArtifactId.Partitions) dimensions_; - } - return flyteidl.core.ArtifactId.Partitions.getDefaultInstance(); - } else { - if (dimensionsCase_ == 3) { - return partitionsBuilder_.getMessage(); - } - return flyteidl.core.ArtifactId.Partitions.getDefaultInstance(); - 
} - } - /** - * .flyteidl.core.Partitions partitions = 3; - */ - public Builder setPartitions(flyteidl.core.ArtifactId.Partitions value) { - if (partitionsBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - dimensions_ = value; - onChanged(); - } else { - partitionsBuilder_.setMessage(value); - } - dimensionsCase_ = 3; - return this; - } - /** - * .flyteidl.core.Partitions partitions = 3; - */ - public Builder setPartitions( - flyteidl.core.ArtifactId.Partitions.Builder builderForValue) { - if (partitionsBuilder_ == null) { - dimensions_ = builderForValue.build(); - onChanged(); - } else { - partitionsBuilder_.setMessage(builderForValue.build()); - } - dimensionsCase_ = 3; - return this; - } - /** - * .flyteidl.core.Partitions partitions = 3; - */ - public Builder mergePartitions(flyteidl.core.ArtifactId.Partitions value) { - if (partitionsBuilder_ == null) { - if (dimensionsCase_ == 3 && - dimensions_ != flyteidl.core.ArtifactId.Partitions.getDefaultInstance()) { - dimensions_ = flyteidl.core.ArtifactId.Partitions.newBuilder((flyteidl.core.ArtifactId.Partitions) dimensions_) - .mergeFrom(value).buildPartial(); - } else { - dimensions_ = value; - } - onChanged(); - } else { - if (dimensionsCase_ == 3) { - partitionsBuilder_.mergeFrom(value); - } - partitionsBuilder_.setMessage(value); - } - dimensionsCase_ = 3; - return this; - } - /** - * .flyteidl.core.Partitions partitions = 3; - */ - public Builder clearPartitions() { - if (partitionsBuilder_ == null) { - if (dimensionsCase_ == 3) { - dimensionsCase_ = 0; - dimensions_ = null; - onChanged(); - } - } else { - if (dimensionsCase_ == 3) { - dimensionsCase_ = 0; - dimensions_ = null; - } - partitionsBuilder_.clear(); - } - return this; - } - /** - * .flyteidl.core.Partitions partitions = 3; - */ - public flyteidl.core.ArtifactId.Partitions.Builder getPartitionsBuilder() { - return getPartitionsFieldBuilder().getBuilder(); + return getValueFieldBuilder().getBuilder(); } /** - * 
.flyteidl.core.Partitions partitions = 3; + * .flyteidl.core.LabelValue value = 1; */ - public flyteidl.core.ArtifactId.PartitionsOrBuilder getPartitionsOrBuilder() { - if ((dimensionsCase_ == 3) && (partitionsBuilder_ != null)) { - return partitionsBuilder_.getMessageOrBuilder(); + public flyteidl.core.ArtifactId.LabelValueOrBuilder getValueOrBuilder() { + if (valueBuilder_ != null) { + return valueBuilder_.getMessageOrBuilder(); } else { - if (dimensionsCase_ == 3) { - return (flyteidl.core.ArtifactId.Partitions) dimensions_; - } - return flyteidl.core.ArtifactId.Partitions.getDefaultInstance(); + return value_ == null ? + flyteidl.core.ArtifactId.LabelValue.getDefaultInstance() : value_; } } /** - * .flyteidl.core.Partitions partitions = 3; + * .flyteidl.core.LabelValue value = 1; */ private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.Partitions, flyteidl.core.ArtifactId.Partitions.Builder, flyteidl.core.ArtifactId.PartitionsOrBuilder> - getPartitionsFieldBuilder() { - if (partitionsBuilder_ == null) { - if (!(dimensionsCase_ == 3)) { - dimensions_ = flyteidl.core.ArtifactId.Partitions.getDefaultInstance(); - } - partitionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.Partitions, flyteidl.core.ArtifactId.Partitions.Builder, flyteidl.core.ArtifactId.PartitionsOrBuilder>( - (flyteidl.core.ArtifactId.Partitions) dimensions_, + flyteidl.core.ArtifactId.LabelValue, flyteidl.core.ArtifactId.LabelValue.Builder, flyteidl.core.ArtifactId.LabelValueOrBuilder> + getValueFieldBuilder() { + if (valueBuilder_ == null) { + valueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.LabelValue, flyteidl.core.ArtifactId.LabelValue.Builder, flyteidl.core.ArtifactId.LabelValueOrBuilder>( + getValue(), getParentForChildren(), isClean()); - dimensions_ = null; + value_ = null; } - dimensionsCase_ = 3; - onChanged();; - return partitionsBuilder_; + return valueBuilder_; } @java.lang.Override 
public final Builder setUnknownFields( @@ -5001,48 +5065,48 @@ public final Builder mergeUnknownFields( } - // @@protoc_insertion_point(builder_scope:flyteidl.core.ArtifactID) + // @@protoc_insertion_point(builder_scope:flyteidl.core.TimePartition) } - // @@protoc_insertion_point(class_scope:flyteidl.core.ArtifactID) - private static final flyteidl.core.ArtifactId.ArtifactID DEFAULT_INSTANCE; + // @@protoc_insertion_point(class_scope:flyteidl.core.TimePartition) + private static final flyteidl.core.ArtifactId.TimePartition DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new flyteidl.core.ArtifactId.ArtifactID(); + DEFAULT_INSTANCE = new flyteidl.core.ArtifactId.TimePartition(); } - public static flyteidl.core.ArtifactId.ArtifactID getDefaultInstance() { + public static flyteidl.core.ArtifactId.TimePartition getDefaultInstance() { return DEFAULT_INSTANCE; } - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override - public ArtifactID parsePartialFrom( + public TimePartition parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new ArtifactID(input, extensionRegistry); + return new TimePartition(input, extensionRegistry); } }; - public static com.google.protobuf.Parser parser() { + public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactID getDefaultInstanceForType() { + public flyteidl.core.ArtifactId.TimePartition getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } - public interface ArtifactTagOrBuilder extends - // 
@@protoc_insertion_point(interface_extends:flyteidl.core.ArtifactTag) + public interface ArtifactIDOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.core.ArtifactID) com.google.protobuf.MessageOrBuilder { /** @@ -5059,31 +5123,82 @@ public interface ArtifactTagOrBuilder extends flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder(); /** - * .flyteidl.core.LabelValue value = 2; + * string version = 2; */ - boolean hasValue(); + java.lang.String getVersion(); /** - * .flyteidl.core.LabelValue value = 2; + * string version = 2; */ - flyteidl.core.ArtifactId.LabelValue getValue(); + com.google.protobuf.ByteString + getVersionBytes(); + /** - * .flyteidl.core.LabelValue value = 2; + *
+     * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+     * Different partitions naturally have different versions (execution ids).
+     * 
+ * + * .flyteidl.core.Partitions partitions = 3; */ - flyteidl.core.ArtifactId.LabelValueOrBuilder getValueOrBuilder(); - } - /** - * Protobuf type {@code flyteidl.core.ArtifactTag} - */ - public static final class ArtifactTag extends - com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.core.ArtifactTag) - ArtifactTagOrBuilder { - private static final long serialVersionUID = 0L; - // Use ArtifactTag.newBuilder() to construct. - private ArtifactTag(com.google.protobuf.GeneratedMessageV3.Builder builder) { - super(builder); + boolean hasPartitions(); + /** + *
+     * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+     * Different partitions naturally have different versions (execution ids).
+     * 
+ * + * .flyteidl.core.Partitions partitions = 3; + */ + flyteidl.core.ArtifactId.Partitions getPartitions(); + /** + *
+     * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+     * Different partitions naturally have different versions (execution ids).
+     * 
+ * + * .flyteidl.core.Partitions partitions = 3; + */ + flyteidl.core.ArtifactId.PartitionsOrBuilder getPartitionsOrBuilder(); + + /** + *
+     * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+     * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + boolean hasTimePartition(); + /** + *
+     * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+     * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + flyteidl.core.ArtifactId.TimePartition getTimePartition(); + /** + *
+     * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+     * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + flyteidl.core.ArtifactId.TimePartitionOrBuilder getTimePartitionOrBuilder(); + } + /** + * Protobuf type {@code flyteidl.core.ArtifactID} + */ + public static final class ArtifactID extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:flyteidl.core.ArtifactID) + ArtifactIDOrBuilder { + private static final long serialVersionUID = 0L; + // Use ArtifactID.newBuilder() to construct. + private ArtifactID(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); } - private ArtifactTag() { + private ArtifactID() { + version_ = ""; } @java.lang.Override @@ -5091,7 +5206,7 @@ private ArtifactTag() { getUnknownFields() { return this.unknownFields; } - private ArtifactTag( + private ArtifactID( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -5124,14 +5239,33 @@ private ArtifactTag( break; } case 18: { - flyteidl.core.ArtifactId.LabelValue.Builder subBuilder = null; - if (value_ != null) { - subBuilder = value_.toBuilder(); + java.lang.String s = input.readStringRequireUtf8(); + + version_ = s; + break; + } + case 26: { + flyteidl.core.ArtifactId.Partitions.Builder subBuilder = null; + if (partitions_ != null) { + subBuilder = partitions_.toBuilder(); } - value_ = input.readMessage(flyteidl.core.ArtifactId.LabelValue.parser(), extensionRegistry); + partitions_ = input.readMessage(flyteidl.core.ArtifactId.Partitions.parser(), extensionRegistry); if (subBuilder != null) { - subBuilder.mergeFrom(value_); - value_ = subBuilder.buildPartial(); + subBuilder.mergeFrom(partitions_); + partitions_ = subBuilder.buildPartial(); + } + + break; + } + case 34: { + flyteidl.core.ArtifactId.TimePartition.Builder subBuilder = null; + if (timePartition_ != null) { + subBuilder = timePartition_.toBuilder(); + } + timePartition_ = 
input.readMessage(flyteidl.core.ArtifactId.TimePartition.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(timePartition_); + timePartition_ = subBuilder.buildPartial(); } break; @@ -5157,15 +5291,15 @@ private ArtifactTag( } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactTag_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactID_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactTag_fieldAccessorTable + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactID_fieldAccessorTable .ensureFieldAccessorsInitialized( - flyteidl.core.ArtifactId.ArtifactTag.class, flyteidl.core.ArtifactId.ArtifactTag.Builder.class); + flyteidl.core.ArtifactId.ArtifactID.class, flyteidl.core.ArtifactId.ArtifactID.Builder.class); } public static final int ARTIFACT_KEY_FIELD_NUMBER = 1; @@ -5189,25 +5323,107 @@ public flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder() { return getArtifactKey(); } - public static final int VALUE_FIELD_NUMBER = 2; - private flyteidl.core.ArtifactId.LabelValue value_; + public static final int VERSION_FIELD_NUMBER = 2; + private volatile java.lang.Object version_; /** - * .flyteidl.core.LabelValue value = 2; + * string version = 2; */ - public boolean hasValue() { - return value_ != null; + public java.lang.String getVersion() { + java.lang.Object ref = version_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + version_ = s; + return s; + } } /** - * .flyteidl.core.LabelValue value = 2; + * string version = 2; */ - public 
flyteidl.core.ArtifactId.LabelValue getValue() { - return value_ == null ? flyteidl.core.ArtifactId.LabelValue.getDefaultInstance() : value_; + public com.google.protobuf.ByteString + getVersionBytes() { + java.lang.Object ref = version_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + version_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } + + public static final int PARTITIONS_FIELD_NUMBER = 3; + private flyteidl.core.ArtifactId.Partitions partitions_; /** - * .flyteidl.core.LabelValue value = 2; + *
+     * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+     * Different partitions naturally have different versions (execution ids).
+     * 
+ * + * .flyteidl.core.Partitions partitions = 3; */ - public flyteidl.core.ArtifactId.LabelValueOrBuilder getValueOrBuilder() { - return getValue(); + public boolean hasPartitions() { + return partitions_ != null; + } + /** + *
+     * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+     * Different partitions naturally have different versions (execution ids).
+     * 
+ * + * .flyteidl.core.Partitions partitions = 3; + */ + public flyteidl.core.ArtifactId.Partitions getPartitions() { + return partitions_ == null ? flyteidl.core.ArtifactId.Partitions.getDefaultInstance() : partitions_; + } + /** + *
+     * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+     * Different partitions naturally have different versions (execution ids).
+     * 
+ * + * .flyteidl.core.Partitions partitions = 3; + */ + public flyteidl.core.ArtifactId.PartitionsOrBuilder getPartitionsOrBuilder() { + return getPartitions(); + } + + public static final int TIME_PARTITION_FIELD_NUMBER = 4; + private flyteidl.core.ArtifactId.TimePartition timePartition_; + /** + *
+     * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+     * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + public boolean hasTimePartition() { + return timePartition_ != null; + } + /** + *
+     * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+     * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + public flyteidl.core.ArtifactId.TimePartition getTimePartition() { + return timePartition_ == null ? flyteidl.core.ArtifactId.TimePartition.getDefaultInstance() : timePartition_; + } + /** + *
+     * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+     * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + public flyteidl.core.ArtifactId.TimePartitionOrBuilder getTimePartitionOrBuilder() { + return getTimePartition(); } private byte memoizedIsInitialized = -1; @@ -5227,8 +5443,14 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (artifactKey_ != null) { output.writeMessage(1, getArtifactKey()); } - if (value_ != null) { - output.writeMessage(2, getValue()); + if (!getVersionBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 2, version_); + } + if (partitions_ != null) { + output.writeMessage(3, getPartitions()); + } + if (timePartition_ != null) { + output.writeMessage(4, getTimePartition()); } unknownFields.writeTo(output); } @@ -5243,9 +5465,16 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getArtifactKey()); } - if (value_ != null) { + if (!getVersionBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, version_); + } + if (partitions_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getValue()); + .computeMessageSize(3, getPartitions()); + } + if (timePartition_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getTimePartition()); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -5257,20 +5486,27 @@ public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } - if (!(obj instanceof flyteidl.core.ArtifactId.ArtifactTag)) { + if (!(obj instanceof flyteidl.core.ArtifactId.ArtifactID)) { return super.equals(obj); } - flyteidl.core.ArtifactId.ArtifactTag other = (flyteidl.core.ArtifactId.ArtifactTag) obj; + flyteidl.core.ArtifactId.ArtifactID other = (flyteidl.core.ArtifactId.ArtifactID) obj; if (hasArtifactKey() != other.hasArtifactKey()) return false; if (hasArtifactKey()) { if (!getArtifactKey() .equals(other.getArtifactKey())) return false; } - 
if (hasValue() != other.hasValue()) return false; - if (hasValue()) { - if (!getValue() - .equals(other.getValue())) return false; + if (!getVersion() + .equals(other.getVersion())) return false; + if (hasPartitions() != other.hasPartitions()) return false; + if (hasPartitions()) { + if (!getPartitions() + .equals(other.getPartitions())) return false; + } + if (hasTimePartition() != other.hasTimePartition()) return false; + if (hasTimePartition()) { + if (!getTimePartition() + .equals(other.getTimePartition())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; @@ -5287,78 +5523,84 @@ public int hashCode() { hash = (37 * hash) + ARTIFACT_KEY_FIELD_NUMBER; hash = (53 * hash) + getArtifactKey().hashCode(); } - if (hasValue()) { - hash = (37 * hash) + VALUE_FIELD_NUMBER; - hash = (53 * hash) + getValue().hashCode(); + hash = (37 * hash) + VERSION_FIELD_NUMBER; + hash = (53 * hash) + getVersion().hashCode(); + if (hasPartitions()) { + hash = (37 * hash) + PARTITIONS_FIELD_NUMBER; + hash = (53 * hash) + getPartitions().hashCode(); + } + if (hasTimePartition()) { + hash = (37 * hash) + TIME_PARTITION_FIELD_NUMBER; + hash = (53 * hash) + getTimePartition().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } - public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( + public static flyteidl.core.ArtifactId.ArtifactID parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( + public static flyteidl.core.ArtifactId.ArtifactID parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( + public static flyteidl.core.ArtifactId.ArtifactID 
parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( + public static flyteidl.core.ArtifactId.ArtifactID parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactTag parseFrom(byte[] data) + public static flyteidl.core.ArtifactId.ArtifactID parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( + public static flyteidl.core.ArtifactId.ArtifactID parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactTag parseFrom(java.io.InputStream input) + public static flyteidl.core.ArtifactId.ArtifactID parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( + public static flyteidl.core.ArtifactId.ArtifactID parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactTag parseDelimitedFrom(java.io.InputStream input) + public static flyteidl.core.ArtifactId.ArtifactID parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } 
- public static flyteidl.core.ArtifactId.ArtifactTag parseDelimitedFrom( + public static flyteidl.core.ArtifactId.ArtifactID parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( + public static flyteidl.core.ArtifactId.ArtifactID parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( + public static flyteidl.core.ArtifactId.ArtifactID parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -5371,7 +5613,7 @@ public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(flyteidl.core.ArtifactId.ArtifactTag prototype) { + public static Builder newBuilder(flyteidl.core.ArtifactId.ArtifactID prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override @@ -5387,26 +5629,26 @@ protected Builder newBuilderForType( return builder; } /** - * Protobuf type {@code flyteidl.core.ArtifactTag} + * Protobuf type {@code flyteidl.core.ArtifactID} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.core.ArtifactTag) - flyteidl.core.ArtifactId.ArtifactTagOrBuilder { + // @@protoc_insertion_point(builder_implements:flyteidl.core.ArtifactID) + flyteidl.core.ArtifactId.ArtifactIDOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return 
flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactTag_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactID_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactTag_fieldAccessorTable + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactID_fieldAccessorTable .ensureFieldAccessorsInitialized( - flyteidl.core.ArtifactId.ArtifactTag.class, flyteidl.core.ArtifactId.ArtifactTag.Builder.class); + flyteidl.core.ArtifactId.ArtifactID.class, flyteidl.core.ArtifactId.ArtifactID.Builder.class); } - // Construct using flyteidl.core.ArtifactId.ArtifactTag.newBuilder() + // Construct using flyteidl.core.ArtifactId.ArtifactID.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -5430,11 +5672,19 @@ public Builder clear() { artifactKey_ = null; artifactKeyBuilder_ = null; } - if (valueBuilder_ == null) { - value_ = null; + version_ = ""; + + if (partitionsBuilder_ == null) { + partitions_ = null; } else { - value_ = null; - valueBuilder_ = null; + partitions_ = null; + partitionsBuilder_ = null; + } + if (timePartitionBuilder_ == null) { + timePartition_ = null; + } else { + timePartition_ = null; + timePartitionBuilder_ = null; } return this; } @@ -5442,17 +5692,17 @@ public Builder clear() { @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactTag_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactID_descriptor; } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactTag getDefaultInstanceForType() { - return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); + public flyteidl.core.ArtifactId.ArtifactID getDefaultInstanceForType() { + return 
flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactTag build() { - flyteidl.core.ArtifactId.ArtifactTag result = buildPartial(); + public flyteidl.core.ArtifactId.ArtifactID build() { + flyteidl.core.ArtifactId.ArtifactID result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } @@ -5460,17 +5710,23 @@ public flyteidl.core.ArtifactId.ArtifactTag build() { } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactTag buildPartial() { - flyteidl.core.ArtifactId.ArtifactTag result = new flyteidl.core.ArtifactId.ArtifactTag(this); + public flyteidl.core.ArtifactId.ArtifactID buildPartial() { + flyteidl.core.ArtifactId.ArtifactID result = new flyteidl.core.ArtifactId.ArtifactID(this); if (artifactKeyBuilder_ == null) { result.artifactKey_ = artifactKey_; } else { result.artifactKey_ = artifactKeyBuilder_.build(); } - if (valueBuilder_ == null) { - result.value_ = value_; + result.version_ = version_; + if (partitionsBuilder_ == null) { + result.partitions_ = partitions_; } else { - result.value_ = valueBuilder_.build(); + result.partitions_ = partitionsBuilder_.build(); + } + if (timePartitionBuilder_ == null) { + result.timePartition_ = timePartition_; + } else { + result.timePartition_ = timePartitionBuilder_.build(); } onBuilt(); return result; @@ -5510,21 +5766,28 @@ public Builder addRepeatedField( } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.core.ArtifactId.ArtifactTag) { - return mergeFrom((flyteidl.core.ArtifactId.ArtifactTag)other); + if (other instanceof flyteidl.core.ArtifactId.ArtifactID) { + return mergeFrom((flyteidl.core.ArtifactId.ArtifactID)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(flyteidl.core.ArtifactId.ArtifactTag other) { - if (other == flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance()) return this; + 
public Builder mergeFrom(flyteidl.core.ArtifactId.ArtifactID other) { + if (other == flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance()) return this; if (other.hasArtifactKey()) { mergeArtifactKey(other.getArtifactKey()); } - if (other.hasValue()) { - mergeValue(other.getValue()); + if (!other.getVersion().isEmpty()) { + version_ = other.version_; + onChanged(); + } + if (other.hasPartitions()) { + mergePartitions(other.getPartitions()); + } + if (other.hasTimePartition()) { + mergeTimePartition(other.getTimePartition()); } this.mergeUnknownFields(other.unknownFields); onChanged(); @@ -5541,11 +5804,11 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - flyteidl.core.ArtifactId.ArtifactTag parsedMessage = null; + flyteidl.core.ArtifactId.ArtifactID parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.core.ArtifactId.ArtifactTag) e.getUnfinishedMessage(); + parsedMessage = (flyteidl.core.ArtifactId.ArtifactID) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -5672,266 +5935,485 @@ public flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder() { return artifactKeyBuilder_; } - private flyteidl.core.ArtifactId.LabelValue value_; + private java.lang.Object version_ = ""; + /** + * string version = 2; + */ + public java.lang.String getVersion() { + java.lang.Object ref = version_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + version_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string version = 2; + */ + public com.google.protobuf.ByteString + getVersionBytes() { + java.lang.Object ref = version_; + if 
(ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + version_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string version = 2; + */ + public Builder setVersion( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + version_ = value; + onChanged(); + return this; + } + /** + * string version = 2; + */ + public Builder clearVersion() { + + version_ = getDefaultInstance().getVersion(); + onChanged(); + return this; + } + /** + * string version = 2; + */ + public Builder setVersionBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + version_ = value; + onChanged(); + return this; + } + + private flyteidl.core.ArtifactId.Partitions partitions_; private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.LabelValue, flyteidl.core.ArtifactId.LabelValue.Builder, flyteidl.core.ArtifactId.LabelValueOrBuilder> valueBuilder_; + flyteidl.core.ArtifactId.Partitions, flyteidl.core.ArtifactId.Partitions.Builder, flyteidl.core.ArtifactId.PartitionsOrBuilder> partitionsBuilder_; /** - * .flyteidl.core.LabelValue value = 2; + *
+       * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+       * Different partitions naturally have different versions (execution ids).
+       * 
+ * + * .flyteidl.core.Partitions partitions = 3; */ - public boolean hasValue() { - return valueBuilder_ != null || value_ != null; + public boolean hasPartitions() { + return partitionsBuilder_ != null || partitions_ != null; } /** - * .flyteidl.core.LabelValue value = 2; + *
+       * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+       * Different partitions naturally have different versions (execution ids).
+       * 
+ * + * .flyteidl.core.Partitions partitions = 3; */ - public flyteidl.core.ArtifactId.LabelValue getValue() { - if (valueBuilder_ == null) { - return value_ == null ? flyteidl.core.ArtifactId.LabelValue.getDefaultInstance() : value_; + public flyteidl.core.ArtifactId.Partitions getPartitions() { + if (partitionsBuilder_ == null) { + return partitions_ == null ? flyteidl.core.ArtifactId.Partitions.getDefaultInstance() : partitions_; } else { - return valueBuilder_.getMessage(); + return partitionsBuilder_.getMessage(); } } /** - * .flyteidl.core.LabelValue value = 2; + *
+       * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+       * Different partitions naturally have different versions (execution ids).
+       * 
+ * + * .flyteidl.core.Partitions partitions = 3; */ - public Builder setValue(flyteidl.core.ArtifactId.LabelValue value) { - if (valueBuilder_ == null) { + public Builder setPartitions(flyteidl.core.ArtifactId.Partitions value) { + if (partitionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - value_ = value; + partitions_ = value; onChanged(); } else { - valueBuilder_.setMessage(value); + partitionsBuilder_.setMessage(value); } return this; } /** - * .flyteidl.core.LabelValue value = 2; + *
+       * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+       * Different partitions naturally have different versions (execution ids).
+       * 
+ * + * .flyteidl.core.Partitions partitions = 3; */ - public Builder setValue( - flyteidl.core.ArtifactId.LabelValue.Builder builderForValue) { - if (valueBuilder_ == null) { - value_ = builderForValue.build(); + public Builder setPartitions( + flyteidl.core.ArtifactId.Partitions.Builder builderForValue) { + if (partitionsBuilder_ == null) { + partitions_ = builderForValue.build(); onChanged(); } else { - valueBuilder_.setMessage(builderForValue.build()); + partitionsBuilder_.setMessage(builderForValue.build()); } return this; } /** - * .flyteidl.core.LabelValue value = 2; + *
+       * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+       * Different partitions naturally have different versions (execution ids).
+       * 
+ * + * .flyteidl.core.Partitions partitions = 3; */ - public Builder mergeValue(flyteidl.core.ArtifactId.LabelValue value) { - if (valueBuilder_ == null) { - if (value_ != null) { - value_ = - flyteidl.core.ArtifactId.LabelValue.newBuilder(value_).mergeFrom(value).buildPartial(); + public Builder mergePartitions(flyteidl.core.ArtifactId.Partitions value) { + if (partitionsBuilder_ == null) { + if (partitions_ != null) { + partitions_ = + flyteidl.core.ArtifactId.Partitions.newBuilder(partitions_).mergeFrom(value).buildPartial(); } else { - value_ = value; + partitions_ = value; } onChanged(); } else { - valueBuilder_.mergeFrom(value); + partitionsBuilder_.mergeFrom(value); } return this; } /** - * .flyteidl.core.LabelValue value = 2; + *
+       * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+       * Different partitions naturally have different versions (execution ids).
+       * 
+ * + * .flyteidl.core.Partitions partitions = 3; */ - public Builder clearValue() { - if (valueBuilder_ == null) { - value_ = null; + public Builder clearPartitions() { + if (partitionsBuilder_ == null) { + partitions_ = null; onChanged(); } else { - value_ = null; - valueBuilder_ = null; + partitions_ = null; + partitionsBuilder_ = null; } return this; } /** - * .flyteidl.core.LabelValue value = 2; + *
+       * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+       * Different partitions naturally have different versions (execution ids).
+       * 
+ * + * .flyteidl.core.Partitions partitions = 3; */ - public flyteidl.core.ArtifactId.LabelValue.Builder getValueBuilder() { + public flyteidl.core.ArtifactId.Partitions.Builder getPartitionsBuilder() { onChanged(); - return getValueFieldBuilder().getBuilder(); + return getPartitionsFieldBuilder().getBuilder(); } /** - * .flyteidl.core.LabelValue value = 2; + *
+       * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+       * Different partitions naturally have different versions (execution ids).
+       * 
+ * + * .flyteidl.core.Partitions partitions = 3; */ - public flyteidl.core.ArtifactId.LabelValueOrBuilder getValueOrBuilder() { - if (valueBuilder_ != null) { - return valueBuilder_.getMessageOrBuilder(); + public flyteidl.core.ArtifactId.PartitionsOrBuilder getPartitionsOrBuilder() { + if (partitionsBuilder_ != null) { + return partitionsBuilder_.getMessageOrBuilder(); } else { - return value_ == null ? - flyteidl.core.ArtifactId.LabelValue.getDefaultInstance() : value_; + return partitions_ == null ? + flyteidl.core.ArtifactId.Partitions.getDefaultInstance() : partitions_; } } /** - * .flyteidl.core.LabelValue value = 2; + *
+       * Think of a partition as a tag on an Artifact, except it's a key-value pair.
+       * Different partitions naturally have different versions (execution ids).
+       * 
+ * + * .flyteidl.core.Partitions partitions = 3; */ private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.LabelValue, flyteidl.core.ArtifactId.LabelValue.Builder, flyteidl.core.ArtifactId.LabelValueOrBuilder> - getValueFieldBuilder() { - if (valueBuilder_ == null) { - valueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.LabelValue, flyteidl.core.ArtifactId.LabelValue.Builder, flyteidl.core.ArtifactId.LabelValueOrBuilder>( - getValue(), + flyteidl.core.ArtifactId.Partitions, flyteidl.core.ArtifactId.Partitions.Builder, flyteidl.core.ArtifactId.PartitionsOrBuilder> + getPartitionsFieldBuilder() { + if (partitionsBuilder_ == null) { + partitionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.Partitions, flyteidl.core.ArtifactId.Partitions.Builder, flyteidl.core.ArtifactId.PartitionsOrBuilder>( + getPartitions(), getParentForChildren(), isClean()); - value_ = null; + partitions_ = null; } - return valueBuilder_; + return partitionsBuilder_; } - @java.lang.Override - public final Builder setUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.setUnknownFields(unknownFields); + + private flyteidl.core.ArtifactId.TimePartition timePartition_; + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.TimePartition, flyteidl.core.ArtifactId.TimePartition.Builder, flyteidl.core.ArtifactId.TimePartitionOrBuilder> timePartitionBuilder_; + /** + *
+       * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+       * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + public boolean hasTimePartition() { + return timePartitionBuilder_ != null || timePartition_ != null; + } + /** + *
+       * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+       * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + public flyteidl.core.ArtifactId.TimePartition getTimePartition() { + if (timePartitionBuilder_ == null) { + return timePartition_ == null ? flyteidl.core.ArtifactId.TimePartition.getDefaultInstance() : timePartition_; + } else { + return timePartitionBuilder_.getMessage(); + } } + /** + *
+       * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+       * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + public Builder setTimePartition(flyteidl.core.ArtifactId.TimePartition value) { + if (timePartitionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + timePartition_ = value; + onChanged(); + } else { + timePartitionBuilder_.setMessage(value); + } - @java.lang.Override - public final Builder mergeUnknownFields( - final com.google.protobuf.UnknownFieldSet unknownFields) { - return super.mergeUnknownFields(unknownFields); + return this; } + /** + *
+       * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+       * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + public Builder setTimePartition( + flyteidl.core.ArtifactId.TimePartition.Builder builderForValue) { + if (timePartitionBuilder_ == null) { + timePartition_ = builderForValue.build(); + onChanged(); + } else { + timePartitionBuilder_.setMessage(builderForValue.build()); + } + return this; + } + /** + *
+       * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+       * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + public Builder mergeTimePartition(flyteidl.core.ArtifactId.TimePartition value) { + if (timePartitionBuilder_ == null) { + if (timePartition_ != null) { + timePartition_ = + flyteidl.core.ArtifactId.TimePartition.newBuilder(timePartition_).mergeFrom(value).buildPartial(); + } else { + timePartition_ = value; + } + onChanged(); + } else { + timePartitionBuilder_.mergeFrom(value); + } - // @@protoc_insertion_point(builder_scope:flyteidl.core.ArtifactTag) + return this; + } + /** + *
+       * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+       * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + public Builder clearTimePartition() { + if (timePartitionBuilder_ == null) { + timePartition_ = null; + onChanged(); + } else { + timePartition_ = null; + timePartitionBuilder_ = null; + } + + return this; + } + /** + *
+       * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+       * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + public flyteidl.core.ArtifactId.TimePartition.Builder getTimePartitionBuilder() { + + onChanged(); + return getTimePartitionFieldBuilder().getBuilder(); + } + /** + *
+       * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+       * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + public flyteidl.core.ArtifactId.TimePartitionOrBuilder getTimePartitionOrBuilder() { + if (timePartitionBuilder_ != null) { + return timePartitionBuilder_.getMessageOrBuilder(); + } else { + return timePartition_ == null ? + flyteidl.core.ArtifactId.TimePartition.getDefaultInstance() : timePartition_; + } + } + /** + *
+       * There is no such thing as an empty time partition - if it's not set, then there is no time partition.
+       * 
+ * + * .flyteidl.core.TimePartition time_partition = 4; + */ + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.TimePartition, flyteidl.core.ArtifactId.TimePartition.Builder, flyteidl.core.ArtifactId.TimePartitionOrBuilder> + getTimePartitionFieldBuilder() { + if (timePartitionBuilder_ == null) { + timePartitionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.TimePartition, flyteidl.core.ArtifactId.TimePartition.Builder, flyteidl.core.ArtifactId.TimePartitionOrBuilder>( + getTimePartition(), + getParentForChildren(), + isClean()); + timePartition_ = null; + } + return timePartitionBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:flyteidl.core.ArtifactID) } - // @@protoc_insertion_point(class_scope:flyteidl.core.ArtifactTag) - private static final flyteidl.core.ArtifactId.ArtifactTag DEFAULT_INSTANCE; + // @@protoc_insertion_point(class_scope:flyteidl.core.ArtifactID) + private static final flyteidl.core.ArtifactId.ArtifactID DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new flyteidl.core.ArtifactId.ArtifactTag(); + DEFAULT_INSTANCE = new flyteidl.core.ArtifactId.ArtifactID(); } - public static flyteidl.core.ArtifactId.ArtifactTag getDefaultInstance() { + public static flyteidl.core.ArtifactId.ArtifactID getDefaultInstance() { return DEFAULT_INSTANCE; } - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override - public ArtifactTag parsePartialFrom( + public ArtifactID 
parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new ArtifactTag(input, extensionRegistry); + return new ArtifactID(input, extensionRegistry); } }; - public static com.google.protobuf.Parser parser() { + public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactTag getDefaultInstanceForType() { + public flyteidl.core.ArtifactId.ArtifactID getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } - public interface ArtifactQueryOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.core.ArtifactQuery) + public interface ArtifactTagOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.core.ArtifactTag) com.google.protobuf.MessageOrBuilder { /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - boolean hasArtifactId(); - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - flyteidl.core.ArtifactId.ArtifactID getArtifactId(); - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder(); - - /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; - */ - boolean hasArtifactTag(); - /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; - */ - flyteidl.core.ArtifactId.ArtifactTag getArtifactTag(); - /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - flyteidl.core.ArtifactId.ArtifactTagOrBuilder getArtifactTagOrBuilder(); - + boolean hasArtifactKey(); /** - * string uri = 3; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - java.lang.String getUri(); + flyteidl.core.ArtifactId.ArtifactKey getArtifactKey(); /** - * string uri = 3; + * 
.flyteidl.core.ArtifactKey artifact_key = 1; */ - com.google.protobuf.ByteString - getUriBytes(); + flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder(); /** - *
-     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-     * artifacts, or a partition value derived from a triggering artifact.
-     * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; + * .flyteidl.core.LabelValue value = 2; */ - boolean hasBinding(); + boolean hasValue(); /** - *
-     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-     * artifacts, or a partition value derived from a triggering artifact.
-     * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; + * .flyteidl.core.LabelValue value = 2; */ - flyteidl.core.ArtifactId.ArtifactBindingData getBinding(); + flyteidl.core.ArtifactId.LabelValue getValue(); /** - *
-     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-     * artifacts, or a partition value derived from a triggering artifact.
-     * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; + * .flyteidl.core.LabelValue value = 2; */ - flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder getBindingOrBuilder(); - - public flyteidl.core.ArtifactId.ArtifactQuery.IdentifierCase getIdentifierCase(); + flyteidl.core.ArtifactId.LabelValueOrBuilder getValueOrBuilder(); } /** - *
-   * Uniqueness constraints for Artifacts
-   *  - project, domain, name, version, partitions
-   * Option 2 (tags are standalone, point to an individual artifact id):
-   *  - project, domain, name, alias (points to one partition if partitioned)
-   *  - project, domain, name, partition key, partition value
-   * 
- * - * Protobuf type {@code flyteidl.core.ArtifactQuery} + * Protobuf type {@code flyteidl.core.ArtifactTag} */ - public static final class ArtifactQuery extends + public static final class ArtifactTag extends com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.core.ArtifactQuery) - ArtifactQueryOrBuilder { + // @@protoc_insertion_point(message_implements:flyteidl.core.ArtifactTag) + ArtifactTagOrBuilder { private static final long serialVersionUID = 0L; - // Use ArtifactQuery.newBuilder() to construct. - private ArtifactQuery(com.google.protobuf.GeneratedMessageV3.Builder builder) { + // Use ArtifactTag.newBuilder() to construct. + private ArtifactTag(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } - private ArtifactQuery() { + private ArtifactTag() { } @java.lang.Override @@ -5939,7 +6421,7 @@ private ArtifactQuery() { getUnknownFields() { return this.unknownFields; } - private ArtifactQuery( + private ArtifactTag( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -5959,51 +6441,29 @@ private ArtifactQuery( done = true; break; case 10: { - flyteidl.core.ArtifactId.ArtifactID.Builder subBuilder = null; - if (identifierCase_ == 1) { - subBuilder = ((flyteidl.core.ArtifactId.ArtifactID) identifier_).toBuilder(); + flyteidl.core.ArtifactId.ArtifactKey.Builder subBuilder = null; + if (artifactKey_ != null) { + subBuilder = artifactKey_.toBuilder(); } - identifier_ = - input.readMessage(flyteidl.core.ArtifactId.ArtifactID.parser(), extensionRegistry); + artifactKey_ = input.readMessage(flyteidl.core.ArtifactId.ArtifactKey.parser(), extensionRegistry); if (subBuilder != null) { - subBuilder.mergeFrom((flyteidl.core.ArtifactId.ArtifactID) identifier_); - identifier_ = subBuilder.buildPartial(); + subBuilder.mergeFrom(artifactKey_); + artifactKey_ = 
subBuilder.buildPartial(); } - identifierCase_ = 1; + break; } case 18: { - flyteidl.core.ArtifactId.ArtifactTag.Builder subBuilder = null; - if (identifierCase_ == 2) { - subBuilder = ((flyteidl.core.ArtifactId.ArtifactTag) identifier_).toBuilder(); - } - identifier_ = - input.readMessage(flyteidl.core.ArtifactId.ArtifactTag.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom((flyteidl.core.ArtifactId.ArtifactTag) identifier_); - identifier_ = subBuilder.buildPartial(); - } - identifierCase_ = 2; - break; - } - case 26: { - java.lang.String s = input.readStringRequireUtf8(); - identifierCase_ = 3; - identifier_ = s; - break; - } - case 34: { - flyteidl.core.ArtifactId.ArtifactBindingData.Builder subBuilder = null; - if (identifierCase_ == 4) { - subBuilder = ((flyteidl.core.ArtifactId.ArtifactBindingData) identifier_).toBuilder(); + flyteidl.core.ArtifactId.LabelValue.Builder subBuilder = null; + if (value_ != null) { + subBuilder = value_.toBuilder(); } - identifier_ = - input.readMessage(flyteidl.core.ArtifactId.ArtifactBindingData.parser(), extensionRegistry); + value_ = input.readMessage(flyteidl.core.ArtifactId.LabelValue.parser(), extensionRegistry); if (subBuilder != null) { - subBuilder.mergeFrom((flyteidl.core.ArtifactId.ArtifactBindingData) identifier_); - identifier_ = subBuilder.buildPartial(); + subBuilder.mergeFrom(value_); + value_ = subBuilder.buildPartial(); } - identifierCase_ = 4; + break; } default: { @@ -6027,193 +6487,57 @@ private ArtifactQuery( } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactQuery_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactTag_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return 
flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactQuery_fieldAccessorTable + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactTag_fieldAccessorTable .ensureFieldAccessorsInitialized( - flyteidl.core.ArtifactId.ArtifactQuery.class, flyteidl.core.ArtifactId.ArtifactQuery.Builder.class); + flyteidl.core.ArtifactId.ArtifactTag.class, flyteidl.core.ArtifactId.ArtifactTag.Builder.class); } - private int identifierCase_ = 0; - private java.lang.Object identifier_; - public enum IdentifierCase - implements com.google.protobuf.Internal.EnumLite { - ARTIFACT_ID(1), - ARTIFACT_TAG(2), - URI(3), - BINDING(4), - IDENTIFIER_NOT_SET(0); - private final int value; - private IdentifierCase(int value) { - this.value = value; - } - /** - * @deprecated Use {@link #forNumber(int)} instead. - */ - @java.lang.Deprecated - public static IdentifierCase valueOf(int value) { - return forNumber(value); - } - - public static IdentifierCase forNumber(int value) { - switch (value) { - case 1: return ARTIFACT_ID; - case 2: return ARTIFACT_TAG; - case 3: return URI; - case 4: return BINDING; - case 0: return IDENTIFIER_NOT_SET; - default: return null; - } - } - public int getNumber() { - return this.value; - } - }; - - public IdentifierCase - getIdentifierCase() { - return IdentifierCase.forNumber( - identifierCase_); - } - - public static final int ARTIFACT_ID_FIELD_NUMBER = 1; - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public boolean hasArtifactId() { - return identifierCase_ == 1; - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactID getArtifactId() { - if (identifierCase_ == 1) { - return (flyteidl.core.ArtifactId.ArtifactID) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); - } - /** - * .flyteidl.core.ArtifactID artifact_id = 1; - */ - public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder() { - if (identifierCase_ == 1) { - return 
(flyteidl.core.ArtifactId.ArtifactID) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); - } - - public static final int ARTIFACT_TAG_FIELD_NUMBER = 2; - /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; - */ - public boolean hasArtifactTag() { - return identifierCase_ == 2; - } - /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; - */ - public flyteidl.core.ArtifactId.ArtifactTag getArtifactTag() { - if (identifierCase_ == 2) { - return (flyteidl.core.ArtifactId.ArtifactTag) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); - } + public static final int ARTIFACT_KEY_FIELD_NUMBER = 1; + private flyteidl.core.ArtifactId.ArtifactKey artifactKey_; /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - public flyteidl.core.ArtifactId.ArtifactTagOrBuilder getArtifactTagOrBuilder() { - if (identifierCase_ == 2) { - return (flyteidl.core.ArtifactId.ArtifactTag) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); + public boolean hasArtifactKey() { + return artifactKey_ != null; } - - public static final int URI_FIELD_NUMBER = 3; /** - * string uri = 3; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - public java.lang.String getUri() { - java.lang.Object ref = ""; - if (identifierCase_ == 3) { - ref = identifier_; - } - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (identifierCase_ == 3) { - identifier_ = s; - } - return s; - } + public flyteidl.core.ArtifactId.ArtifactKey getArtifactKey() { + return artifactKey_ == null ? 
flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; } /** - * string uri = 3; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - public com.google.protobuf.ByteString - getUriBytes() { - java.lang.Object ref = ""; - if (identifierCase_ == 3) { - ref = identifier_; - } - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - if (identifierCase_ == 3) { - identifier_ = b; - } - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } + public flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder() { + return getArtifactKey(); } - public static final int BINDING_FIELD_NUMBER = 4; + public static final int VALUE_FIELD_NUMBER = 2; + private flyteidl.core.ArtifactId.LabelValue value_; /** - *
-     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-     * artifacts, or a partition value derived from a triggering artifact.
-     * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; + * .flyteidl.core.LabelValue value = 2; */ - public boolean hasBinding() { - return identifierCase_ == 4; + public boolean hasValue() { + return value_ != null; } /** - *
-     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-     * artifacts, or a partition value derived from a triggering artifact.
-     * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; + * .flyteidl.core.LabelValue value = 2; */ - public flyteidl.core.ArtifactId.ArtifactBindingData getBinding() { - if (identifierCase_ == 4) { - return (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); + public flyteidl.core.ArtifactId.LabelValue getValue() { + return value_ == null ? flyteidl.core.ArtifactId.LabelValue.getDefaultInstance() : value_; } /** - *
-     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-     * artifacts, or a partition value derived from a triggering artifact.
-     * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; + * .flyteidl.core.LabelValue value = 2; */ - public flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder getBindingOrBuilder() { - if (identifierCase_ == 4) { - return (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); + public flyteidl.core.ArtifactId.LabelValueOrBuilder getValueOrBuilder() { + return getValue(); } private byte memoizedIsInitialized = -1; @@ -6230,17 +6554,11 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (identifierCase_ == 1) { - output.writeMessage(1, (flyteidl.core.ArtifactId.ArtifactID) identifier_); - } - if (identifierCase_ == 2) { - output.writeMessage(2, (flyteidl.core.ArtifactId.ArtifactTag) identifier_); - } - if (identifierCase_ == 3) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 3, identifier_); + if (artifactKey_ != null) { + output.writeMessage(1, getArtifactKey()); } - if (identifierCase_ == 4) { - output.writeMessage(4, (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_); + if (value_ != null) { + output.writeMessage(2, getValue()); } unknownFields.writeTo(output); } @@ -6251,20 +6569,13 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (identifierCase_ == 1) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, (flyteidl.core.ArtifactId.ArtifactID) identifier_); - } - if (identifierCase_ == 2) { + if (artifactKey_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, (flyteidl.core.ArtifactId.ArtifactTag) identifier_); - } - if (identifierCase_ == 3) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, identifier_); + .computeMessageSize(1, getArtifactKey()); } - if (identifierCase_ == 4) { + if (value_ != null) { size += 
com.google.protobuf.CodedOutputStream - .computeMessageSize(4, (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_); + .computeMessageSize(2, getValue()); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -6276,31 +6587,20 @@ public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } - if (!(obj instanceof flyteidl.core.ArtifactId.ArtifactQuery)) { + if (!(obj instanceof flyteidl.core.ArtifactId.ArtifactTag)) { return super.equals(obj); } - flyteidl.core.ArtifactId.ArtifactQuery other = (flyteidl.core.ArtifactId.ArtifactQuery) obj; + flyteidl.core.ArtifactId.ArtifactTag other = (flyteidl.core.ArtifactId.ArtifactTag) obj; - if (!getIdentifierCase().equals(other.getIdentifierCase())) return false; - switch (identifierCase_) { - case 1: - if (!getArtifactId() - .equals(other.getArtifactId())) return false; - break; - case 2: - if (!getArtifactTag() - .equals(other.getArtifactTag())) return false; - break; - case 3: - if (!getUri() - .equals(other.getUri())) return false; - break; - case 4: - if (!getBinding() - .equals(other.getBinding())) return false; - break; - case 0: - default: + if (hasArtifactKey() != other.hasArtifactKey()) return false; + if (hasArtifactKey()) { + if (!getArtifactKey() + .equals(other.getArtifactKey())) return false; + } + if (hasValue() != other.hasValue()) return false; + if (hasValue()) { + if (!getValue() + .equals(other.getValue())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; @@ -6313,94 +6613,82 @@ public int hashCode() { } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); - switch (identifierCase_) { - case 1: - hash = (37 * hash) + ARTIFACT_ID_FIELD_NUMBER; - hash = (53 * hash) + getArtifactId().hashCode(); - break; - case 2: - hash = (37 * hash) + ARTIFACT_TAG_FIELD_NUMBER; - hash = (53 * hash) + getArtifactTag().hashCode(); - break; - case 3: - hash = (37 * hash) + URI_FIELD_NUMBER; - hash = (53 * hash) + 
getUri().hashCode(); - break; - case 4: - hash = (37 * hash) + BINDING_FIELD_NUMBER; - hash = (53 * hash) + getBinding().hashCode(); - break; - case 0: - default: + if (hasArtifactKey()) { + hash = (37 * hash) + ARTIFACT_KEY_FIELD_NUMBER; + hash = (53 * hash) + getArtifactKey().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } - public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( + public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( + public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( + public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( + public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom(byte[] data) + public static flyteidl.core.ArtifactId.ArtifactTag parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( + public static 
flyteidl.core.ArtifactId.ArtifactTag parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom(java.io.InputStream input) + public static flyteidl.core.ArtifactId.ArtifactTag parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( + public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactQuery parseDelimitedFrom(java.io.InputStream input) + public static flyteidl.core.ArtifactId.ArtifactTag parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } - public static flyteidl.core.ArtifactId.ArtifactQuery parseDelimitedFrom( + public static flyteidl.core.ArtifactId.ArtifactTag parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( + public static flyteidl.core.ArtifactId.ArtifactTag parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( + public static flyteidl.core.ArtifactId.ArtifactTag 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -6413,7 +6701,7 @@ public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(flyteidl.core.ArtifactId.ArtifactQuery prototype) { + public static Builder newBuilder(flyteidl.core.ArtifactId.ArtifactTag prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override @@ -6429,34 +6717,26 @@ protected Builder newBuilderForType( return builder; } /** - *
-     * Uniqueness constraints for Artifacts
-     *  - project, domain, name, version, partitions
-     * Option 2 (tags are standalone, point to an individual artifact id):
-     *  - project, domain, name, alias (points to one partition if partitioned)
-     *  - project, domain, name, partition key, partition value
-     * 
- * - * Protobuf type {@code flyteidl.core.ArtifactQuery} + * Protobuf type {@code flyteidl.core.ArtifactTag} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.core.ArtifactQuery) - flyteidl.core.ArtifactId.ArtifactQueryOrBuilder { + // @@protoc_insertion_point(builder_implements:flyteidl.core.ArtifactTag) + flyteidl.core.ArtifactId.ArtifactTagOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactQuery_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactTag_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactQuery_fieldAccessorTable + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactTag_fieldAccessorTable .ensureFieldAccessorsInitialized( - flyteidl.core.ArtifactId.ArtifactQuery.class, flyteidl.core.ArtifactId.ArtifactQuery.Builder.class); + flyteidl.core.ArtifactId.ArtifactTag.class, flyteidl.core.ArtifactId.ArtifactTag.Builder.class); } - // Construct using flyteidl.core.ArtifactId.ArtifactQuery.newBuilder() + // Construct using flyteidl.core.ArtifactId.ArtifactTag.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -6474,25 +6754,35 @@ private void maybeForceBuilderInitialization() { @java.lang.Override public Builder clear() { super.clear(); - identifierCase_ = 0; - identifier_ = null; + if (artifactKeyBuilder_ == null) { + artifactKey_ = null; + } else { + artifactKey_ = null; + artifactKeyBuilder_ = null; + } + if (valueBuilder_ == null) { + value_ = null; + } else { + value_ = null; + valueBuilder_ = null; + } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactQuery_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactTag_descriptor; } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactQuery getDefaultInstanceForType() { - return flyteidl.core.ArtifactId.ArtifactQuery.getDefaultInstance(); + public flyteidl.core.ArtifactId.ArtifactTag getDefaultInstanceForType() { + return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactQuery build() { - flyteidl.core.ArtifactId.ArtifactQuery result = buildPartial(); + public flyteidl.core.ArtifactId.ArtifactTag build() { + flyteidl.core.ArtifactId.ArtifactTag result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } @@ -6500,33 +6790,18 @@ public flyteidl.core.ArtifactId.ArtifactQuery build() { } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactQuery buildPartial() { - flyteidl.core.ArtifactId.ArtifactQuery result = new flyteidl.core.ArtifactId.ArtifactQuery(this); - if (identifierCase_ == 1) { - if (artifactIdBuilder_ == null) { - result.identifier_ = identifier_; - } else { - result.identifier_ = artifactIdBuilder_.build(); - } - } - if (identifierCase_ == 2) { - if (artifactTagBuilder_ == null) { - result.identifier_ = identifier_; - } else { - result.identifier_ = artifactTagBuilder_.build(); - } - } - if (identifierCase_ == 3) { - result.identifier_ = identifier_; + public flyteidl.core.ArtifactId.ArtifactTag buildPartial() { + flyteidl.core.ArtifactId.ArtifactTag result = new flyteidl.core.ArtifactId.ArtifactTag(this); + if (artifactKeyBuilder_ == null) { + result.artifactKey_ = artifactKey_; + } else { + result.artifactKey_ = artifactKeyBuilder_.build(); } - if (identifierCase_ == 4) { - if (bindingBuilder_ == null) { - result.identifier_ = identifier_; - } else { - result.identifier_ = 
bindingBuilder_.build(); - } + if (valueBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = valueBuilder_.build(); } - result.identifierCase_ = identifierCase_; onBuilt(); return result; } @@ -6565,38 +6840,21 @@ public Builder addRepeatedField( } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.core.ArtifactId.ArtifactQuery) { - return mergeFrom((flyteidl.core.ArtifactId.ArtifactQuery)other); + if (other instanceof flyteidl.core.ArtifactId.ArtifactTag) { + return mergeFrom((flyteidl.core.ArtifactId.ArtifactTag)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(flyteidl.core.ArtifactId.ArtifactQuery other) { - if (other == flyteidl.core.ArtifactId.ArtifactQuery.getDefaultInstance()) return this; - switch (other.getIdentifierCase()) { - case ARTIFACT_ID: { - mergeArtifactId(other.getArtifactId()); - break; - } - case ARTIFACT_TAG: { - mergeArtifactTag(other.getArtifactTag()); - break; - } - case URI: { - identifierCase_ = 3; - identifier_ = other.identifier_; - onChanged(); - break; - } - case BINDING: { - mergeBinding(other.getBinding()); - break; - } - case IDENTIFIER_NOT_SET: { - break; - } + public Builder mergeFrom(flyteidl.core.ArtifactId.ArtifactTag other) { + if (other == flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance()) return this; + if (other.hasArtifactKey()) { + mergeArtifactKey(other.getArtifactKey()); + } + if (other.hasValue()) { + mergeValue(other.getValue()); } this.mergeUnknownFields(other.unknownFields); onChanged(); @@ -6613,11 +6871,11 @@ public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - flyteidl.core.ArtifactId.ArtifactQuery parsedMessage = null; + flyteidl.core.ArtifactId.ArtifactTag parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.core.ArtifactId.ArtifactQuery) e.getUnfinishedMessage(); + parsedMessage = (flyteidl.core.ArtifactId.ArtifactTag) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { @@ -6626,553 +6884,239 @@ public Builder mergeFrom( } return this; } - private int identifierCase_ = 0; - private java.lang.Object identifier_; - public IdentifierCase - getIdentifierCase() { - return IdentifierCase.forNumber( - identifierCase_); - } - - public Builder clearIdentifier() { - identifierCase_ = 0; - identifier_ = null; - onChanged(); - return this; - } - + private flyteidl.core.ArtifactId.ArtifactKey artifactKey_; private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> artifactIdBuilder_; + flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder> artifactKeyBuilder_; /** - * .flyteidl.core.ArtifactID artifact_id = 1; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - public boolean hasArtifactId() { - return identifierCase_ == 1; + public boolean hasArtifactKey() { + return artifactKeyBuilder_ != null || artifactKey_ != null; } /** - * .flyteidl.core.ArtifactID artifact_id = 1; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - public flyteidl.core.ArtifactId.ArtifactID getArtifactId() { - if (artifactIdBuilder_ == null) { - if (identifierCase_ == 1) { - return (flyteidl.core.ArtifactId.ArtifactID) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); + public flyteidl.core.ArtifactId.ArtifactKey getArtifactKey() { + if (artifactKeyBuilder_ == null) { + return artifactKey_ == null ? 
flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; } else { - if (identifierCase_ == 1) { - return artifactIdBuilder_.getMessage(); - } - return flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); + return artifactKeyBuilder_.getMessage(); } } /** - * .flyteidl.core.ArtifactID artifact_id = 1; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - public Builder setArtifactId(flyteidl.core.ArtifactId.ArtifactID value) { - if (artifactIdBuilder_ == null) { + public Builder setArtifactKey(flyteidl.core.ArtifactId.ArtifactKey value) { + if (artifactKeyBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - identifier_ = value; + artifactKey_ = value; onChanged(); } else { - artifactIdBuilder_.setMessage(value); + artifactKeyBuilder_.setMessage(value); } - identifierCase_ = 1; + return this; } /** - * .flyteidl.core.ArtifactID artifact_id = 1; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - public Builder setArtifactId( - flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { - if (artifactIdBuilder_ == null) { - identifier_ = builderForValue.build(); + public Builder setArtifactKey( + flyteidl.core.ArtifactId.ArtifactKey.Builder builderForValue) { + if (artifactKeyBuilder_ == null) { + artifactKey_ = builderForValue.build(); onChanged(); } else { - artifactIdBuilder_.setMessage(builderForValue.build()); + artifactKeyBuilder_.setMessage(builderForValue.build()); } - identifierCase_ = 1; + return this; } /** - * .flyteidl.core.ArtifactID artifact_id = 1; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - public Builder mergeArtifactId(flyteidl.core.ArtifactId.ArtifactID value) { - if (artifactIdBuilder_ == null) { - if (identifierCase_ == 1 && - identifier_ != flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance()) { - identifier_ = flyteidl.core.ArtifactId.ArtifactID.newBuilder((flyteidl.core.ArtifactId.ArtifactID) identifier_) - .mergeFrom(value).buildPartial(); + public Builder 
mergeArtifactKey(flyteidl.core.ArtifactId.ArtifactKey value) { + if (artifactKeyBuilder_ == null) { + if (artifactKey_ != null) { + artifactKey_ = + flyteidl.core.ArtifactId.ArtifactKey.newBuilder(artifactKey_).mergeFrom(value).buildPartial(); } else { - identifier_ = value; + artifactKey_ = value; } onChanged(); } else { - if (identifierCase_ == 1) { - artifactIdBuilder_.mergeFrom(value); - } - artifactIdBuilder_.setMessage(value); + artifactKeyBuilder_.mergeFrom(value); } - identifierCase_ = 1; + return this; } /** - * .flyteidl.core.ArtifactID artifact_id = 1; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - public Builder clearArtifactId() { - if (artifactIdBuilder_ == null) { - if (identifierCase_ == 1) { - identifierCase_ = 0; - identifier_ = null; - onChanged(); - } + public Builder clearArtifactKey() { + if (artifactKeyBuilder_ == null) { + artifactKey_ = null; + onChanged(); } else { - if (identifierCase_ == 1) { - identifierCase_ = 0; - identifier_ = null; - } - artifactIdBuilder_.clear(); + artifactKey_ = null; + artifactKeyBuilder_ = null; } + return this; } /** - * .flyteidl.core.ArtifactID artifact_id = 1; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - public flyteidl.core.ArtifactId.ArtifactID.Builder getArtifactIdBuilder() { - return getArtifactIdFieldBuilder().getBuilder(); + public flyteidl.core.ArtifactId.ArtifactKey.Builder getArtifactKeyBuilder() { + + onChanged(); + return getArtifactKeyFieldBuilder().getBuilder(); } /** - * .flyteidl.core.ArtifactID artifact_id = 1; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ - public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder() { - if ((identifierCase_ == 1) && (artifactIdBuilder_ != null)) { - return artifactIdBuilder_.getMessageOrBuilder(); + public flyteidl.core.ArtifactId.ArtifactKeyOrBuilder getArtifactKeyOrBuilder() { + if (artifactKeyBuilder_ != null) { + return artifactKeyBuilder_.getMessageOrBuilder(); } else { - if (identifierCase_ == 1) { - return 
(flyteidl.core.ArtifactId.ArtifactID) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); + return artifactKey_ == null ? + flyteidl.core.ArtifactId.ArtifactKey.getDefaultInstance() : artifactKey_; } } /** - * .flyteidl.core.ArtifactID artifact_id = 1; + * .flyteidl.core.ArtifactKey artifact_key = 1; */ private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> - getArtifactIdFieldBuilder() { - if (artifactIdBuilder_ == null) { - if (!(identifierCase_ == 1)) { - identifier_ = flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); - } - artifactIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder>( - (flyteidl.core.ArtifactId.ArtifactID) identifier_, + flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder> + getArtifactKeyFieldBuilder() { + if (artifactKeyBuilder_ == null) { + artifactKeyBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.ArtifactKey, flyteidl.core.ArtifactId.ArtifactKey.Builder, flyteidl.core.ArtifactId.ArtifactKeyOrBuilder>( + getArtifactKey(), getParentForChildren(), isClean()); - identifier_ = null; + artifactKey_ = null; } - identifierCase_ = 1; - onChanged();; - return artifactIdBuilder_; + return artifactKeyBuilder_; } + private flyteidl.core.ArtifactId.LabelValue value_; private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactTag, flyteidl.core.ArtifactId.ArtifactTag.Builder, flyteidl.core.ArtifactId.ArtifactTagOrBuilder> artifactTagBuilder_; + flyteidl.core.ArtifactId.LabelValue, flyteidl.core.ArtifactId.LabelValue.Builder, flyteidl.core.ArtifactId.LabelValueOrBuilder> valueBuilder_; /** - * .flyteidl.core.ArtifactTag 
artifact_tag = 2; + * .flyteidl.core.LabelValue value = 2; */ - public boolean hasArtifactTag() { - return identifierCase_ == 2; + public boolean hasValue() { + return valueBuilder_ != null || value_ != null; } /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; + * .flyteidl.core.LabelValue value = 2; */ - public flyteidl.core.ArtifactId.ArtifactTag getArtifactTag() { - if (artifactTagBuilder_ == null) { - if (identifierCase_ == 2) { - return (flyteidl.core.ArtifactId.ArtifactTag) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); + public flyteidl.core.ArtifactId.LabelValue getValue() { + if (valueBuilder_ == null) { + return value_ == null ? flyteidl.core.ArtifactId.LabelValue.getDefaultInstance() : value_; } else { - if (identifierCase_ == 2) { - return artifactTagBuilder_.getMessage(); - } - return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); + return valueBuilder_.getMessage(); } } /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; + * .flyteidl.core.LabelValue value = 2; */ - public Builder setArtifactTag(flyteidl.core.ArtifactId.ArtifactTag value) { - if (artifactTagBuilder_ == null) { + public Builder setValue(flyteidl.core.ArtifactId.LabelValue value) { + if (valueBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - identifier_ = value; + value_ = value; onChanged(); } else { - artifactTagBuilder_.setMessage(value); + valueBuilder_.setMessage(value); } - identifierCase_ = 2; + return this; } /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; + * .flyteidl.core.LabelValue value = 2; */ - public Builder setArtifactTag( - flyteidl.core.ArtifactId.ArtifactTag.Builder builderForValue) { - if (artifactTagBuilder_ == null) { - identifier_ = builderForValue.build(); + public Builder setValue( + flyteidl.core.ArtifactId.LabelValue.Builder builderForValue) { + if (valueBuilder_ == null) { + value_ = builderForValue.build(); onChanged(); } else { - 
artifactTagBuilder_.setMessage(builderForValue.build()); + valueBuilder_.setMessage(builderForValue.build()); } - identifierCase_ = 2; + return this; } /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; + * .flyteidl.core.LabelValue value = 2; */ - public Builder mergeArtifactTag(flyteidl.core.ArtifactId.ArtifactTag value) { - if (artifactTagBuilder_ == null) { - if (identifierCase_ == 2 && - identifier_ != flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance()) { - identifier_ = flyteidl.core.ArtifactId.ArtifactTag.newBuilder((flyteidl.core.ArtifactId.ArtifactTag) identifier_) - .mergeFrom(value).buildPartial(); + public Builder mergeValue(flyteidl.core.ArtifactId.LabelValue value) { + if (valueBuilder_ == null) { + if (value_ != null) { + value_ = + flyteidl.core.ArtifactId.LabelValue.newBuilder(value_).mergeFrom(value).buildPartial(); } else { - identifier_ = value; + value_ = value; } onChanged(); } else { - if (identifierCase_ == 2) { - artifactTagBuilder_.mergeFrom(value); - } - artifactTagBuilder_.setMessage(value); - } - identifierCase_ = 2; - return this; - } - /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; - */ - public Builder clearArtifactTag() { - if (artifactTagBuilder_ == null) { - if (identifierCase_ == 2) { - identifierCase_ = 0; - identifier_ = null; - onChanged(); - } - } else { - if (identifierCase_ == 2) { - identifierCase_ = 0; - identifier_ = null; - } - artifactTagBuilder_.clear(); - } - return this; - } - /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; - */ - public flyteidl.core.ArtifactId.ArtifactTag.Builder getArtifactTagBuilder() { - return getArtifactTagFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; - */ - public flyteidl.core.ArtifactId.ArtifactTagOrBuilder getArtifactTagOrBuilder() { - if ((identifierCase_ == 2) && (artifactTagBuilder_ != null)) { - return artifactTagBuilder_.getMessageOrBuilder(); - } else { - if (identifierCase_ == 2) { - return 
(flyteidl.core.ArtifactId.ArtifactTag) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); - } - } - /** - * .flyteidl.core.ArtifactTag artifact_tag = 2; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactTag, flyteidl.core.ArtifactId.ArtifactTag.Builder, flyteidl.core.ArtifactId.ArtifactTagOrBuilder> - getArtifactTagFieldBuilder() { - if (artifactTagBuilder_ == null) { - if (!(identifierCase_ == 2)) { - identifier_ = flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); - } - artifactTagBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactTag, flyteidl.core.ArtifactId.ArtifactTag.Builder, flyteidl.core.ArtifactId.ArtifactTagOrBuilder>( - (flyteidl.core.ArtifactId.ArtifactTag) identifier_, - getParentForChildren(), - isClean()); - identifier_ = null; - } - identifierCase_ = 2; - onChanged();; - return artifactTagBuilder_; - } - - /** - * string uri = 3; - */ - public java.lang.String getUri() { - java.lang.Object ref = ""; - if (identifierCase_ == 3) { - ref = identifier_; - } - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (identifierCase_ == 3) { - identifier_ = s; - } - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string uri = 3; - */ - public com.google.protobuf.ByteString - getUriBytes() { - java.lang.Object ref = ""; - if (identifierCase_ == 3) { - ref = identifier_; - } - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - if (identifierCase_ == 3) { - identifier_ = b; - } - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string uri = 3; - */ - public Builder setUri( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - 
identifierCase_ = 3; - identifier_ = value; - onChanged(); - return this; - } - /** - * string uri = 3; - */ - public Builder clearUri() { - if (identifierCase_ == 3) { - identifierCase_ = 0; - identifier_ = null; - onChanged(); + valueBuilder_.mergeFrom(value); } - return this; - } - /** - * string uri = 3; - */ - public Builder setUriBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - checkByteStringIsUtf8(value); - identifierCase_ = 3; - identifier_ = value; - onChanged(); - return this; - } - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactBindingData, flyteidl.core.ArtifactId.ArtifactBindingData.Builder, flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder> bindingBuilder_; - /** - *
-       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-       * artifacts, or a partition value derived from a triggering artifact.
-       * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; - */ - public boolean hasBinding() { - return identifierCase_ == 4; - } - /** - *
-       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-       * artifacts, or a partition value derived from a triggering artifact.
-       * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; - */ - public flyteidl.core.ArtifactId.ArtifactBindingData getBinding() { - if (bindingBuilder_ == null) { - if (identifierCase_ == 4) { - return (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); - } else { - if (identifierCase_ == 4) { - return bindingBuilder_.getMessage(); - } - return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); - } - } - /** - *
-       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-       * artifacts, or a partition value derived from a triggering artifact.
-       * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; - */ - public Builder setBinding(flyteidl.core.ArtifactId.ArtifactBindingData value) { - if (bindingBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - identifier_ = value; - onChanged(); - } else { - bindingBuilder_.setMessage(value); - } - identifierCase_ = 4; - return this; - } - /** - *
-       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-       * artifacts, or a partition value derived from a triggering artifact.
-       * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; - */ - public Builder setBinding( - flyteidl.core.ArtifactId.ArtifactBindingData.Builder builderForValue) { - if (bindingBuilder_ == null) { - identifier_ = builderForValue.build(); - onChanged(); - } else { - bindingBuilder_.setMessage(builderForValue.build()); - } - identifierCase_ = 4; return this; } /** - *
-       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-       * artifacts, or a partition value derived from a triggering artifact.
-       * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; + * .flyteidl.core.LabelValue value = 2; */ - public Builder mergeBinding(flyteidl.core.ArtifactId.ArtifactBindingData value) { - if (bindingBuilder_ == null) { - if (identifierCase_ == 4 && - identifier_ != flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance()) { - identifier_ = flyteidl.core.ArtifactId.ArtifactBindingData.newBuilder((flyteidl.core.ArtifactId.ArtifactBindingData) identifier_) - .mergeFrom(value).buildPartial(); - } else { - identifier_ = value; - } + public Builder clearValue() { + if (valueBuilder_ == null) { + value_ = null; onChanged(); } else { - if (identifierCase_ == 4) { - bindingBuilder_.mergeFrom(value); - } - bindingBuilder_.setMessage(value); - } - identifierCase_ = 4; - return this; - } - /** - *
-       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-       * artifacts, or a partition value derived from a triggering artifact.
-       * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; - */ - public Builder clearBinding() { - if (bindingBuilder_ == null) { - if (identifierCase_ == 4) { - identifierCase_ = 0; - identifier_ = null; - onChanged(); - } - } else { - if (identifierCase_ == 4) { - identifierCase_ = 0; - identifier_ = null; - } - bindingBuilder_.clear(); + value_ = null; + valueBuilder_ = null; } + return this; } /** - *
-       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-       * artifacts, or a partition value derived from a triggering artifact.
-       * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; + * .flyteidl.core.LabelValue value = 2; */ - public flyteidl.core.ArtifactId.ArtifactBindingData.Builder getBindingBuilder() { - return getBindingFieldBuilder().getBuilder(); + public flyteidl.core.ArtifactId.LabelValue.Builder getValueBuilder() { + + onChanged(); + return getValueFieldBuilder().getBuilder(); } /** - *
-       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-       * artifacts, or a partition value derived from a triggering artifact.
-       * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; + * .flyteidl.core.LabelValue value = 2; */ - public flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder getBindingOrBuilder() { - if ((identifierCase_ == 4) && (bindingBuilder_ != null)) { - return bindingBuilder_.getMessageOrBuilder(); + public flyteidl.core.ArtifactId.LabelValueOrBuilder getValueOrBuilder() { + if (valueBuilder_ != null) { + return valueBuilder_.getMessageOrBuilder(); } else { - if (identifierCase_ == 4) { - return (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_; - } - return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); + return value_ == null ? + flyteidl.core.ArtifactId.LabelValue.getDefaultInstance() : value_; } } /** - *
-       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
-       * artifacts, or a partition value derived from a triggering artifact.
-       * 
- * - * .flyteidl.core.ArtifactBindingData binding = 4; + * .flyteidl.core.LabelValue value = 2; */ private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactBindingData, flyteidl.core.ArtifactId.ArtifactBindingData.Builder, flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder> - getBindingFieldBuilder() { - if (bindingBuilder_ == null) { - if (!(identifierCase_ == 4)) { - identifier_ = flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); - } - bindingBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactBindingData, flyteidl.core.ArtifactId.ArtifactBindingData.Builder, flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder>( - (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_, + flyteidl.core.ArtifactId.LabelValue, flyteidl.core.ArtifactId.LabelValue.Builder, flyteidl.core.ArtifactId.LabelValueOrBuilder> + getValueFieldBuilder() { + if (valueBuilder_ == null) { + valueBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.LabelValue, flyteidl.core.ArtifactId.LabelValue.Builder, flyteidl.core.ArtifactId.LabelValueOrBuilder>( + getValue(), getParentForChildren(), isClean()); - identifier_ = null; + value_ = null; } - identifierCase_ = 4; - onChanged();; - return bindingBuilder_; + return valueBuilder_; } @java.lang.Override public final Builder setUnknownFields( @@ -7187,138 +7131,137 @@ public final Builder mergeUnknownFields( } - // @@protoc_insertion_point(builder_scope:flyteidl.core.ArtifactQuery) + // @@protoc_insertion_point(builder_scope:flyteidl.core.ArtifactTag) } - // @@protoc_insertion_point(class_scope:flyteidl.core.ArtifactQuery) - private static final flyteidl.core.ArtifactId.ArtifactQuery DEFAULT_INSTANCE; + // @@protoc_insertion_point(class_scope:flyteidl.core.ArtifactTag) + private static final flyteidl.core.ArtifactId.ArtifactTag DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new flyteidl.core.ArtifactId.ArtifactQuery(); + 
DEFAULT_INSTANCE = new flyteidl.core.ArtifactId.ArtifactTag(); } - public static flyteidl.core.ArtifactId.ArtifactQuery getDefaultInstance() { + public static flyteidl.core.ArtifactId.ArtifactTag getDefaultInstance() { return DEFAULT_INSTANCE; } - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override - public ArtifactQuery parsePartialFrom( + public ArtifactTag parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new ArtifactQuery(input, extensionRegistry); + return new ArtifactTag(input, extensionRegistry); } }; - public static com.google.protobuf.Parser parser() { + public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override - public flyteidl.core.ArtifactId.ArtifactQuery getDefaultInstanceForType() { + public flyteidl.core.ArtifactId.ArtifactTag getDefaultInstanceForType() { return DEFAULT_INSTANCE; } } - public interface TriggerOrBuilder extends - // @@protoc_insertion_point(interface_extends:flyteidl.core.Trigger) + public interface ArtifactQueryOrBuilder extends + // @@protoc_insertion_point(interface_extends:flyteidl.core.ArtifactQuery) com.google.protobuf.MessageOrBuilder { /** - *
-     * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-     * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactID artifact_id = 1; */ - boolean hasTriggerId(); + boolean hasArtifactId(); /** - *
-     * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-     * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactID artifact_id = 1; */ - flyteidl.core.IdentifierOuterClass.Identifier getTriggerId(); + flyteidl.core.ArtifactId.ArtifactID getArtifactId(); /** - *
-     * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-     * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactID artifact_id = 1; */ - flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getTriggerIdOrBuilder(); + flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder(); /** - *
-     * These are partial artifact IDs that will be triggered on
-     * Consider making these ArtifactQuery instead.
-     * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactTag artifact_tag = 2; */ - java.util.List - getTriggersList(); + boolean hasArtifactTag(); /** - *
-     * These are partial artifact IDs that will be triggered on
-     * Consider making these ArtifactQuery instead.
-     * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactTag artifact_tag = 2; + */ + flyteidl.core.ArtifactId.ArtifactTag getArtifactTag(); + /** + * .flyteidl.core.ArtifactTag artifact_tag = 2; + */ + flyteidl.core.ArtifactId.ArtifactTagOrBuilder getArtifactTagOrBuilder(); + + /** + * string uri = 3; + */ + java.lang.String getUri(); + /** + * string uri = 3; */ - flyteidl.core.ArtifactId.ArtifactID getTriggers(int index); + com.google.protobuf.ByteString + getUriBytes(); + /** *
-     * These are partial artifact IDs that will be triggered on
-     * Consider making these ArtifactQuery instead.
+     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+     * artifacts, or a partition value derived from a triggering artifact.
      * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - int getTriggersCount(); + boolean hasBinding(); /** *
-     * These are partial artifact IDs that will be triggered on
-     * Consider making these ArtifactQuery instead.
+     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+     * artifacts, or a partition value derived from a triggering artifact.
      * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - java.util.List - getTriggersOrBuilderList(); + flyteidl.core.ArtifactId.ArtifactBindingData getBinding(); /** *
-     * These are partial artifact IDs that will be triggered on
-     * Consider making these ArtifactQuery instead.
+     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+     * artifacts, or a partition value derived from a triggering artifact.
      * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - flyteidl.core.ArtifactId.ArtifactIDOrBuilder getTriggersOrBuilder( - int index); + flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder getBindingOrBuilder(); + + public flyteidl.core.ArtifactId.ArtifactQuery.IdentifierCase getIdentifierCase(); } /** - * Protobuf type {@code flyteidl.core.Trigger} + *
+   * Uniqueness constraints for Artifacts
+   *  - project, domain, name, version, partitions
+   * Option 2 (tags are standalone, point to an individual artifact id):
+   *  - project, domain, name, alias (points to one partition if partitioned)
+   *  - project, domain, name, partition key, partition value
+   * 
+ * + * Protobuf type {@code flyteidl.core.ArtifactQuery} */ - public static final class Trigger extends + public static final class ArtifactQuery extends com.google.protobuf.GeneratedMessageV3 implements - // @@protoc_insertion_point(message_implements:flyteidl.core.Trigger) - TriggerOrBuilder { + // @@protoc_insertion_point(message_implements:flyteidl.core.ArtifactQuery) + ArtifactQueryOrBuilder { private static final long serialVersionUID = 0L; - // Use Trigger.newBuilder() to construct. - private Trigger(com.google.protobuf.GeneratedMessageV3.Builder builder) { + // Use ArtifactQuery.newBuilder() to construct. + private ArtifactQuery(com.google.protobuf.GeneratedMessageV3.Builder builder) { super(builder); } - private Trigger() { - triggers_ = java.util.Collections.emptyList(); + private ArtifactQuery() { } @java.lang.Override @@ -7326,7 +7269,7 @@ private Trigger() { getUnknownFields() { return this.unknownFields; } - private Trigger( + private ArtifactQuery( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -7346,25 +7289,51 @@ private Trigger( done = true; break; case 10: { - flyteidl.core.IdentifierOuterClass.Identifier.Builder subBuilder = null; - if (triggerId_ != null) { - subBuilder = triggerId_.toBuilder(); + flyteidl.core.ArtifactId.ArtifactID.Builder subBuilder = null; + if (identifierCase_ == 1) { + subBuilder = ((flyteidl.core.ArtifactId.ArtifactID) identifier_).toBuilder(); } - triggerId_ = input.readMessage(flyteidl.core.IdentifierOuterClass.Identifier.parser(), extensionRegistry); + identifier_ = + input.readMessage(flyteidl.core.ArtifactId.ArtifactID.parser(), extensionRegistry); if (subBuilder != null) { - subBuilder.mergeFrom(triggerId_); - triggerId_ = subBuilder.buildPartial(); + subBuilder.mergeFrom((flyteidl.core.ArtifactId.ArtifactID) identifier_); + identifier_ = subBuilder.buildPartial(); } - + identifierCase_ = 1; 
break; } case 18: { - if (!((mutable_bitField0_ & 0x00000002) != 0)) { - triggers_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000002; + flyteidl.core.ArtifactId.ArtifactTag.Builder subBuilder = null; + if (identifierCase_ == 2) { + subBuilder = ((flyteidl.core.ArtifactId.ArtifactTag) identifier_).toBuilder(); + } + identifier_ = + input.readMessage(flyteidl.core.ArtifactId.ArtifactTag.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((flyteidl.core.ArtifactId.ArtifactTag) identifier_); + identifier_ = subBuilder.buildPartial(); + } + identifierCase_ = 2; + break; + } + case 26: { + java.lang.String s = input.readStringRequireUtf8(); + identifierCase_ = 3; + identifier_ = s; + break; + } + case 34: { + flyteidl.core.ArtifactId.ArtifactBindingData.Builder subBuilder = null; + if (identifierCase_ == 4) { + subBuilder = ((flyteidl.core.ArtifactId.ArtifactBindingData) identifier_).toBuilder(); } - triggers_.add( - input.readMessage(flyteidl.core.ArtifactId.ArtifactID.parser(), extensionRegistry)); + identifier_ = + input.readMessage(flyteidl.core.ArtifactId.ArtifactBindingData.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom((flyteidl.core.ArtifactId.ArtifactBindingData) identifier_); + identifier_ = subBuilder.buildPartial(); + } + identifierCase_ = 4; break; } default: { @@ -7382,118 +7351,199 @@ private Trigger( throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { - if (((mutable_bitField0_ & 0x00000002) != 0)) { - triggers_ = java.util.Collections.unmodifiableList(triggers_); - } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_Trigger_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactQuery_descriptor; } @java.lang.Override protected 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_Trigger_fieldAccessorTable + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactQuery_fieldAccessorTable .ensureFieldAccessorsInitialized( - flyteidl.core.ArtifactId.Trigger.class, flyteidl.core.ArtifactId.Trigger.Builder.class); + flyteidl.core.ArtifactId.ArtifactQuery.class, flyteidl.core.ArtifactId.ArtifactQuery.Builder.class); + } + + private int identifierCase_ = 0; + private java.lang.Object identifier_; + public enum IdentifierCase + implements com.google.protobuf.Internal.EnumLite { + ARTIFACT_ID(1), + ARTIFACT_TAG(2), + URI(3), + BINDING(4), + IDENTIFIER_NOT_SET(0); + private final int value; + private IdentifierCase(int value) { + this.value = value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated + public static IdentifierCase valueOf(int value) { + return forNumber(value); + } + + public static IdentifierCase forNumber(int value) { + switch (value) { + case 1: return ARTIFACT_ID; + case 2: return ARTIFACT_TAG; + case 3: return URI; + case 4: return BINDING; + case 0: return IDENTIFIER_NOT_SET; + default: return null; + } + } + public int getNumber() { + return this.value; + } + }; + + public IdentifierCase + getIdentifierCase() { + return IdentifierCase.forNumber( + identifierCase_); } - private int bitField0_; - public static final int TRIGGER_ID_FIELD_NUMBER = 1; - private flyteidl.core.IdentifierOuterClass.Identifier triggerId_; + public static final int ARTIFACT_ID_FIELD_NUMBER = 1; /** - *
-     * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-     * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactID artifact_id = 1; */ - public boolean hasTriggerId() { - return triggerId_ != null; + public boolean hasArtifactId() { + return identifierCase_ == 1; } /** - *
-     * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-     * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactID artifact_id = 1; */ - public flyteidl.core.IdentifierOuterClass.Identifier getTriggerId() { - return triggerId_ == null ? flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : triggerId_; + public flyteidl.core.ArtifactId.ArtifactID getArtifactId() { + if (identifierCase_ == 1) { + return (flyteidl.core.ArtifactId.ArtifactID) identifier_; + } + return flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); } /** - *
-     * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-     * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactID artifact_id = 1; */ - public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getTriggerIdOrBuilder() { - return getTriggerId(); + public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder() { + if (identifierCase_ == 1) { + return (flyteidl.core.ArtifactId.ArtifactID) identifier_; + } + return flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); } - public static final int TRIGGERS_FIELD_NUMBER = 2; - private java.util.List triggers_; + public static final int ARTIFACT_TAG_FIELD_NUMBER = 2; /** - *
-     * These are partial artifact IDs that will be triggered on
-     * Consider making these ArtifactQuery instead.
-     * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactTag artifact_tag = 2; */ - public java.util.List getTriggersList() { - return triggers_; + public boolean hasArtifactTag() { + return identifierCase_ == 2; } /** - *
-     * These are partial artifact IDs that will be triggered on
-     * Consider making these ArtifactQuery instead.
-     * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactTag artifact_tag = 2; + */ + public flyteidl.core.ArtifactId.ArtifactTag getArtifactTag() { + if (identifierCase_ == 2) { + return (flyteidl.core.ArtifactId.ArtifactTag) identifier_; + } + return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); + } + /** + * .flyteidl.core.ArtifactTag artifact_tag = 2; + */ + public flyteidl.core.ArtifactId.ArtifactTagOrBuilder getArtifactTagOrBuilder() { + if (identifierCase_ == 2) { + return (flyteidl.core.ArtifactId.ArtifactTag) identifier_; + } + return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); + } + + public static final int URI_FIELD_NUMBER = 3; + /** + * string uri = 3; + */ + public java.lang.String getUri() { + java.lang.Object ref = ""; + if (identifierCase_ == 3) { + ref = identifier_; + } + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (identifierCase_ == 3) { + identifier_ = s; + } + return s; + } + } + /** + * string uri = 3; */ - public java.util.List - getTriggersOrBuilderList() { - return triggers_; + public com.google.protobuf.ByteString + getUriBytes() { + java.lang.Object ref = ""; + if (identifierCase_ == 3) { + ref = identifier_; + } + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + if (identifierCase_ == 3) { + identifier_ = b; + } + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } + + public static final int BINDING_FIELD_NUMBER = 4; /** *
-     * These are partial artifact IDs that will be triggered on
-     * Consider making these ArtifactQuery instead.
+     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+     * artifacts, or a partition value derived from a triggering artifact.
      * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - public int getTriggersCount() { - return triggers_.size(); + public boolean hasBinding() { + return identifierCase_ == 4; } /** *
-     * These are partial artifact IDs that will be triggered on
-     * Consider making these ArtifactQuery instead.
+     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+     * artifacts, or a partition value derived from a triggering artifact.
      * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - public flyteidl.core.ArtifactId.ArtifactID getTriggers(int index) { - return triggers_.get(index); + public flyteidl.core.ArtifactId.ArtifactBindingData getBinding() { + if (identifierCase_ == 4) { + return (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_; + } + return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); } /** *
-     * These are partial artifact IDs that will be triggered on
-     * Consider making these ArtifactQuery instead.
+     * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+     * artifacts, or a partition value derived from a triggering artifact.
      * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getTriggersOrBuilder( - int index) { - return triggers_.get(index); + public flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder getBindingOrBuilder() { + if (identifierCase_ == 4) { + return (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_; + } + return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @@ -7510,11 +7560,17 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (triggerId_ != null) { - output.writeMessage(1, getTriggerId()); + if (identifierCase_ == 1) { + output.writeMessage(1, (flyteidl.core.ArtifactId.ArtifactID) identifier_); + } + if (identifierCase_ == 2) { + output.writeMessage(2, (flyteidl.core.ArtifactId.ArtifactTag) identifier_); + } + if (identifierCase_ == 3) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 3, identifier_); } - for (int i = 0; i < triggers_.size(); i++) { - output.writeMessage(2, triggers_.get(i)); + if (identifierCase_ == 4) { + output.writeMessage(4, (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_); } unknownFields.writeTo(output); } @@ -7525,13 +7581,20 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (triggerId_ != null) { + if (identifierCase_ == 1) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, (flyteidl.core.ArtifactId.ArtifactID) identifier_); + } + if (identifierCase_ == 2) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, getTriggerId()); + .computeMessageSize(2, (flyteidl.core.ArtifactId.ArtifactTag) identifier_); + } + if (identifierCase_ == 3) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, identifier_); } - for (int i = 0; i < 
triggers_.size(); i++) { + if (identifierCase_ == 4) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, triggers_.get(i)); + .computeMessageSize(4, (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -7543,18 +7606,32 @@ public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } - if (!(obj instanceof flyteidl.core.ArtifactId.Trigger)) { + if (!(obj instanceof flyteidl.core.ArtifactId.ArtifactQuery)) { return super.equals(obj); } - flyteidl.core.ArtifactId.Trigger other = (flyteidl.core.ArtifactId.Trigger) obj; + flyteidl.core.ArtifactId.ArtifactQuery other = (flyteidl.core.ArtifactId.ArtifactQuery) obj; - if (hasTriggerId() != other.hasTriggerId()) return false; - if (hasTriggerId()) { - if (!getTriggerId() - .equals(other.getTriggerId())) return false; + if (!getIdentifierCase().equals(other.getIdentifierCase())) return false; + switch (identifierCase_) { + case 1: + if (!getArtifactId() + .equals(other.getArtifactId())) return false; + break; + case 2: + if (!getArtifactTag() + .equals(other.getArtifactTag())) return false; + break; + case 3: + if (!getUri() + .equals(other.getUri())) return false; + break; + case 4: + if (!getBinding() + .equals(other.getBinding())) return false; + break; + case 0: + default: } - if (!getTriggersList() - .equals(other.getTriggersList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -7566,82 +7643,94 @@ public int hashCode() { } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); - if (hasTriggerId()) { - hash = (37 * hash) + TRIGGER_ID_FIELD_NUMBER; - hash = (53 * hash) + getTriggerId().hashCode(); - } - if (getTriggersCount() > 0) { - hash = (37 * hash) + TRIGGERS_FIELD_NUMBER; - hash = (53 * hash) + getTriggersList().hashCode(); + switch (identifierCase_) { + case 1: + hash = (37 * hash) + ARTIFACT_ID_FIELD_NUMBER; + hash = (53 * hash) + 
getArtifactId().hashCode(); + break; + case 2: + hash = (37 * hash) + ARTIFACT_TAG_FIELD_NUMBER; + hash = (53 * hash) + getArtifactTag().hashCode(); + break; + case 3: + hash = (37 * hash) + URI_FIELD_NUMBER; + hash = (53 * hash) + getUri().hashCode(); + break; + case 4: + hash = (37 * hash) + BINDING_FIELD_NUMBER; + hash = (53 * hash) + getBinding().hashCode(); + break; + case 0: + default: } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } - public static flyteidl.core.ArtifactId.Trigger parseFrom( + public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.Trigger parseFrom( + public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.Trigger parseFrom( + public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.Trigger parseFrom( + public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.Trigger parseFrom(byte[] data) + public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static flyteidl.core.ArtifactId.Trigger parseFrom( + public static 
flyteidl.core.ArtifactId.ArtifactQuery parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static flyteidl.core.ArtifactId.Trigger parseFrom(java.io.InputStream input) + public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static flyteidl.core.ArtifactId.Trigger parseFrom( + public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } - public static flyteidl.core.ArtifactId.Trigger parseDelimitedFrom(java.io.InputStream input) + public static flyteidl.core.ArtifactId.ArtifactQuery parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } - public static flyteidl.core.ArtifactId.Trigger parseDelimitedFrom( + public static flyteidl.core.ArtifactId.ArtifactQuery parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } - public static flyteidl.core.ArtifactId.Trigger parseFrom( + public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } - public static flyteidl.core.ArtifactId.Trigger parseFrom( + public static flyteidl.core.ArtifactId.ArtifactQuery parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -7654,7 +7743,7 @@ public static flyteidl.core.ArtifactId.Trigger parseFrom( public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } - public static Builder newBuilder(flyteidl.core.ArtifactId.Trigger prototype) { + public static Builder newBuilder(flyteidl.core.ArtifactId.ArtifactQuery prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override @@ -7670,26 +7759,34 @@ protected Builder newBuilderForType( return builder; } /** - * Protobuf type {@code flyteidl.core.Trigger} + *
+     * Uniqueness constraints for Artifacts
+     *  - project, domain, name, version, partitions
+     * Option 2 (tags are standalone, point to an individual artifact id):
+     *  - project, domain, name, alias (points to one partition if partitioned)
+     *  - project, domain, name, partition key, partition value
+     * 
+ * + * Protobuf type {@code flyteidl.core.ArtifactQuery} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder implements - // @@protoc_insertion_point(builder_implements:flyteidl.core.Trigger) - flyteidl.core.ArtifactId.TriggerOrBuilder { + // @@protoc_insertion_point(builder_implements:flyteidl.core.ArtifactQuery) + flyteidl.core.ArtifactId.ArtifactQueryOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_Trigger_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactQuery_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_Trigger_fieldAccessorTable + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactQuery_fieldAccessorTable .ensureFieldAccessorsInitialized( - flyteidl.core.ArtifactId.Trigger.class, flyteidl.core.ArtifactId.Trigger.Builder.class); + flyteidl.core.ArtifactId.ArtifactQuery.class, flyteidl.core.ArtifactId.ArtifactQuery.Builder.class); } - // Construct using flyteidl.core.ArtifactId.Trigger.newBuilder() + // Construct using flyteidl.core.ArtifactId.ArtifactQuery.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -7702,41 +7799,30 @@ private Builder( private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { - getTriggersFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); - if (triggerIdBuilder_ == null) { - triggerId_ = null; - } else { - triggerId_ = null; - triggerIdBuilder_ = null; - } - if (triggersBuilder_ == null) { - triggers_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - triggersBuilder_.clear(); - } + identifierCase_ = 0; + identifier_ = null; return 
this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return flyteidl.core.ArtifactId.internal_static_flyteidl_core_Trigger_descriptor; + return flyteidl.core.ArtifactId.internal_static_flyteidl_core_ArtifactQuery_descriptor; } @java.lang.Override - public flyteidl.core.ArtifactId.Trigger getDefaultInstanceForType() { - return flyteidl.core.ArtifactId.Trigger.getDefaultInstance(); + public flyteidl.core.ArtifactId.ArtifactQuery getDefaultInstanceForType() { + return flyteidl.core.ArtifactId.ArtifactQuery.getDefaultInstance(); } @java.lang.Override - public flyteidl.core.ArtifactId.Trigger build() { - flyteidl.core.ArtifactId.Trigger result = buildPartial(); + public flyteidl.core.ArtifactId.ArtifactQuery build() { + flyteidl.core.ArtifactId.ArtifactQuery result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } @@ -7744,25 +7830,33 @@ public flyteidl.core.ArtifactId.Trigger build() { } @java.lang.Override - public flyteidl.core.ArtifactId.Trigger buildPartial() { - flyteidl.core.ArtifactId.Trigger result = new flyteidl.core.ArtifactId.Trigger(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (triggerIdBuilder_ == null) { - result.triggerId_ = triggerId_; - } else { - result.triggerId_ = triggerIdBuilder_.build(); + public flyteidl.core.ArtifactId.ArtifactQuery buildPartial() { + flyteidl.core.ArtifactId.ArtifactQuery result = new flyteidl.core.ArtifactId.ArtifactQuery(this); + if (identifierCase_ == 1) { + if (artifactIdBuilder_ == null) { + result.identifier_ = identifier_; + } else { + result.identifier_ = artifactIdBuilder_.build(); + } } - if (triggersBuilder_ == null) { - if (((bitField0_ & 0x00000002) != 0)) { - triggers_ = java.util.Collections.unmodifiableList(triggers_); - bitField0_ = (bitField0_ & ~0x00000002); + if (identifierCase_ == 2) { + if (artifactTagBuilder_ == null) { + result.identifier_ = identifier_; + } else { + 
result.identifier_ = artifactTagBuilder_.build(); + } + } + if (identifierCase_ == 3) { + result.identifier_ = identifier_; + } + if (identifierCase_ == 4) { + if (bindingBuilder_ == null) { + result.identifier_ = identifier_; + } else { + result.identifier_ = bindingBuilder_.build(); } - result.triggers_ = triggers_; - } else { - result.triggers_ = triggersBuilder_.build(); } - result.bitField0_ = to_bitField0_; + result.identifierCase_ = identifierCase_; onBuilt(); return result; } @@ -7793,564 +7887,622 @@ public Builder setRepeatedField( int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } - @java.lang.Override - public Builder addRepeatedField( - com.google.protobuf.Descriptors.FieldDescriptor field, - java.lang.Object value) { - return super.addRepeatedField(field, value); + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof flyteidl.core.ArtifactId.ArtifactQuery) { + return mergeFrom((flyteidl.core.ArtifactId.ArtifactQuery)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(flyteidl.core.ArtifactId.ArtifactQuery other) { + if (other == flyteidl.core.ArtifactId.ArtifactQuery.getDefaultInstance()) return this; + switch (other.getIdentifierCase()) { + case ARTIFACT_ID: { + mergeArtifactId(other.getArtifactId()); + break; + } + case ARTIFACT_TAG: { + mergeArtifactTag(other.getArtifactTag()); + break; + } + case URI: { + identifierCase_ = 3; + identifier_ = other.identifier_; + onChanged(); + break; + } + case BINDING: { + mergeBinding(other.getBinding()); + break; + } + case IDENTIFIER_NOT_SET: { + break; + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final 
boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + flyteidl.core.ArtifactId.ArtifactQuery parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (flyteidl.core.ArtifactId.ArtifactQuery) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int identifierCase_ = 0; + private java.lang.Object identifier_; + public IdentifierCase + getIdentifierCase() { + return IdentifierCase.forNumber( + identifierCase_); + } + + public Builder clearIdentifier() { + identifierCase_ = 0; + identifier_ = null; + onChanged(); + return this; + } + + + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> artifactIdBuilder_; + /** + * .flyteidl.core.ArtifactID artifact_id = 1; + */ + public boolean hasArtifactId() { + return identifierCase_ == 1; + } + /** + * .flyteidl.core.ArtifactID artifact_id = 1; + */ + public flyteidl.core.ArtifactId.ArtifactID getArtifactId() { + if (artifactIdBuilder_ == null) { + if (identifierCase_ == 1) { + return (flyteidl.core.ArtifactId.ArtifactID) identifier_; + } + return flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); + } else { + if (identifierCase_ == 1) { + return artifactIdBuilder_.getMessage(); + } + return flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); + } + } + /** + * .flyteidl.core.ArtifactID artifact_id = 1; + */ + public Builder setArtifactId(flyteidl.core.ArtifactId.ArtifactID value) { + if (artifactIdBuilder_ == null) { + if (value == null) { + throw new 
NullPointerException(); + } + identifier_ = value; + onChanged(); + } else { + artifactIdBuilder_.setMessage(value); + } + identifierCase_ = 1; + return this; } - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof flyteidl.core.ArtifactId.Trigger) { - return mergeFrom((flyteidl.core.ArtifactId.Trigger)other); + /** + * .flyteidl.core.ArtifactID artifact_id = 1; + */ + public Builder setArtifactId( + flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { + if (artifactIdBuilder_ == null) { + identifier_ = builderForValue.build(); + onChanged(); } else { - super.mergeFrom(other); - return this; + artifactIdBuilder_.setMessage(builderForValue.build()); } + identifierCase_ = 1; + return this; } - - public Builder mergeFrom(flyteidl.core.ArtifactId.Trigger other) { - if (other == flyteidl.core.ArtifactId.Trigger.getDefaultInstance()) return this; - if (other.hasTriggerId()) { - mergeTriggerId(other.getTriggerId()); + /** + * .flyteidl.core.ArtifactID artifact_id = 1; + */ + public Builder mergeArtifactId(flyteidl.core.ArtifactId.ArtifactID value) { + if (artifactIdBuilder_ == null) { + if (identifierCase_ == 1 && + identifier_ != flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance()) { + identifier_ = flyteidl.core.ArtifactId.ArtifactID.newBuilder((flyteidl.core.ArtifactId.ArtifactID) identifier_) + .mergeFrom(value).buildPartial(); + } else { + identifier_ = value; + } + onChanged(); + } else { + if (identifierCase_ == 1) { + artifactIdBuilder_.mergeFrom(value); + } + artifactIdBuilder_.setMessage(value); } - if (triggersBuilder_ == null) { - if (!other.triggers_.isEmpty()) { - if (triggers_.isEmpty()) { - triggers_ = other.triggers_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureTriggersIsMutable(); - triggers_.addAll(other.triggers_); - } + identifierCase_ = 1; + return this; + } + /** + * .flyteidl.core.ArtifactID artifact_id = 1; + */ + public Builder clearArtifactId() { + if 
(artifactIdBuilder_ == null) { + if (identifierCase_ == 1) { + identifierCase_ = 0; + identifier_ = null; onChanged(); } } else { - if (!other.triggers_.isEmpty()) { - if (triggersBuilder_.isEmpty()) { - triggersBuilder_.dispose(); - triggersBuilder_ = null; - triggers_ = other.triggers_; - bitField0_ = (bitField0_ & ~0x00000002); - triggersBuilder_ = - com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? - getTriggersFieldBuilder() : null; - } else { - triggersBuilder_.addAllMessages(other.triggers_); - } + if (identifierCase_ == 1) { + identifierCase_ = 0; + identifier_ = null; } + artifactIdBuilder_.clear(); } - this.mergeUnknownFields(other.unknownFields); - onChanged(); return this; } - - @java.lang.Override - public final boolean isInitialized() { - return true; + /** + * .flyteidl.core.ArtifactID artifact_id = 1; + */ + public flyteidl.core.ArtifactId.ArtifactID.Builder getArtifactIdBuilder() { + return getArtifactIdFieldBuilder().getBuilder(); } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - flyteidl.core.ArtifactId.Trigger parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (flyteidl.core.ArtifactId.Trigger) e.getUnfinishedMessage(); - throw e.unwrapIOException(); - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); + /** + * .flyteidl.core.ArtifactID artifact_id = 1; + */ + public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdOrBuilder() { + if ((identifierCase_ == 1) && (artifactIdBuilder_ != null)) { + return artifactIdBuilder_.getMessageOrBuilder(); + } else { + if (identifierCase_ == 1) { + return (flyteidl.core.ArtifactId.ArtifactID) identifier_; } + return flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); } - return this; } - 
private int bitField0_; + /** + * .flyteidl.core.ArtifactID artifact_id = 1; + */ + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> + getArtifactIdFieldBuilder() { + if (artifactIdBuilder_ == null) { + if (!(identifierCase_ == 1)) { + identifier_ = flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance(); + } + artifactIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder>( + (flyteidl.core.ArtifactId.ArtifactID) identifier_, + getParentForChildren(), + isClean()); + identifier_ = null; + } + identifierCase_ = 1; + onChanged();; + return artifactIdBuilder_; + } - private flyteidl.core.IdentifierOuterClass.Identifier triggerId_; private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> triggerIdBuilder_; + flyteidl.core.ArtifactId.ArtifactTag, flyteidl.core.ArtifactId.ArtifactTag.Builder, flyteidl.core.ArtifactId.ArtifactTagOrBuilder> artifactTagBuilder_; /** - *
-       * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-       * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactTag artifact_tag = 2; */ - public boolean hasTriggerId() { - return triggerIdBuilder_ != null || triggerId_ != null; + public boolean hasArtifactTag() { + return identifierCase_ == 2; } /** - *
-       * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-       * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactTag artifact_tag = 2; */ - public flyteidl.core.IdentifierOuterClass.Identifier getTriggerId() { - if (triggerIdBuilder_ == null) { - return triggerId_ == null ? flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : triggerId_; + public flyteidl.core.ArtifactId.ArtifactTag getArtifactTag() { + if (artifactTagBuilder_ == null) { + if (identifierCase_ == 2) { + return (flyteidl.core.ArtifactId.ArtifactTag) identifier_; + } + return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); } else { - return triggerIdBuilder_.getMessage(); + if (identifierCase_ == 2) { + return artifactTagBuilder_.getMessage(); + } + return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); } } /** - *
-       * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-       * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactTag artifact_tag = 2; */ - public Builder setTriggerId(flyteidl.core.IdentifierOuterClass.Identifier value) { - if (triggerIdBuilder_ == null) { + public Builder setArtifactTag(flyteidl.core.ArtifactId.ArtifactTag value) { + if (artifactTagBuilder_ == null) { if (value == null) { throw new NullPointerException(); } - triggerId_ = value; + identifier_ = value; onChanged(); } else { - triggerIdBuilder_.setMessage(value); + artifactTagBuilder_.setMessage(value); } - + identifierCase_ = 2; return this; } /** - *
-       * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-       * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactTag artifact_tag = 2; */ - public Builder setTriggerId( - flyteidl.core.IdentifierOuterClass.Identifier.Builder builderForValue) { - if (triggerIdBuilder_ == null) { - triggerId_ = builderForValue.build(); + public Builder setArtifactTag( + flyteidl.core.ArtifactId.ArtifactTag.Builder builderForValue) { + if (artifactTagBuilder_ == null) { + identifier_ = builderForValue.build(); onChanged(); } else { - triggerIdBuilder_.setMessage(builderForValue.build()); + artifactTagBuilder_.setMessage(builderForValue.build()); } - + identifierCase_ = 2; return this; } /** - *
-       * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-       * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactTag artifact_tag = 2; */ - public Builder mergeTriggerId(flyteidl.core.IdentifierOuterClass.Identifier value) { - if (triggerIdBuilder_ == null) { - if (triggerId_ != null) { - triggerId_ = - flyteidl.core.IdentifierOuterClass.Identifier.newBuilder(triggerId_).mergeFrom(value).buildPartial(); + public Builder mergeArtifactTag(flyteidl.core.ArtifactId.ArtifactTag value) { + if (artifactTagBuilder_ == null) { + if (identifierCase_ == 2 && + identifier_ != flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance()) { + identifier_ = flyteidl.core.ArtifactId.ArtifactTag.newBuilder((flyteidl.core.ArtifactId.ArtifactTag) identifier_) + .mergeFrom(value).buildPartial(); } else { - triggerId_ = value; + identifier_ = value; } onChanged(); } else { - triggerIdBuilder_.mergeFrom(value); + if (identifierCase_ == 2) { + artifactTagBuilder_.mergeFrom(value); + } + artifactTagBuilder_.setMessage(value); } - + identifierCase_ = 2; return this; } /** - *
-       * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-       * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactTag artifact_tag = 2; */ - public Builder clearTriggerId() { - if (triggerIdBuilder_ == null) { - triggerId_ = null; - onChanged(); + public Builder clearArtifactTag() { + if (artifactTagBuilder_ == null) { + if (identifierCase_ == 2) { + identifierCase_ = 0; + identifier_ = null; + onChanged(); + } } else { - triggerId_ = null; - triggerIdBuilder_ = null; + if (identifierCase_ == 2) { + identifierCase_ = 0; + identifier_ = null; + } + artifactTagBuilder_.clear(); } - return this; } /** - *
-       * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-       * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactTag artifact_tag = 2; */ - public flyteidl.core.IdentifierOuterClass.Identifier.Builder getTriggerIdBuilder() { - - onChanged(); - return getTriggerIdFieldBuilder().getBuilder(); + public flyteidl.core.ArtifactId.ArtifactTag.Builder getArtifactTagBuilder() { + return getArtifactTagFieldBuilder().getBuilder(); } /** - *
-       * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-       * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactTag artifact_tag = 2; */ - public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getTriggerIdOrBuilder() { - if (triggerIdBuilder_ != null) { - return triggerIdBuilder_.getMessageOrBuilder(); + public flyteidl.core.ArtifactId.ArtifactTagOrBuilder getArtifactTagOrBuilder() { + if ((identifierCase_ == 2) && (artifactTagBuilder_ != null)) { + return artifactTagBuilder_.getMessageOrBuilder(); } else { - return triggerId_ == null ? - flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : triggerId_; + if (identifierCase_ == 2) { + return (flyteidl.core.ArtifactId.ArtifactTag) identifier_; + } + return flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); } } /** - *
-       * This will be set to a launch plan type, but note that this is different than the actual launch plan type.
-       * 
- * - * .flyteidl.core.Identifier trigger_id = 1; + * .flyteidl.core.ArtifactTag artifact_tag = 2; */ private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> - getTriggerIdFieldBuilder() { - if (triggerIdBuilder_ == null) { - triggerIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder>( - getTriggerId(), + flyteidl.core.ArtifactId.ArtifactTag, flyteidl.core.ArtifactId.ArtifactTag.Builder, flyteidl.core.ArtifactId.ArtifactTagOrBuilder> + getArtifactTagFieldBuilder() { + if (artifactTagBuilder_ == null) { + if (!(identifierCase_ == 2)) { + identifier_ = flyteidl.core.ArtifactId.ArtifactTag.getDefaultInstance(); + } + artifactTagBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.ArtifactTag, flyteidl.core.ArtifactId.ArtifactTag.Builder, flyteidl.core.ArtifactId.ArtifactTagOrBuilder>( + (flyteidl.core.ArtifactId.ArtifactTag) identifier_, getParentForChildren(), isClean()); - triggerId_ = null; - } - return triggerIdBuilder_; - } - - private java.util.List triggers_ = - java.util.Collections.emptyList(); - private void ensureTriggersIsMutable() { - if (!((bitField0_ & 0x00000002) != 0)) { - triggers_ = new java.util.ArrayList(triggers_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> triggersBuilder_; - - /** - *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
-       * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; - */ - public java.util.List getTriggersList() { - if (triggersBuilder_ == null) { - return java.util.Collections.unmodifiableList(triggers_); - } else { - return triggersBuilder_.getMessageList(); + identifier_ = null; } + identifierCase_ = 2; + onChanged();; + return artifactTagBuilder_; } + /** - *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
-       * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; + * string uri = 3; */ - public int getTriggersCount() { - if (triggersBuilder_ == null) { - return triggers_.size(); + public java.lang.String getUri() { + java.lang.Object ref = ""; + if (identifierCase_ == 3) { + ref = identifier_; + } + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (identifierCase_ == 3) { + identifier_ = s; + } + return s; } else { - return triggersBuilder_.getCount(); + return (java.lang.String) ref; } } /** - *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
-       * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; + * string uri = 3; */ - public flyteidl.core.ArtifactId.ArtifactID getTriggers(int index) { - if (triggersBuilder_ == null) { - return triggers_.get(index); + public com.google.protobuf.ByteString + getUriBytes() { + java.lang.Object ref = ""; + if (identifierCase_ == 3) { + ref = identifier_; + } + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + if (identifierCase_ == 3) { + identifier_ = b; + } + return b; } else { - return triggersBuilder_.getMessage(index); + return (com.google.protobuf.ByteString) ref; } } /** - *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
-       * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; + * string uri = 3; */ - public Builder setTriggers( - int index, flyteidl.core.ArtifactId.ArtifactID value) { - if (triggersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureTriggersIsMutable(); - triggers_.set(index, value); - onChanged(); - } else { - triggersBuilder_.setMessage(index, value); - } + public Builder setUri( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + identifierCase_ = 3; + identifier_ = value; + onChanged(); return this; } /** - *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
-       * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; + * string uri = 3; */ - public Builder setTriggers( - int index, flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { - if (triggersBuilder_ == null) { - ensureTriggersIsMutable(); - triggers_.set(index, builderForValue.build()); + public Builder clearUri() { + if (identifierCase_ == 3) { + identifierCase_ = 0; + identifier_ = null; onChanged(); - } else { - triggersBuilder_.setMessage(index, builderForValue.build()); } return this; } /** - *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
-       * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; + * string uri = 3; */ - public Builder addTriggers(flyteidl.core.ArtifactId.ArtifactID value) { - if (triggersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureTriggersIsMutable(); - triggers_.add(value); - onChanged(); - } else { - triggersBuilder_.addMessage(value); - } + public Builder setUriBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + identifierCase_ = 3; + identifier_ = value; + onChanged(); return this; } + + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.ArtifactBindingData, flyteidl.core.ArtifactId.ArtifactBindingData.Builder, flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder> bindingBuilder_; /** *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
+       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+       * artifacts, or a partition value derived from a triggering artifact.
        * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - public Builder addTriggers( - int index, flyteidl.core.ArtifactId.ArtifactID value) { - if (triggersBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureTriggersIsMutable(); - triggers_.add(index, value); - onChanged(); - } else { - triggersBuilder_.addMessage(index, value); - } - return this; + public boolean hasBinding() { + return identifierCase_ == 4; } /** *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
+       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+       * artifacts, or a partition value derived from a triggering artifact.
        * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - public Builder addTriggers( - flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { - if (triggersBuilder_ == null) { - ensureTriggersIsMutable(); - triggers_.add(builderForValue.build()); - onChanged(); + public flyteidl.core.ArtifactId.ArtifactBindingData getBinding() { + if (bindingBuilder_ == null) { + if (identifierCase_ == 4) { + return (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_; + } + return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); } else { - triggersBuilder_.addMessage(builderForValue.build()); + if (identifierCase_ == 4) { + return bindingBuilder_.getMessage(); + } + return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); } - return this; } /** *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
+       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+       * artifacts, or a partition value derived from a triggering artifact.
        * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - public Builder addTriggers( - int index, flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { - if (triggersBuilder_ == null) { - ensureTriggersIsMutable(); - triggers_.add(index, builderForValue.build()); + public Builder setBinding(flyteidl.core.ArtifactId.ArtifactBindingData value) { + if (bindingBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + identifier_ = value; onChanged(); } else { - triggersBuilder_.addMessage(index, builderForValue.build()); + bindingBuilder_.setMessage(value); } + identifierCase_ = 4; return this; } /** *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
+       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+       * artifacts, or a partition value derived from a triggering artifact.
        * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; - */ - public Builder addAllTriggers( - java.lang.Iterable values) { - if (triggersBuilder_ == null) { - ensureTriggersIsMutable(); - com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, triggers_); + * .flyteidl.core.ArtifactBindingData binding = 4; + */ + public Builder setBinding( + flyteidl.core.ArtifactId.ArtifactBindingData.Builder builderForValue) { + if (bindingBuilder_ == null) { + identifier_ = builderForValue.build(); onChanged(); } else { - triggersBuilder_.addAllMessages(values); + bindingBuilder_.setMessage(builderForValue.build()); } + identifierCase_ = 4; return this; } /** *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
+       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+       * artifacts, or a partition value derived from a triggering artifact.
        * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - public Builder clearTriggers() { - if (triggersBuilder_ == null) { - triggers_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); + public Builder mergeBinding(flyteidl.core.ArtifactId.ArtifactBindingData value) { + if (bindingBuilder_ == null) { + if (identifierCase_ == 4 && + identifier_ != flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance()) { + identifier_ = flyteidl.core.ArtifactId.ArtifactBindingData.newBuilder((flyteidl.core.ArtifactId.ArtifactBindingData) identifier_) + .mergeFrom(value).buildPartial(); + } else { + identifier_ = value; + } onChanged(); } else { - triggersBuilder_.clear(); + if (identifierCase_ == 4) { + bindingBuilder_.mergeFrom(value); + } + bindingBuilder_.setMessage(value); } + identifierCase_ = 4; return this; } /** *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
+       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+       * artifacts, or a partition value derived from a triggering artifact.
        * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - public Builder removeTriggers(int index) { - if (triggersBuilder_ == null) { - ensureTriggersIsMutable(); - triggers_.remove(index); - onChanged(); + public Builder clearBinding() { + if (bindingBuilder_ == null) { + if (identifierCase_ == 4) { + identifierCase_ = 0; + identifier_ = null; + onChanged(); + } } else { - triggersBuilder_.remove(index); + if (identifierCase_ == 4) { + identifierCase_ = 0; + identifier_ = null; + } + bindingBuilder_.clear(); } return this; } /** *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
-       * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; - */ - public flyteidl.core.ArtifactId.ArtifactID.Builder getTriggersBuilder( - int index) { - return getTriggersFieldBuilder().getBuilder(index); - } - /** - *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
+       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+       * artifacts, or a partition value derived from a triggering artifact.
        * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getTriggersOrBuilder( - int index) { - if (triggersBuilder_ == null) { - return triggers_.get(index); } else { - return triggersBuilder_.getMessageOrBuilder(index); - } + public flyteidl.core.ArtifactId.ArtifactBindingData.Builder getBindingBuilder() { + return getBindingFieldBuilder().getBuilder(); } /** *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
+       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+       * artifacts, or a partition value derived from a triggering artifact.
        * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - public java.util.List - getTriggersOrBuilderList() { - if (triggersBuilder_ != null) { - return triggersBuilder_.getMessageOrBuilderList(); + public flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder getBindingOrBuilder() { + if ((identifierCase_ == 4) && (bindingBuilder_ != null)) { + return bindingBuilder_.getMessageOrBuilder(); } else { - return java.util.Collections.unmodifiableList(triggers_); + if (identifierCase_ == 4) { + return (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_; + } + return flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); } } /** *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
-       * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; - */ - public flyteidl.core.ArtifactId.ArtifactID.Builder addTriggersBuilder() { - return getTriggersFieldBuilder().addBuilder( - flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance()); - } - /** - *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
-       * 
- * - * repeated .flyteidl.core.ArtifactID triggers = 2; - */ - public flyteidl.core.ArtifactId.ArtifactID.Builder addTriggersBuilder( - int index) { - return getTriggersFieldBuilder().addBuilder( - index, flyteidl.core.ArtifactId.ArtifactID.getDefaultInstance()); - } - /** - *
-       * These are partial artifact IDs that will be triggered on
-       * Consider making these ArtifactQuery instead.
+       * This is used in the trigger case, where a user specifies a value for an input that is one of the triggering
+       * artifacts, or a partition value derived from a triggering artifact.
        * 
* - * repeated .flyteidl.core.ArtifactID triggers = 2; + * .flyteidl.core.ArtifactBindingData binding = 4; */ - public java.util.List - getTriggersBuilderList() { - return getTriggersFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder> - getTriggersFieldBuilder() { - if (triggersBuilder_ == null) { - triggersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< - flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder>( - triggers_, - ((bitField0_ & 0x00000002) != 0), + private com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.ArtifactBindingData, flyteidl.core.ArtifactId.ArtifactBindingData.Builder, flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder> + getBindingFieldBuilder() { + if (bindingBuilder_ == null) { + if (!(identifierCase_ == 4)) { + identifier_ = flyteidl.core.ArtifactId.ArtifactBindingData.getDefaultInstance(); + } + bindingBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + flyteidl.core.ArtifactId.ArtifactBindingData, flyteidl.core.ArtifactId.ArtifactBindingData.Builder, flyteidl.core.ArtifactId.ArtifactBindingDataOrBuilder>( + (flyteidl.core.ArtifactId.ArtifactBindingData) identifier_, getParentForChildren(), isClean()); - triggers_ = null; + identifier_ = null; } - return triggersBuilder_; + identifierCase_ = 4; + onChanged();; + return bindingBuilder_; } @java.lang.Override public final Builder setUnknownFields( @@ -8365,41 +8517,41 @@ public final Builder mergeUnknownFields( } - // @@protoc_insertion_point(builder_scope:flyteidl.core.Trigger) + // @@protoc_insertion_point(builder_scope:flyteidl.core.ArtifactQuery) } - // @@protoc_insertion_point(class_scope:flyteidl.core.Trigger) - private static final flyteidl.core.ArtifactId.Trigger DEFAULT_INSTANCE; + // 
@@protoc_insertion_point(class_scope:flyteidl.core.ArtifactQuery) + private static final flyteidl.core.ArtifactId.ArtifactQuery DEFAULT_INSTANCE; static { - DEFAULT_INSTANCE = new flyteidl.core.ArtifactId.Trigger(); + DEFAULT_INSTANCE = new flyteidl.core.ArtifactId.ArtifactQuery(); } - public static flyteidl.core.ArtifactId.Trigger getDefaultInstance() { + public static flyteidl.core.ArtifactId.ArtifactQuery getDefaultInstance() { return DEFAULT_INSTANCE; } - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { @java.lang.Override - public Trigger parsePartialFrom( + public ArtifactQuery parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Trigger(input, extensionRegistry); + return new ArtifactQuery(input, extensionRegistry); } }; - public static com.google.protobuf.Parser parser() { + public static com.google.protobuf.Parser parser() { return PARSER; } @java.lang.Override - public com.google.protobuf.Parser getParserForType() { + public com.google.protobuf.Parser getParserForType() { return PARSER; } @java.lang.Override - public flyteidl.core.ArtifactId.Trigger getDefaultInstanceForType() { + public flyteidl.core.ArtifactId.ArtifactQuery getDefaultInstanceForType() { return DEFAULT_INSTANCE; } @@ -8435,6 +8587,11 @@ public flyteidl.core.ArtifactId.Trigger getDefaultInstanceForType() { private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_flyteidl_core_Partitions_ValueEntry_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_core_TimePartition_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + 
internal_static_flyteidl_core_TimePartition_fieldAccessorTable; private static final com.google.protobuf.Descriptors.Descriptor internal_static_flyteidl_core_ArtifactID_descriptor; private static final @@ -8450,11 +8607,6 @@ public flyteidl.core.ArtifactId.Trigger getDefaultInstanceForType() { private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_flyteidl_core_ArtifactQuery_fieldAccessorTable; - private static final com.google.protobuf.Descriptors.Descriptor - internal_static_flyteidl_core_Trigger_descriptor; - private static final - com.google.protobuf.GeneratedMessageV3.FieldAccessorTable - internal_static_flyteidl_core_Trigger_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -8465,35 +8617,39 @@ public flyteidl.core.ArtifactId.Trigger getDefaultInstanceForType() { static { java.lang.String[] descriptorData = { "\n\037flyteidl/core/artifact_id.proto\022\rflyte" + - "idl.core\032\036flyteidl/core/identifier.proto" + - "\"<\n\013ArtifactKey\022\017\n\007project\030\001 \001(\t\022\016\n\006doma" + - "in\030\002 \001(\t\022\014\n\004name\030\003 \001(\t\"N\n\023ArtifactBindin" + - "gData\022\r\n\005index\030\001 \001(\r\022\025\n\rpartition_key\030\002 " + - "\001(\t\022\021\n\ttransform\030\003 \001(\t\"\037\n\020InputBindingDa" + - "ta\022\013\n\003var\030\001 \001(\t\"\250\001\n\nLabelValue\022\026\n\014static" + - "_value\030\001 \001(\tH\000\022?\n\021triggered_binding\030\002 \001(" + - "\0132\".flyteidl.core.ArtifactBindingDataH\000\022" + - "8\n\rinput_binding\030\003 \001(\0132\037.flyteidl.core.I" + - "nputBindingDataH\000B\007\n\005value\"\212\001\n\nPartition" + - "s\0223\n\005value\030\001 \003(\0132$.flyteidl.core.Partiti" + - "ons.ValueEntry\032G\n\nValueEntry\022\013\n\003key\030\001 \001(" + - "\t\022(\n\005value\030\002 \001(\0132\031.flyteidl.core.LabelVa" + - "lue:\0028\001\"\216\001\n\nArtifactID\0220\n\014artifact_key\030\001" + - " 
\001(\0132\032.flyteidl.core.ArtifactKey\022\017\n\007vers" + - "ion\030\002 \001(\t\022/\n\npartitions\030\003 \001(\0132\031.flyteidl" + - ".core.PartitionsH\000B\014\n\ndimensions\"i\n\013Arti" + - "factTag\0220\n\014artifact_key\030\001 \001(\0132\032.flyteidl" + - ".core.ArtifactKey\022(\n\005value\030\002 \001(\0132\031.flyte" + - "idl.core.LabelValue\"\311\001\n\rArtifactQuery\0220\n" + - "\013artifact_id\030\001 \001(\0132\031.flyteidl.core.Artif" + - "actIDH\000\0222\n\014artifact_tag\030\002 \001(\0132\032.flyteidl" + - ".core.ArtifactTagH\000\022\r\n\003uri\030\003 \001(\tH\000\0225\n\007bi" + - "nding\030\004 \001(\0132\".flyteidl.core.ArtifactBind" + - "ingDataH\000B\014\n\nidentifier\"e\n\007Trigger\022-\n\ntr" + - "igger_id\030\001 \001(\0132\031.flyteidl.core.Identifie" + - "r\022+\n\010triggers\030\002 \003(\0132\031.flyteidl.core.Arti" + - "factIDBCACHE_SKIPPED = 6; */ CACHE_SKIPPED(6), + /** + *
+     * Used to indicate that the cache was evicted
+     * 
+ * + * CACHE_EVICTED = 7; + */ + CACHE_EVICTED(7), UNRECOGNIZED(-1), ; @@ -138,6 +146,14 @@ public enum CatalogCacheStatus * CACHE_SKIPPED = 6; */ public static final int CACHE_SKIPPED_VALUE = 6; + /** + *
+     * Used to indicate that the cache was evicted
+     * 
+ * + * CACHE_EVICTED = 7; + */ + public static final int CACHE_EVICTED_VALUE = 7; public final int getNumber() { @@ -165,6 +181,7 @@ public static CatalogCacheStatus forNumber(int value) { case 4: return CACHE_LOOKUP_FAILURE; case 5: return CACHE_PUT_FAILURE; case 6: return CACHE_SKIPPED; + case 7: return CACHE_EVICTED; default: return null; } } @@ -2864,13 +2881,14 @@ public flyteidl.core.Catalog.CatalogReservation getDefaultInstanceForType() { "ogReservation\"\207\001\n\006Status\022\030\n\024RESERVATION_" + "DISABLED\020\000\022\030\n\024RESERVATION_ACQUIRED\020\001\022\026\n\022" + "RESERVATION_EXISTS\020\002\022\030\n\024RESERVATION_RELE" + - "ASED\020\003\022\027\n\023RESERVATION_FAILURE\020\004*\240\001\n\022Cata" + + "ASED\020\003\022\027\n\023RESERVATION_FAILURE\020\004*\263\001\n\022Cata" + "logCacheStatus\022\022\n\016CACHE_DISABLED\020\000\022\016\n\nCA" + "CHE_MISS\020\001\022\r\n\tCACHE_HIT\020\002\022\023\n\017CACHE_POPUL" + "ATED\020\003\022\030\n\024CACHE_LOOKUP_FAILURE\020\004\022\025\n\021CACH" + - "E_PUT_FAILURE\020\005\022\021\n\rCACHE_SKIPPED\020\006B + * Optional, org key applied to the resource. + *
+ * + * string org = 6; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -272,6 +290,7 @@ private Identifier() {
       domain_ = "";
       name_ = "";
       version_ = "";
+      org_ = "";
     }
 
     @java.lang.Override
@@ -328,6 +347,12 @@ private Identifier(
               version_ = s;
               break;
             }
+            case 50: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -555,6 +580,48 @@ public java.lang.String getVersion() {
       }
     }
 
+    public static final int ORG_FIELD_NUMBER = 6;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 6; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -584,6 +651,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getVersionBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, version_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 6, org_); + } unknownFields.writeTo(output); } @@ -609,6 +679,9 @@ public int getSerializedSize() { if (!getVersionBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, version_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -633,6 +706,8 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getName())) return false; if (!getVersion() .equals(other.getVersion())) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -654,6 +729,8 @@ public int hashCode() { hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -801,6 +878,8 @@ public Builder clear() { version_ = ""; + org_ = ""; + return this; } @@ -832,6 +911,7 @@ public flyteidl.core.IdentifierOuterClass.Identifier buildPartial() { 
result.domain_ = domain_; result.name_ = name_; result.version_ = version_; + result.org_ = org_; onBuilt(); return result; } @@ -899,6 +979,10 @@ public Builder mergeFrom(flyteidl.core.IdentifierOuterClass.Identifier other) { version_ = other.version_; onChanged(); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -1353,6 +1437,95 @@ public Builder setVersionBytes( onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 6; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -1465,6 +1638,24 @@ public interface WorkflowExecutionIdentifierOrBuilder extends */ com.google.protobuf.ByteString getNameBytes(); + + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 5; + */ + java.lang.String getOrg(); + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 5; + */ + com.google.protobuf.ByteString + getOrgBytes(); } /** *
@@ -1486,6 +1677,7 @@ private WorkflowExecutionIdentifier() {
       project_ = "";
       domain_ = "";
       name_ = "";
+      org_ = "";
     }
 
     @java.lang.Override
@@ -1530,6 +1722,12 @@ private WorkflowExecutionIdentifier(
               name_ = s;
               break;
             }
+            case 42: {
+              java.lang.String s = input.readStringRequireUtf8();
+
+              org_ = s;
+              break;
+            }
             default: {
               if (!parseUnknownField(
                   input, unknownFields, extensionRegistry, tag)) {
@@ -1690,6 +1888,48 @@ public java.lang.String getName() {
       }
     }
 
+    public static final int ORG_FIELD_NUMBER = 5;
+    private volatile java.lang.Object org_;
+    /**
+     * 
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } + } + /** + *
+     * Optional, org key applied to the resource.
+     * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { @@ -1713,6 +1953,9 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!getNameBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, name_); } + if (!getOrgBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, org_); + } unknownFields.writeTo(output); } @@ -1731,6 +1974,9 @@ public int getSerializedSize() { if (!getNameBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, name_); } + if (!getOrgBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, org_); + } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; @@ -1752,6 +1998,8 @@ public boolean equals(final java.lang.Object obj) { .equals(other.getDomain())) return false; if (!getName() .equals(other.getName())) return false; + if (!getOrg() + .equals(other.getOrg())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @@ -1769,6 +2017,8 @@ public int hashCode() { hash = (53 * hash) + getDomain().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + ORG_FIELD_NUMBER; + hash = (53 * hash) + getOrg().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; @@ -1912,6 +2162,8 @@ public Builder clear() { name_ = ""; + org_ = ""; + return this; } @@ -1941,6 +2193,7 @@ public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier buildParti 
result.project_ = project_; result.domain_ = domain_; result.name_ = name_; + result.org_ = org_; onBuilt(); return result; } @@ -2001,6 +2254,10 @@ public Builder mergeFrom(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIde name_ = other.name_; onChanged(); } + if (!other.getOrg().isEmpty()) { + org_ = other.org_; + onChanged(); + } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; @@ -2301,6 +2558,95 @@ public Builder setNameBytes( onChanged(); return this; } + + private java.lang.Object org_ = ""; + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 5; + */ + public java.lang.String getOrg() { + java.lang.Object ref = org_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + org_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 5; + */ + public com.google.protobuf.ByteString + getOrgBytes() { + java.lang.Object ref = org_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + org_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 5; + */ + public Builder setOrg( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + org_ = value; + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 5; + */ + public Builder clearOrg() { + + org_ = getDefaultInstance().getOrg(); + onChanged(); + return this; + } + /** + *
+       * Optional, org key applied to the resource.
+       * 
+ * + * string org = 5; + */ + public Builder setOrgBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + org_ = value; + onChanged(); + return this; + } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { @@ -4857,25 +5203,26 @@ public flyteidl.core.IdentifierOuterClass.SignalIdentifier getDefaultInstanceFor static { java.lang.String[] descriptorData = { "\n\036flyteidl/core/identifier.proto\022\rflytei" + - "dl.core\"\200\001\n\nIdentifier\0222\n\rresource_type\030" + + "dl.core\"\215\001\n\nIdentifier\0222\n\rresource_type\030" + "\001 \001(\0162\033.flyteidl.core.ResourceType\022\017\n\007pr" + "oject\030\002 \001(\t\022\016\n\006domain\030\003 \001(\t\022\014\n\004name\030\004 \001(" + - "\t\022\017\n\007version\030\005 \001(\t\"L\n\033WorkflowExecutionI" + - "dentifier\022\017\n\007project\030\001 \001(\t\022\016\n\006domain\030\002 \001" + - "(\t\022\014\n\004name\030\004 \001(\t\"l\n\027NodeExecutionIdentif" + - "ier\022\017\n\007node_id\030\001 \001(\t\022@\n\014execution_id\030\002 \001" + - "(\0132*.flyteidl.core.WorkflowExecutionIden" + - "tifier\"\237\001\n\027TaskExecutionIdentifier\022*\n\007ta" + - "sk_id\030\001 \001(\0132\031.flyteidl.core.Identifier\022A" + - "\n\021node_execution_id\030\002 \001(\0132&.flyteidl.cor" + - "e.NodeExecutionIdentifier\022\025\n\rretry_attem" + - "pt\030\003 \001(\r\"g\n\020SignalIdentifier\022\021\n\tsignal_i" + - "d\030\001 \001(\t\022@\n\014execution_id\030\002 \001(\0132*.flyteidl" + - ".core.WorkflowExecutionIdentifier*U\n\014Res" + - "ourceType\022\017\n\013UNSPECIFIED\020\000\022\010\n\004TASK\020\001\022\014\n\010" + - "WORKFLOW\020\002\022\017\n\013LAUNCH_PLAN\020\003\022\013\n\007DATASET\020\004" + - "B + * The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG. + *
+ * + * string metric = 1; + */ + java.lang.String getMetric(); + /** + *
+     * The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG.
+     * 
+ * + * string metric = 1; + */ + com.google.protobuf.ByteString + getMetricBytes(); + + /** + *
+     * The result data in prometheus range query result format
+     * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+     * This may include multiple time series, differentiated by their metric labels.
+     * Start time is greater of (execution attempt start, 48h ago)
+     * End time is lesser of (execution attempt end, now)
+     * 
+ * + * .google.protobuf.Struct data = 2; + */ + boolean hasData(); + /** + *
+     * The result data in prometheus range query result format
+     * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+     * This may include multiple time series, differentiated by their metric labels.
+     * Start time is greater of (execution attempt start, 48h ago)
+     * End time is lesser of (execution attempt end, now)
+     * 
+ * + * .google.protobuf.Struct data = 2; + */ + com.google.protobuf.Struct getData(); + /** + *
+     * The result data in prometheus range query result format
+     * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+     * This may include multiple time series, differentiated by their metric labels.
+     * Start time is greater of (execution attempt start, 48h ago)
+     * End time is lesser of (execution attempt end, now)
+     * 
+ * + * .google.protobuf.Struct data = 2; + */ + com.google.protobuf.StructOrBuilder getDataOrBuilder(); + } + /** + *
+   * ExecutionMetrics is a collection of metrics that are collected during the execution of a Flyte task.
+   * 
+ * + * Protobuf type {@code flyteidl.core.ExecutionMetricResult} + */ + public static final class ExecutionMetricResult extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:flyteidl.core.ExecutionMetricResult) + ExecutionMetricResultOrBuilder { + private static final long serialVersionUID = 0L; + // Use ExecutionMetricResult.newBuilder() to construct. + private ExecutionMetricResult(com.google.protobuf.GeneratedMessageV3.Builder builder) { + super(builder); + } + private ExecutionMetricResult() { + metric_ = ""; + } + + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ExecutionMetricResult( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + this(); + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 10: { + java.lang.String s = input.readStringRequireUtf8(); + + metric_ = s; + break; + } + case 18: { + com.google.protobuf.Struct.Builder subBuilder = null; + if (data_ != null) { + subBuilder = data_.toBuilder(); + } + data_ = input.readMessage(com.google.protobuf.Struct.parser(), extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(data_); + data_ = subBuilder.buildPartial(); + } + + break; + } + default: { + if (!parseUnknownField( + input, unknownFields, extensionRegistry, tag)) { + done = true; + } + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.core.Metrics.internal_static_flyteidl_core_ExecutionMetricResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.core.Metrics.internal_static_flyteidl_core_ExecutionMetricResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.core.Metrics.ExecutionMetricResult.class, flyteidl.core.Metrics.ExecutionMetricResult.Builder.class); + } + + public static final int METRIC_FIELD_NUMBER = 1; + private volatile java.lang.Object metric_; + /** + *
+     * The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG.
+     * 
+ * + * string metric = 1; + */ + public java.lang.String getMetric() { + java.lang.Object ref = metric_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + metric_ = s; + return s; + } + } + /** + *
+     * The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG.
+     * 
+ * + * string metric = 1; + */ + public com.google.protobuf.ByteString + getMetricBytes() { + java.lang.Object ref = metric_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + metric_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int DATA_FIELD_NUMBER = 2; + private com.google.protobuf.Struct data_; + /** + *
+     * The result data in prometheus range query result format
+     * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+     * This may include multiple time series, differentiated by their metric labels.
+     * Start time is greater of (execution attempt start, 48h ago)
+     * End time is lesser of (execution attempt end, now)
+     * 
+ * + * .google.protobuf.Struct data = 2; + */ + public boolean hasData() { + return data_ != null; + } + /** + *
+     * The result data in prometheus range query result format
+     * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+     * This may include multiple time series, differentiated by their metric labels.
+     * Start time is greater of (execution attempt start, 48h ago)
+     * End time is lesser of (execution attempt end, now)
+     * 
+ * + * .google.protobuf.Struct data = 2; + */ + public com.google.protobuf.Struct getData() { + return data_ == null ? com.google.protobuf.Struct.getDefaultInstance() : data_; + } + /** + *
+     * The result data in prometheus range query result format
+     * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+     * This may include multiple time series, differentiated by their metric labels.
+     * Start time is greater of (execution attempt start, 48h ago)
+     * End time is lesser of (execution attempt end, now)
+     * 
+ * + * .google.protobuf.Struct data = 2; + */ + public com.google.protobuf.StructOrBuilder getDataOrBuilder() { + return getData(); + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (!getMetricBytes().isEmpty()) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 1, metric_); + } + if (data_ != null) { + output.writeMessage(2, getData()); + } + unknownFields.writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (!getMetricBytes().isEmpty()) { + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, metric_); + } + if (data_ != null) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getData()); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof flyteidl.core.Metrics.ExecutionMetricResult)) { + return super.equals(obj); + } + flyteidl.core.Metrics.ExecutionMetricResult other = (flyteidl.core.Metrics.ExecutionMetricResult) obj; + + if (!getMetric() + .equals(other.getMetric())) return false; + if (hasData() != other.hasData()) return false; + if (hasData()) { + if (!getData() + .equals(other.getData())) return false; + } + if (!unknownFields.equals(other.unknownFields)) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + 
hash = (37 * hash) + METRIC_FIELD_NUMBER; + hash = (53 * hash) + getMetric().hashCode(); + if (hasData()) { + hash = (37 * hash) + DATA_FIELD_NUMBER; + hash = (53 * hash) + getData().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static flyteidl.core.Metrics.ExecutionMetricResult parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.core.Metrics.ExecutionMetricResult parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.core.Metrics.ExecutionMetricResult parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.core.Metrics.ExecutionMetricResult parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.core.Metrics.ExecutionMetricResult parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static flyteidl.core.Metrics.ExecutionMetricResult parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static flyteidl.core.Metrics.ExecutionMetricResult parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static 
flyteidl.core.Metrics.ExecutionMetricResult parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.core.Metrics.ExecutionMetricResult parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static flyteidl.core.Metrics.ExecutionMetricResult parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static flyteidl.core.Metrics.ExecutionMetricResult parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static flyteidl.core.Metrics.ExecutionMetricResult parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(flyteidl.core.Metrics.ExecutionMetricResult prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + *
+     * ExecutionMetrics is a collection of metrics that are collected during the execution of a Flyte task.
+     * 
+ * + * Protobuf type {@code flyteidl.core.ExecutionMetricResult} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessageV3.Builder implements + // @@protoc_insertion_point(builder_implements:flyteidl.core.ExecutionMetricResult) + flyteidl.core.Metrics.ExecutionMetricResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return flyteidl.core.Metrics.internal_static_flyteidl_core_ExecutionMetricResult_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return flyteidl.core.Metrics.internal_static_flyteidl_core_ExecutionMetricResult_fieldAccessorTable + .ensureFieldAccessorsInitialized( + flyteidl.core.Metrics.ExecutionMetricResult.class, flyteidl.core.Metrics.ExecutionMetricResult.Builder.class); + } + + // Construct using flyteidl.core.Metrics.ExecutionMetricResult.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + } + } + @java.lang.Override + public Builder clear() { + super.clear(); + metric_ = ""; + + if (dataBuilder_ == null) { + data_ = null; + } else { + data_ = null; + dataBuilder_ = null; + } + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return flyteidl.core.Metrics.internal_static_flyteidl_core_ExecutionMetricResult_descriptor; + } + + @java.lang.Override + public flyteidl.core.Metrics.ExecutionMetricResult getDefaultInstanceForType() { + return flyteidl.core.Metrics.ExecutionMetricResult.getDefaultInstance(); + } + + @java.lang.Override + public flyteidl.core.Metrics.ExecutionMetricResult build() { + 
flyteidl.core.Metrics.ExecutionMetricResult result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public flyteidl.core.Metrics.ExecutionMetricResult buildPartial() { + flyteidl.core.Metrics.ExecutionMetricResult result = new flyteidl.core.Metrics.ExecutionMetricResult(this); + result.metric_ = metric_; + if (dataBuilder_ == null) { + result.data_ = data_; + } else { + result.data_ = dataBuilder_.build(); + } + onBuilt(); + return result; + } + + @java.lang.Override + public Builder clone() { + return super.clone(); + } + @java.lang.Override + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.setField(field, value); + } + @java.lang.Override + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return super.clearField(field); + } + @java.lang.Override + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return super.clearOneof(oneof); + } + @java.lang.Override + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, java.lang.Object value) { + return super.setRepeatedField(field, index, value); + } + @java.lang.Override + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + java.lang.Object value) { + return super.addRepeatedField(field, value); + } + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof flyteidl.core.Metrics.ExecutionMetricResult) { + return mergeFrom((flyteidl.core.Metrics.ExecutionMetricResult)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(flyteidl.core.Metrics.ExecutionMetricResult other) { + if (other == flyteidl.core.Metrics.ExecutionMetricResult.getDefaultInstance()) return this; + if 
(!other.getMetric().isEmpty()) { + metric_ = other.metric_; + onChanged(); + } + if (other.hasData()) { + mergeData(other.getData()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + flyteidl.core.Metrics.ExecutionMetricResult parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (flyteidl.core.Metrics.ExecutionMetricResult) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + + private java.lang.Object metric_ = ""; + /** + *
+       * The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG.
+       * 
+ * + * string metric = 1; + */ + public java.lang.String getMetric() { + java.lang.Object ref = metric_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + metric_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + *
+       * The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG.
+       * 
+ * + * string metric = 1; + */ + public com.google.protobuf.ByteString + getMetricBytes() { + java.lang.Object ref = metric_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + metric_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + *
+       * The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG.
+       * 
+ * + * string metric = 1; + */ + public Builder setMetric( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + + metric_ = value; + onChanged(); + return this; + } + /** + *
+       * The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG.
+       * 
+ * + * string metric = 1; + */ + public Builder clearMetric() { + + metric_ = getDefaultInstance().getMetric(); + onChanged(); + return this; + } + /** + *
+       * The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG.
+       * 
+ * + * string metric = 1; + */ + public Builder setMetricBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + checkByteStringIsUtf8(value); + + metric_ = value; + onChanged(); + return this; + } + + private com.google.protobuf.Struct data_; + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder> dataBuilder_; + /** + *
+       * The result data in prometheus range query result format
+       * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+       * This may include multiple time series, differentiated by their metric labels.
+       * Start time is greater of (execution attempt start, 48h ago)
+       * End time is lesser of (execution attempt end, now)
+       * 
+ * + * .google.protobuf.Struct data = 2; + */ + public boolean hasData() { + return dataBuilder_ != null || data_ != null; + } + /** + *
+       * The result data in prometheus range query result format
+       * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+       * This may include multiple time series, differentiated by their metric labels.
+       * Start time is greater of (execution attempt start, 48h ago)
+       * End time is lesser of (execution attempt end, now)
+       * 
+ * + * .google.protobuf.Struct data = 2; + */ + public com.google.protobuf.Struct getData() { + if (dataBuilder_ == null) { + return data_ == null ? com.google.protobuf.Struct.getDefaultInstance() : data_; + } else { + return dataBuilder_.getMessage(); + } + } + /** + *
+       * The result data in prometheus range query result format
+       * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+       * This may include multiple time series, differentiated by their metric labels.
+       * Start time is greater of (execution attempt start, 48h ago)
+       * End time is lesser of (execution attempt end, now)
+       * 
+ * + * .google.protobuf.Struct data = 2; + */ + public Builder setData(com.google.protobuf.Struct value) { + if (dataBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + data_ = value; + onChanged(); + } else { + dataBuilder_.setMessage(value); + } + + return this; + } + /** + *
+       * The result data in prometheus range query result format
+       * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+       * This may include multiple time series, differentiated by their metric labels.
+       * Start time is greater of (execution attempt start, 48h ago)
+       * End time is lesser of (execution attempt end, now)
+       * 
+ * + * .google.protobuf.Struct data = 2; + */ + public Builder setData( + com.google.protobuf.Struct.Builder builderForValue) { + if (dataBuilder_ == null) { + data_ = builderForValue.build(); + onChanged(); + } else { + dataBuilder_.setMessage(builderForValue.build()); + } + + return this; + } + /** + *
+       * The result data in prometheus range query result format
+       * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+       * This may include multiple time series, differentiated by their metric labels.
+       * Start time is greater of (execution attempt start, 48h ago)
+       * End time is lesser of (execution attempt end, now)
+       * 
+ * + * .google.protobuf.Struct data = 2; + */ + public Builder mergeData(com.google.protobuf.Struct value) { + if (dataBuilder_ == null) { + if (data_ != null) { + data_ = + com.google.protobuf.Struct.newBuilder(data_).mergeFrom(value).buildPartial(); + } else { + data_ = value; + } + onChanged(); + } else { + dataBuilder_.mergeFrom(value); + } + + return this; + } + /** + *
+       * The result data in prometheus range query result format
+       * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+       * This may include multiple time series, differentiated by their metric labels.
+       * Start time is greater of (execution attempt start, 48h ago)
+       * End time is lesser of (execution attempt end, now)
+       * 
+ * + * .google.protobuf.Struct data = 2; + */ + public Builder clearData() { + if (dataBuilder_ == null) { + data_ = null; + onChanged(); + } else { + data_ = null; + dataBuilder_ = null; + } + + return this; + } + /** + *
+       * The result data in prometheus range query result format
+       * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+       * This may include multiple time series, differentiated by their metric labels.
+       * Start time is greater of (execution attempt start, 48h ago)
+       * End time is lesser of (execution attempt end, now)
+       * 
+ * + * .google.protobuf.Struct data = 2; + */ + public com.google.protobuf.Struct.Builder getDataBuilder() { + + onChanged(); + return getDataFieldBuilder().getBuilder(); + } + /** + *
+       * The result data in prometheus range query result format
+       * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+       * This may include multiple time series, differentiated by their metric labels.
+       * Start time is greater of (execution attempt start, 48h ago)
+       * End time is lesser of (execution attempt end, now)
+       * 
+ * + * .google.protobuf.Struct data = 2; + */ + public com.google.protobuf.StructOrBuilder getDataOrBuilder() { + if (dataBuilder_ != null) { + return dataBuilder_.getMessageOrBuilder(); + } else { + return data_ == null ? + com.google.protobuf.Struct.getDefaultInstance() : data_; + } + } + /** + *
+       * The result data in prometheus range query result format
+       * https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats.
+       * This may include multiple time series, differentiated by their metric labels.
+       * Start time is greater of (execution attempt start, 48h ago)
+       * End time is lesser of (execution attempt end, now)
+       * 
+ * + * .google.protobuf.Struct data = 2; + */ + private com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder> + getDataFieldBuilder() { + if (dataBuilder_ == null) { + dataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< + com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder>( + getData(), + getParentForChildren(), + isClean()); + data_ = null; + } + return dataBuilder_; + } + @java.lang.Override + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + @java.lang.Override + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:flyteidl.core.ExecutionMetricResult) + } + + // @@protoc_insertion_point(class_scope:flyteidl.core.ExecutionMetricResult) + private static final flyteidl.core.Metrics.ExecutionMetricResult DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new flyteidl.core.Metrics.ExecutionMetricResult(); + } + + public static flyteidl.core.Metrics.ExecutionMetricResult getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ExecutionMetricResult parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExecutionMetricResult(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public 
flyteidl.core.Metrics.ExecutionMetricResult getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + private static final com.google.protobuf.Descriptors.Descriptor internal_static_flyteidl_core_Span_descriptor; private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_flyteidl_core_Span_fieldAccessorTable; + private static final com.google.protobuf.Descriptors.Descriptor + internal_static_flyteidl_core_ExecutionMetricResult_descriptor; + private static final + com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internal_static_flyteidl_core_ExecutionMetricResult_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -2545,18 +3457,21 @@ public flyteidl.core.Metrics.Span getDefaultInstanceForType() { java.lang.String[] descriptorData = { "\n\033flyteidl/core/metrics.proto\022\rflyteidl." + "core\032\036flyteidl/core/identifier.proto\032\037go" + - "ogle/protobuf/timestamp.proto\"\337\002\n\004Span\022." 
+ - "\n\nstart_time\030\001 \001(\0132\032.google.protobuf.Tim" + - "estamp\022,\n\010end_time\030\002 \001(\0132\032.google.protob" + - "uf.Timestamp\022A\n\013workflow_id\030\003 \001(\0132*.flyt" + - "eidl.core.WorkflowExecutionIdentifierH\000\022" + - "9\n\007node_id\030\004 \001(\0132&.flyteidl.core.NodeExe" + - "cutionIdentifierH\000\0229\n\007task_id\030\005 \001(\0132&.fl" + - "yteidl.core.TaskExecutionIdentifierH\000\022\026\n" + - "\014operation_id\030\006 \001(\tH\000\022\"\n\005spans\030\007 \003(\0132\023.f" + - "lyteidl.core.SpanB\004\n\002idB.flyteidl.core.LiteralMap output_data = 2; - */ - boolean hasOutputData(); - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - flyteidl.core.Literals.LiteralMap getOutputData(); - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - flyteidl.core.Literals.LiteralMapOrBuilder getOutputDataOrBuilder(); - - /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ boolean hasOutputInterface(); /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ flyteidl.core.Interface.TypedInterface getOutputInterface(); /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ flyteidl.core.Interface.TypedInterfaceOrBuilder getOutputInterfaceOrBuilder(); - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - boolean hasInputData(); - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - flyteidl.core.Literals.LiteralMap getInputData(); - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - flyteidl.core.Literals.LiteralMapOrBuilder getInputDataOrBuilder(); - /** *
      * The following are ExecutionMetadata fields
      * We can't have the ExecutionMetadata object directly because of import cycle
      * 
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ java.util.List getArtifactIdsList(); @@ -86,7 +60,7 @@ public interface CloudEventWorkflowExecutionOrBuilder extends * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index); /** @@ -95,7 +69,7 @@ public interface CloudEventWorkflowExecutionOrBuilder extends * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ int getArtifactIdsCount(); /** @@ -104,7 +78,7 @@ public interface CloudEventWorkflowExecutionOrBuilder extends * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ java.util.List getArtifactIdsOrBuilderList(); @@ -114,30 +88,30 @@ public interface CloudEventWorkflowExecutionOrBuilder extends * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( int index); /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ boolean hasReferenceExecution(); /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getReferenceExecution(); /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getReferenceExecutionOrBuilder(); /** - * string principal = 7; + * string principal = 5; */ java.lang.String getPrincipal(); /** - * string principal = 7; + * string principal = 5; */ com.google.protobuf.ByteString getPrincipalBytes(); @@ -149,7 +123,7 @@ flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. *
* - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ boolean hasLaunchPlanId(); /** @@ -159,7 +133,7 @@ flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. *
* - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ flyteidl.core.IdentifierOuterClass.Identifier getLaunchPlanId(); /** @@ -169,7 +143,7 @@ flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. *
* - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getLaunchPlanIdOrBuilder(); } @@ -233,19 +207,6 @@ private CloudEventWorkflowExecution( break; } case 18: { - flyteidl.core.Literals.LiteralMap.Builder subBuilder = null; - if (outputData_ != null) { - subBuilder = outputData_.toBuilder(); - } - outputData_ = input.readMessage(flyteidl.core.Literals.LiteralMap.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(outputData_); - outputData_ = subBuilder.buildPartial(); - } - - break; - } - case 26: { flyteidl.core.Interface.TypedInterface.Builder subBuilder = null; if (outputInterface_ != null) { subBuilder = outputInterface_.toBuilder(); @@ -258,29 +219,16 @@ private CloudEventWorkflowExecution( break; } - case 34: { - flyteidl.core.Literals.LiteralMap.Builder subBuilder = null; - if (inputData_ != null) { - subBuilder = inputData_.toBuilder(); - } - inputData_ = input.readMessage(flyteidl.core.Literals.LiteralMap.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(inputData_); - inputData_ = subBuilder.buildPartial(); - } - - break; - } - case 42: { - if (!((mutable_bitField0_ & 0x00000010) != 0)) { + case 26: { + if (!((mutable_bitField0_ & 0x00000004) != 0)) { artifactIds_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000010; + mutable_bitField0_ |= 0x00000004; } artifactIds_.add( input.readMessage(flyteidl.core.ArtifactId.ArtifactID.parser(), extensionRegistry)); break; } - case 50: { + case 34: { flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder subBuilder = null; if (referenceExecution_ != null) { subBuilder = referenceExecution_.toBuilder(); @@ -293,13 +241,13 @@ private CloudEventWorkflowExecution( break; } - case 58: { + case 42: { java.lang.String s = input.readStringRequireUtf8(); principal_ = s; break; } - case 66: { + case 50: { 
flyteidl.core.IdentifierOuterClass.Identifier.Builder subBuilder = null; if (launchPlanId_ != null) { subBuilder = launchPlanId_.toBuilder(); @@ -327,7 +275,7 @@ private CloudEventWorkflowExecution( throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { - if (((mutable_bitField0_ & 0x00000010) != 0)) { + if (((mutable_bitField0_ & 0x00000004) != 0)) { artifactIds_ = java.util.Collections.unmodifiableList(artifactIds_); } this.unknownFields = unknownFields.build(); @@ -369,70 +317,28 @@ public flyteidl.event.Event.WorkflowExecutionEventOrBuilder getRawEventOrBuilder return getRawEvent(); } - public static final int OUTPUT_DATA_FIELD_NUMBER = 2; - private flyteidl.core.Literals.LiteralMap outputData_; - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - public boolean hasOutputData() { - return outputData_ != null; - } - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - public flyteidl.core.Literals.LiteralMap getOutputData() { - return outputData_ == null ? flyteidl.core.Literals.LiteralMap.getDefaultInstance() : outputData_; - } - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - public flyteidl.core.Literals.LiteralMapOrBuilder getOutputDataOrBuilder() { - return getOutputData(); - } - - public static final int OUTPUT_INTERFACE_FIELD_NUMBER = 3; + public static final int OUTPUT_INTERFACE_FIELD_NUMBER = 2; private flyteidl.core.Interface.TypedInterface outputInterface_; /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ public boolean hasOutputInterface() { return outputInterface_ != null; } /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ public flyteidl.core.Interface.TypedInterface getOutputInterface() { return outputInterface_ == null ? 
flyteidl.core.Interface.TypedInterface.getDefaultInstance() : outputInterface_; } /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ public flyteidl.core.Interface.TypedInterfaceOrBuilder getOutputInterfaceOrBuilder() { return getOutputInterface(); } - public static final int INPUT_DATA_FIELD_NUMBER = 4; - private flyteidl.core.Literals.LiteralMap inputData_; - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - public boolean hasInputData() { - return inputData_ != null; - } - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - public flyteidl.core.Literals.LiteralMap getInputData() { - return inputData_ == null ? flyteidl.core.Literals.LiteralMap.getDefaultInstance() : inputData_; - } - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - public flyteidl.core.Literals.LiteralMapOrBuilder getInputDataOrBuilder() { - return getInputData(); - } - - public static final int ARTIFACT_IDS_FIELD_NUMBER = 5; + public static final int ARTIFACT_IDS_FIELD_NUMBER = 3; private java.util.List artifactIds_; /** *
@@ -440,7 +346,7 @@ public flyteidl.core.Literals.LiteralMapOrBuilder getInputDataOrBuilder() {
      * We can't have the ExecutionMetadata object directly because of import cycle
      * 
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public java.util.List getArtifactIdsList() { return artifactIds_; @@ -451,7 +357,7 @@ public java.util.List getArtifactIdsList() * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public java.util.List getArtifactIdsOrBuilderList() { @@ -463,7 +369,7 @@ public java.util.List getArtifactIdsList() * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public int getArtifactIdsCount() { return artifactIds_.size(); @@ -474,7 +380,7 @@ public int getArtifactIdsCount() { * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index) { return artifactIds_.get(index); @@ -485,38 +391,38 @@ public flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index) { * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( int index) { return artifactIds_.get(index); } - public static final int REFERENCE_EXECUTION_FIELD_NUMBER = 6; + public static final int REFERENCE_EXECUTION_FIELD_NUMBER = 4; private flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier referenceExecution_; /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ public boolean hasReferenceExecution() { return referenceExecution_ != null; } /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getReferenceExecution() { return referenceExecution_ == null ? flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.getDefaultInstance() : referenceExecution_; } /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getReferenceExecutionOrBuilder() { return getReferenceExecution(); } - public static final int PRINCIPAL_FIELD_NUMBER = 7; + public static final int PRINCIPAL_FIELD_NUMBER = 5; private volatile java.lang.Object principal_; /** - * string principal = 7; + * string principal = 5; */ public java.lang.String getPrincipal() { java.lang.Object ref = principal_; @@ -531,7 +437,7 @@ public java.lang.String getPrincipal() { } } /** - * string principal = 7; + * string principal = 5; */ public com.google.protobuf.ByteString getPrincipalBytes() { @@ -547,7 +453,7 @@ public java.lang.String getPrincipal() { } } - public static final int LAUNCH_PLAN_ID_FIELD_NUMBER = 8; + public static 
final int LAUNCH_PLAN_ID_FIELD_NUMBER = 6; private flyteidl.core.IdentifierOuterClass.Identifier launchPlanId_; /** *
@@ -556,7 +462,7 @@ public java.lang.String getPrincipal() {
      * Launch plan IDs are easier to get than workflow IDs so we'll use these for now.
      * 
* - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public boolean hasLaunchPlanId() { return launchPlanId_ != null; @@ -568,7 +474,7 @@ public boolean hasLaunchPlanId() { * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. *
* - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public flyteidl.core.IdentifierOuterClass.Identifier getLaunchPlanId() { return launchPlanId_ == null ? flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : launchPlanId_; @@ -580,7 +486,7 @@ public flyteidl.core.IdentifierOuterClass.Identifier getLaunchPlanId() { * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. *
* - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getLaunchPlanIdOrBuilder() { return getLaunchPlanId(); @@ -603,26 +509,20 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (rawEvent_ != null) { output.writeMessage(1, getRawEvent()); } - if (outputData_ != null) { - output.writeMessage(2, getOutputData()); - } if (outputInterface_ != null) { - output.writeMessage(3, getOutputInterface()); - } - if (inputData_ != null) { - output.writeMessage(4, getInputData()); + output.writeMessage(2, getOutputInterface()); } for (int i = 0; i < artifactIds_.size(); i++) { - output.writeMessage(5, artifactIds_.get(i)); + output.writeMessage(3, artifactIds_.get(i)); } if (referenceExecution_ != null) { - output.writeMessage(6, getReferenceExecution()); + output.writeMessage(4, getReferenceExecution()); } if (!getPrincipalBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 7, principal_); + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, principal_); } if (launchPlanId_ != null) { - output.writeMessage(8, getLaunchPlanId()); + output.writeMessage(6, getLaunchPlanId()); } unknownFields.writeTo(output); } @@ -637,32 +537,24 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getRawEvent()); } - if (outputData_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, getOutputData()); - } if (outputInterface_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, getOutputInterface()); - } - if (inputData_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, getInputData()); + .computeMessageSize(2, getOutputInterface()); } for (int i = 0; i < artifactIds_.size(); i++) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, artifactIds_.get(i)); + 
.computeMessageSize(3, artifactIds_.get(i)); } if (referenceExecution_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(6, getReferenceExecution()); + .computeMessageSize(4, getReferenceExecution()); } if (!getPrincipalBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, principal_); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, principal_); } if (launchPlanId_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(8, getLaunchPlanId()); + .computeMessageSize(6, getLaunchPlanId()); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -684,21 +576,11 @@ public boolean equals(final java.lang.Object obj) { if (!getRawEvent() .equals(other.getRawEvent())) return false; } - if (hasOutputData() != other.hasOutputData()) return false; - if (hasOutputData()) { - if (!getOutputData() - .equals(other.getOutputData())) return false; - } if (hasOutputInterface() != other.hasOutputInterface()) return false; if (hasOutputInterface()) { if (!getOutputInterface() .equals(other.getOutputInterface())) return false; } - if (hasInputData() != other.hasInputData()) return false; - if (hasInputData()) { - if (!getInputData() - .equals(other.getInputData())) return false; - } if (!getArtifactIdsList() .equals(other.getArtifactIdsList())) return false; if (hasReferenceExecution() != other.hasReferenceExecution()) return false; @@ -728,18 +610,10 @@ public int hashCode() { hash = (37 * hash) + RAW_EVENT_FIELD_NUMBER; hash = (53 * hash) + getRawEvent().hashCode(); } - if (hasOutputData()) { - hash = (37 * hash) + OUTPUT_DATA_FIELD_NUMBER; - hash = (53 * hash) + getOutputData().hashCode(); - } if (hasOutputInterface()) { hash = (37 * hash) + OUTPUT_INTERFACE_FIELD_NUMBER; hash = (53 * hash) + getOutputInterface().hashCode(); } - if (hasInputData()) { - hash = (37 * hash) + INPUT_DATA_FIELD_NUMBER; - hash = (53 * hash) + getInputData().hashCode(); - } if 
(getArtifactIdsCount() > 0) { hash = (37 * hash) + ARTIFACT_IDS_FIELD_NUMBER; hash = (53 * hash) + getArtifactIdsList().hashCode(); @@ -899,27 +773,15 @@ public Builder clear() { rawEvent_ = null; rawEventBuilder_ = null; } - if (outputDataBuilder_ == null) { - outputData_ = null; - } else { - outputData_ = null; - outputDataBuilder_ = null; - } if (outputInterfaceBuilder_ == null) { outputInterface_ = null; } else { outputInterface_ = null; outputInterfaceBuilder_ = null; } - if (inputDataBuilder_ == null) { - inputData_ = null; - } else { - inputData_ = null; - inputDataBuilder_ = null; - } if (artifactIdsBuilder_ == null) { artifactIds_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000004); } else { artifactIdsBuilder_.clear(); } @@ -970,25 +832,15 @@ public flyteidl.event.Cloudevents.CloudEventWorkflowExecution buildPartial() { } else { result.rawEvent_ = rawEventBuilder_.build(); } - if (outputDataBuilder_ == null) { - result.outputData_ = outputData_; - } else { - result.outputData_ = outputDataBuilder_.build(); - } if (outputInterfaceBuilder_ == null) { result.outputInterface_ = outputInterface_; } else { result.outputInterface_ = outputInterfaceBuilder_.build(); } - if (inputDataBuilder_ == null) { - result.inputData_ = inputData_; - } else { - result.inputData_ = inputDataBuilder_.build(); - } if (artifactIdsBuilder_ == null) { - if (((bitField0_ & 0x00000010) != 0)) { + if (((bitField0_ & 0x00000004) != 0)) { artifactIds_ = java.util.Collections.unmodifiableList(artifactIds_); - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000004); } result.artifactIds_ = artifactIds_; } else { @@ -1057,20 +909,14 @@ public Builder mergeFrom(flyteidl.event.Cloudevents.CloudEventWorkflowExecution if (other.hasRawEvent()) { mergeRawEvent(other.getRawEvent()); } - if (other.hasOutputData()) { - mergeOutputData(other.getOutputData()); - } if (other.hasOutputInterface()) { 
mergeOutputInterface(other.getOutputInterface()); } - if (other.hasInputData()) { - mergeInputData(other.getInputData()); - } if (artifactIdsBuilder_ == null) { if (!other.artifactIds_.isEmpty()) { if (artifactIds_.isEmpty()) { artifactIds_ = other.artifactIds_; - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000004); } else { ensureArtifactIdsIsMutable(); artifactIds_.addAll(other.artifactIds_); @@ -1083,7 +929,7 @@ public Builder mergeFrom(flyteidl.event.Cloudevents.CloudEventWorkflowExecution artifactIdsBuilder_.dispose(); artifactIdsBuilder_ = null; artifactIds_ = other.artifactIds_; - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000004); artifactIdsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getArtifactIdsFieldBuilder() : null; @@ -1249,134 +1095,17 @@ public flyteidl.event.Event.WorkflowExecutionEventOrBuilder getRawEventOrBuilder return rawEventBuilder_; } - private flyteidl.core.Literals.LiteralMap outputData_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder> outputDataBuilder_; - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - public boolean hasOutputData() { - return outputDataBuilder_ != null || outputData_ != null; - } - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - public flyteidl.core.Literals.LiteralMap getOutputData() { - if (outputDataBuilder_ == null) { - return outputData_ == null ? 
flyteidl.core.Literals.LiteralMap.getDefaultInstance() : outputData_; - } else { - return outputDataBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - public Builder setOutputData(flyteidl.core.Literals.LiteralMap value) { - if (outputDataBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - outputData_ = value; - onChanged(); - } else { - outputDataBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - public Builder setOutputData( - flyteidl.core.Literals.LiteralMap.Builder builderForValue) { - if (outputDataBuilder_ == null) { - outputData_ = builderForValue.build(); - onChanged(); - } else { - outputDataBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - public Builder mergeOutputData(flyteidl.core.Literals.LiteralMap value) { - if (outputDataBuilder_ == null) { - if (outputData_ != null) { - outputData_ = - flyteidl.core.Literals.LiteralMap.newBuilder(outputData_).mergeFrom(value).buildPartial(); - } else { - outputData_ = value; - } - onChanged(); - } else { - outputDataBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - public Builder clearOutputData() { - if (outputDataBuilder_ == null) { - outputData_ = null; - onChanged(); - } else { - outputData_ = null; - outputDataBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - public flyteidl.core.Literals.LiteralMap.Builder getOutputDataBuilder() { - - onChanged(); - return getOutputDataFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - public flyteidl.core.Literals.LiteralMapOrBuilder getOutputDataOrBuilder() { - if (outputDataBuilder_ != null) { - return outputDataBuilder_.getMessageOrBuilder(); - } else { - return outputData_ == null ? 
- flyteidl.core.Literals.LiteralMap.getDefaultInstance() : outputData_; - } - } - /** - * .flyteidl.core.LiteralMap output_data = 2; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder> - getOutputDataFieldBuilder() { - if (outputDataBuilder_ == null) { - outputDataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder>( - getOutputData(), - getParentForChildren(), - isClean()); - outputData_ = null; - } - return outputDataBuilder_; - } - private flyteidl.core.Interface.TypedInterface outputInterface_; private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.Interface.TypedInterface, flyteidl.core.Interface.TypedInterface.Builder, flyteidl.core.Interface.TypedInterfaceOrBuilder> outputInterfaceBuilder_; /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ public boolean hasOutputInterface() { return outputInterfaceBuilder_ != null || outputInterface_ != null; } /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ public flyteidl.core.Interface.TypedInterface getOutputInterface() { if (outputInterfaceBuilder_ == null) { @@ -1386,7 +1115,7 @@ public flyteidl.core.Interface.TypedInterface getOutputInterface() { } } /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ public Builder setOutputInterface(flyteidl.core.Interface.TypedInterface value) { if (outputInterfaceBuilder_ == null) { @@ -1402,7 +1131,7 @@ public Builder setOutputInterface(flyteidl.core.Interface.TypedInterface value) return this; } /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ 
public Builder setOutputInterface( flyteidl.core.Interface.TypedInterface.Builder builderForValue) { @@ -1416,7 +1145,7 @@ public Builder setOutputInterface( return this; } /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ public Builder mergeOutputInterface(flyteidl.core.Interface.TypedInterface value) { if (outputInterfaceBuilder_ == null) { @@ -1434,7 +1163,7 @@ public Builder mergeOutputInterface(flyteidl.core.Interface.TypedInterface value return this; } /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ public Builder clearOutputInterface() { if (outputInterfaceBuilder_ == null) { @@ -1448,7 +1177,7 @@ public Builder clearOutputInterface() { return this; } /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ public flyteidl.core.Interface.TypedInterface.Builder getOutputInterfaceBuilder() { @@ -1456,7 +1185,7 @@ public flyteidl.core.Interface.TypedInterface.Builder getOutputInterfaceBuilder( return getOutputInterfaceFieldBuilder().getBuilder(); } /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ public flyteidl.core.Interface.TypedInterfaceOrBuilder getOutputInterfaceOrBuilder() { if (outputInterfaceBuilder_ != null) { @@ -1467,7 +1196,7 @@ public flyteidl.core.Interface.TypedInterfaceOrBuilder getOutputInterfaceOrBuild } } /** - * .flyteidl.core.TypedInterface output_interface = 3; + * .flyteidl.core.TypedInterface output_interface = 2; */ private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.Interface.TypedInterface, flyteidl.core.Interface.TypedInterface.Builder, flyteidl.core.Interface.TypedInterfaceOrBuilder> @@ -1483,129 +1212,12 @@ public flyteidl.core.Interface.TypedInterfaceOrBuilder getOutputInterfaceOrBuild return outputInterfaceBuilder_; } - private 
flyteidl.core.Literals.LiteralMap inputData_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder> inputDataBuilder_; - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - public boolean hasInputData() { - return inputDataBuilder_ != null || inputData_ != null; - } - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - public flyteidl.core.Literals.LiteralMap getInputData() { - if (inputDataBuilder_ == null) { - return inputData_ == null ? flyteidl.core.Literals.LiteralMap.getDefaultInstance() : inputData_; - } else { - return inputDataBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - public Builder setInputData(flyteidl.core.Literals.LiteralMap value) { - if (inputDataBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - inputData_ = value; - onChanged(); - } else { - inputDataBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - public Builder setInputData( - flyteidl.core.Literals.LiteralMap.Builder builderForValue) { - if (inputDataBuilder_ == null) { - inputData_ = builderForValue.build(); - onChanged(); - } else { - inputDataBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - public Builder mergeInputData(flyteidl.core.Literals.LiteralMap value) { - if (inputDataBuilder_ == null) { - if (inputData_ != null) { - inputData_ = - flyteidl.core.Literals.LiteralMap.newBuilder(inputData_).mergeFrom(value).buildPartial(); - } else { - inputData_ = value; - } - onChanged(); - } else { - inputDataBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - public Builder clearInputData() { - if (inputDataBuilder_ == null) { - inputData_ = null; - onChanged(); - } else { - inputData_ = 
null; - inputDataBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - public flyteidl.core.Literals.LiteralMap.Builder getInputDataBuilder() { - - onChanged(); - return getInputDataFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - public flyteidl.core.Literals.LiteralMapOrBuilder getInputDataOrBuilder() { - if (inputDataBuilder_ != null) { - return inputDataBuilder_.getMessageOrBuilder(); - } else { - return inputData_ == null ? - flyteidl.core.Literals.LiteralMap.getDefaultInstance() : inputData_; - } - } - /** - * .flyteidl.core.LiteralMap input_data = 4; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder> - getInputDataFieldBuilder() { - if (inputDataBuilder_ == null) { - inputDataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder>( - getInputData(), - getParentForChildren(), - isClean()); - inputData_ = null; - } - return inputDataBuilder_; - } - private java.util.List artifactIds_ = java.util.Collections.emptyList(); private void ensureArtifactIdsIsMutable() { - if (!((bitField0_ & 0x00000010) != 0)) { + if (!((bitField0_ & 0x00000004) != 0)) { artifactIds_ = new java.util.ArrayList(artifactIds_); - bitField0_ |= 0x00000010; + bitField0_ |= 0x00000004; } } @@ -1618,7 +1230,7 @@ private void ensureArtifactIdsIsMutable() { * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public java.util.List getArtifactIdsList() { if (artifactIdsBuilder_ == null) { @@ -1633,7 +1245,7 @@ public java.util.List getArtifactIdsList() * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public int getArtifactIdsCount() { if (artifactIdsBuilder_ == null) { @@ -1648,7 +1260,7 @@ public int getArtifactIdsCount() { * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index) { if (artifactIdsBuilder_ == null) { @@ -1663,7 +1275,7 @@ public flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index) { * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public Builder setArtifactIds( int index, flyteidl.core.ArtifactId.ArtifactID value) { @@ -1685,7 +1297,7 @@ public Builder setArtifactIds( * We can't have the ExecutionMetadata object directly because of import cycle *
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public Builder setArtifactIds( int index, flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { @@ -1704,7 +1316,7 @@ public Builder setArtifactIds( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public Builder addArtifactIds(flyteidl.core.ArtifactId.ArtifactID value) { if (artifactIdsBuilder_ == null) { @@ -1725,7 +1337,7 @@ public Builder addArtifactIds(flyteidl.core.ArtifactId.ArtifactID value) { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public Builder addArtifactIds( int index, flyteidl.core.ArtifactId.ArtifactID value) { @@ -1747,7 +1359,7 @@ public Builder addArtifactIds( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public Builder addArtifactIds( flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { @@ -1766,7 +1378,7 @@ public Builder addArtifactIds( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public Builder addArtifactIds( int index, flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { @@ -1785,7 +1397,7 @@ public Builder addArtifactIds( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public Builder addAllArtifactIds( java.lang.Iterable values) { @@ -1805,12 
+1417,12 @@ public Builder addAllArtifactIds( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public Builder clearArtifactIds() { if (artifactIdsBuilder_ == null) { artifactIds_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000010); + bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { artifactIdsBuilder_.clear(); @@ -1823,7 +1435,7 @@ public Builder clearArtifactIds() { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public Builder removeArtifactIds(int index) { if (artifactIdsBuilder_ == null) { @@ -1841,7 +1453,7 @@ public Builder removeArtifactIds(int index) { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public flyteidl.core.ArtifactId.ArtifactID.Builder getArtifactIdsBuilder( int index) { @@ -1853,7 +1465,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder getArtifactIdsBuilder( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( int index) { @@ -1868,7 +1480,7 @@ public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public java.util.List getArtifactIdsOrBuilderList() { @@ -1884,7 +1496,7 @@ public 
flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder() { return getArtifactIdsFieldBuilder().addBuilder( @@ -1896,7 +1508,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder() { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder( int index) { @@ -1909,7 +1521,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 5; + * repeated .flyteidl.core.ArtifactID artifact_ids = 3; */ public java.util.List getArtifactIdsBuilderList() { @@ -1922,7 +1534,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder( artifactIdsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder>( artifactIds_, - ((bitField0_ & 0x00000010) != 0), + ((bitField0_ & 0x00000004) != 0), getParentForChildren(), isClean()); artifactIds_ = null; @@ -1934,13 +1546,13 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder( private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder> referenceExecutionBuilder_; /** - * .flyteidl.core.WorkflowExecutionIdentifier 
reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ public boolean hasReferenceExecution() { return referenceExecutionBuilder_ != null || referenceExecution_ != null; } /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getReferenceExecution() { if (referenceExecutionBuilder_ == null) { @@ -1950,7 +1562,7 @@ public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier getReferen } } /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ public Builder setReferenceExecution(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier value) { if (referenceExecutionBuilder_ == null) { @@ -1966,7 +1578,7 @@ public Builder setReferenceExecution(flyteidl.core.IdentifierOuterClass.Workflow return this; } /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ public Builder setReferenceExecution( flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder builderForValue) { @@ -1980,7 +1592,7 @@ public Builder setReferenceExecution( return this; } /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ public Builder mergeReferenceExecution(flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier value) { if (referenceExecutionBuilder_ == null) { @@ -1998,7 +1610,7 @@ public Builder mergeReferenceExecution(flyteidl.core.IdentifierOuterClass.Workfl return this; } /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ public Builder 
clearReferenceExecution() { if (referenceExecutionBuilder_ == null) { @@ -2012,7 +1624,7 @@ public Builder clearReferenceExecution() { return this; } /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder getReferenceExecutionBuilder() { @@ -2020,7 +1632,7 @@ public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder ge return getReferenceExecutionFieldBuilder().getBuilder(); } /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder getReferenceExecutionOrBuilder() { if (referenceExecutionBuilder_ != null) { @@ -2031,7 +1643,7 @@ public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder g } } /** - * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 6; + * .flyteidl.core.WorkflowExecutionIdentifier reference_execution = 4; */ private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifier.Builder, flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder> @@ -2049,7 +1661,7 @@ public flyteidl.core.IdentifierOuterClass.WorkflowExecutionIdentifierOrBuilder g private java.lang.Object principal_ = ""; /** - * string principal = 7; + * string principal = 5; */ public java.lang.String getPrincipal() { java.lang.Object ref = principal_; @@ -2064,7 +1676,7 @@ public java.lang.String getPrincipal() { } } /** - * string principal = 7; + * string principal = 5; */ public com.google.protobuf.ByteString getPrincipalBytes() { @@ -2080,7 +1692,7 @@ public java.lang.String getPrincipal() { } } /** - * string principal = 7; + * string principal = 5; */ public 
Builder setPrincipal( java.lang.String value) { @@ -2093,7 +1705,7 @@ public Builder setPrincipal( return this; } /** - * string principal = 7; + * string principal = 5; */ public Builder clearPrincipal() { @@ -2102,7 +1714,7 @@ public Builder clearPrincipal() { return this; } /** - * string principal = 7; + * string principal = 5; */ public Builder setPrincipalBytes( com.google.protobuf.ByteString value) { @@ -2126,7 +1738,7 @@ public Builder setPrincipalBytes( * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public boolean hasLaunchPlanId() { return launchPlanIdBuilder_ != null || launchPlanId_ != null; @@ -2138,7 +1750,7 @@ public boolean hasLaunchPlanId() { * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public flyteidl.core.IdentifierOuterClass.Identifier getLaunchPlanId() { if (launchPlanIdBuilder_ == null) { @@ -2154,7 +1766,7 @@ public flyteidl.core.IdentifierOuterClass.Identifier getLaunchPlanId() { * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public Builder setLaunchPlanId(flyteidl.core.IdentifierOuterClass.Identifier value) { if (launchPlanIdBuilder_ == null) { @@ -2176,7 +1788,7 @@ public Builder setLaunchPlanId(flyteidl.core.IdentifierOuterClass.Identifier val * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
* * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public Builder setLaunchPlanId( flyteidl.core.IdentifierOuterClass.Identifier.Builder builderForValue) { @@ -2196,7 +1808,7 @@ public Builder setLaunchPlanId( * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public Builder mergeLaunchPlanId(flyteidl.core.IdentifierOuterClass.Identifier value) { if (launchPlanIdBuilder_ == null) { @@ -2220,7 +1832,7 @@ public Builder mergeLaunchPlanId(flyteidl.core.IdentifierOuterClass.Identifier v * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public Builder clearLaunchPlanId() { if (launchPlanIdBuilder_ == null) { @@ -2240,7 +1852,7 @@ public Builder clearLaunchPlanId() { * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public flyteidl.core.IdentifierOuterClass.Identifier.Builder getLaunchPlanIdBuilder() { @@ -2254,7 +1866,7 @@ public flyteidl.core.IdentifierOuterClass.Identifier.Builder getLaunchPlanIdBuil * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getLaunchPlanIdOrBuilder() { if (launchPlanIdBuilder_ != null) { @@ -2271,7 +1883,7 @@ public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getLaunchPlanIdOrB * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
* * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> @@ -2381,37 +1993,12 @@ public interface CloudEventNodeExecutionOrBuilder extends */ flyteidl.core.IdentifierOuterClass.TaskExecutionIdentifierOrBuilder getTaskExecIdOrBuilder(); - /** - *
-     * Hydrated output
-     * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - boolean hasOutputData(); - /** - *
-     * Hydrated output
-     * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - flyteidl.core.Literals.LiteralMap getOutputData(); - /** - *
-     * Hydrated output
-     * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - flyteidl.core.Literals.LiteralMapOrBuilder getOutputDataOrBuilder(); - /** *
      * The typed interface for the task that produced the event.
      * 
* - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ boolean hasOutputInterface(); /** @@ -2419,7 +2006,7 @@ public interface CloudEventNodeExecutionOrBuilder extends * The typed interface for the task that produced the event. * * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ flyteidl.core.Interface.TypedInterface getOutputInterface(); /** @@ -2427,30 +2014,17 @@ public interface CloudEventNodeExecutionOrBuilder extends * The typed interface for the task that produced the event. * * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ flyteidl.core.Interface.TypedInterfaceOrBuilder getOutputInterfaceOrBuilder(); - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - boolean hasInputData(); - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - flyteidl.core.Literals.LiteralMap getInputData(); - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - flyteidl.core.Literals.LiteralMapOrBuilder getInputDataOrBuilder(); - /** *
      * The following are ExecutionMetadata fields
      * We can't have the ExecutionMetadata object directly because of import cycle
      * 
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ java.util.List getArtifactIdsList(); @@ -2460,7 +2034,7 @@ public interface CloudEventNodeExecutionOrBuilder extends * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index); /** @@ -2469,7 +2043,7 @@ public interface CloudEventNodeExecutionOrBuilder extends * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ int getArtifactIdsCount(); /** @@ -2478,7 +2052,7 @@ public interface CloudEventNodeExecutionOrBuilder extends * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ java.util.List getArtifactIdsOrBuilderList(); @@ -2488,17 +2062,17 @@ public interface CloudEventNodeExecutionOrBuilder extends * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( int index); /** - * string principal = 7; + * string principal = 5; */ java.lang.String getPrincipal(); /** - * string principal = 7; + * string principal = 5; */ com.google.protobuf.ByteString getPrincipalBytes(); @@ -2510,7 +2084,7 @@ flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
* * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ boolean hasLaunchPlanId(); /** @@ -2520,7 +2094,7 @@ flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ flyteidl.core.IdentifierOuterClass.Identifier getLaunchPlanId(); /** @@ -2530,7 +2104,7 @@ flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getLaunchPlanIdOrBuilder(); } @@ -2602,19 +2176,6 @@ private CloudEventNodeExecution( break; } case 26: { - flyteidl.core.Literals.LiteralMap.Builder subBuilder = null; - if (outputData_ != null) { - subBuilder = outputData_.toBuilder(); - } - outputData_ = input.readMessage(flyteidl.core.Literals.LiteralMap.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(outputData_); - outputData_ = subBuilder.buildPartial(); - } - - break; - } - case 34: { flyteidl.core.Interface.TypedInterface.Builder subBuilder = null; if (outputInterface_ != null) { subBuilder = outputInterface_.toBuilder(); @@ -2627,35 +2188,22 @@ private CloudEventNodeExecution( break; } - case 42: { - flyteidl.core.Literals.LiteralMap.Builder subBuilder = null; - if (inputData_ != null) { - subBuilder = inputData_.toBuilder(); - } - inputData_ = input.readMessage(flyteidl.core.Literals.LiteralMap.parser(), extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(inputData_); - inputData_ = subBuilder.buildPartial(); - } - - break; - } - case 50: { - if (!((mutable_bitField0_ & 0x00000020) != 0)) { + case 34: { + if (!((mutable_bitField0_ & 0x00000008) 
!= 0)) { artifactIds_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000020; + mutable_bitField0_ |= 0x00000008; } artifactIds_.add( input.readMessage(flyteidl.core.ArtifactId.ArtifactID.parser(), extensionRegistry)); break; } - case 58: { + case 42: { java.lang.String s = input.readStringRequireUtf8(); principal_ = s; break; } - case 66: { + case 50: { flyteidl.core.IdentifierOuterClass.Identifier.Builder subBuilder = null; if (launchPlanId_ != null) { subBuilder = launchPlanId_.toBuilder(); @@ -2683,7 +2231,7 @@ private CloudEventNodeExecution( throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { - if (((mutable_bitField0_ & 0x00000020) != 0)) { + if (((mutable_bitField0_ & 0x00000008) != 0)) { artifactIds_ = java.util.Collections.unmodifiableList(artifactIds_); } this.unknownFields = unknownFields.build(); @@ -2758,47 +2306,14 @@ public flyteidl.core.IdentifierOuterClass.TaskExecutionIdentifierOrBuilder getTa return getTaskExecId(); } - public static final int OUTPUT_DATA_FIELD_NUMBER = 3; - private flyteidl.core.Literals.LiteralMap outputData_; - /** - *
-     * Hydrated output
-     * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - public boolean hasOutputData() { - return outputData_ != null; - } - /** - *
-     * Hydrated output
-     * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - public flyteidl.core.Literals.LiteralMap getOutputData() { - return outputData_ == null ? flyteidl.core.Literals.LiteralMap.getDefaultInstance() : outputData_; - } - /** - *
-     * Hydrated output
-     * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - public flyteidl.core.Literals.LiteralMapOrBuilder getOutputDataOrBuilder() { - return getOutputData(); - } - - public static final int OUTPUT_INTERFACE_FIELD_NUMBER = 4; + public static final int OUTPUT_INTERFACE_FIELD_NUMBER = 3; private flyteidl.core.Interface.TypedInterface outputInterface_; /** *
      * The typed interface for the task that produced the event.
      * 
* - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ public boolean hasOutputInterface() { return outputInterface_ != null; @@ -2808,7 +2323,7 @@ public boolean hasOutputInterface() { * The typed interface for the task that produced the event. * * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ public flyteidl.core.Interface.TypedInterface getOutputInterface() { return outputInterface_ == null ? flyteidl.core.Interface.TypedInterface.getDefaultInstance() : outputInterface_; @@ -2818,34 +2333,13 @@ public flyteidl.core.Interface.TypedInterface getOutputInterface() { * The typed interface for the task that produced the event. * * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ public flyteidl.core.Interface.TypedInterfaceOrBuilder getOutputInterfaceOrBuilder() { return getOutputInterface(); } - public static final int INPUT_DATA_FIELD_NUMBER = 5; - private flyteidl.core.Literals.LiteralMap inputData_; - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - public boolean hasInputData() { - return inputData_ != null; - } - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - public flyteidl.core.Literals.LiteralMap getInputData() { - return inputData_ == null ? flyteidl.core.Literals.LiteralMap.getDefaultInstance() : inputData_; - } - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - public flyteidl.core.Literals.LiteralMapOrBuilder getInputDataOrBuilder() { - return getInputData(); - } - - public static final int ARTIFACT_IDS_FIELD_NUMBER = 6; + public static final int ARTIFACT_IDS_FIELD_NUMBER = 4; private java.util.List artifactIds_; /** *
@@ -2853,7 +2347,7 @@ public flyteidl.core.Literals.LiteralMapOrBuilder getInputDataOrBuilder() {
      * We can't have the ExecutionMetadata object directly because of import cycle
      * 
* - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public java.util.List getArtifactIdsList() { return artifactIds_; @@ -2864,7 +2358,7 @@ public java.util.List getArtifactIdsList() * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public java.util.List getArtifactIdsOrBuilderList() { @@ -2876,7 +2370,7 @@ public java.util.List getArtifactIdsList() * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public int getArtifactIdsCount() { return artifactIds_.size(); @@ -2887,7 +2381,7 @@ public int getArtifactIdsCount() { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index) { return artifactIds_.get(index); @@ -2898,17 +2392,17 @@ public flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index) { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( int index) { return artifactIds_.get(index); } - public static final int PRINCIPAL_FIELD_NUMBER = 7; + public static final int PRINCIPAL_FIELD_NUMBER = 5; private volatile java.lang.Object principal_; /** - * string principal = 7; + * string principal = 5; */ public java.lang.String getPrincipal() { java.lang.Object ref = principal_; @@ -2923,7 +2417,7 @@ public java.lang.String getPrincipal() { } } /** - * string principal = 7; + * 
string principal = 5; */ public com.google.protobuf.ByteString getPrincipalBytes() { @@ -2939,7 +2433,7 @@ public java.lang.String getPrincipal() { } } - public static final int LAUNCH_PLAN_ID_FIELD_NUMBER = 8; + public static final int LAUNCH_PLAN_ID_FIELD_NUMBER = 6; private flyteidl.core.IdentifierOuterClass.Identifier launchPlanId_; /** *
@@ -2948,7 +2442,7 @@ public java.lang.String getPrincipal() {
      * Launch plan IDs are easier to get than workflow IDs so we'll use these for now.
      * 
* - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public boolean hasLaunchPlanId() { return launchPlanId_ != null; @@ -2960,7 +2454,7 @@ public boolean hasLaunchPlanId() { * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public flyteidl.core.IdentifierOuterClass.Identifier getLaunchPlanId() { return launchPlanId_ == null ? flyteidl.core.IdentifierOuterClass.Identifier.getDefaultInstance() : launchPlanId_; @@ -2972,7 +2466,7 @@ public flyteidl.core.IdentifierOuterClass.Identifier getLaunchPlanId() { * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getLaunchPlanIdOrBuilder() { return getLaunchPlanId(); @@ -2998,23 +2492,17 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (taskExecId_ != null) { output.writeMessage(2, getTaskExecId()); } - if (outputData_ != null) { - output.writeMessage(3, getOutputData()); - } if (outputInterface_ != null) { - output.writeMessage(4, getOutputInterface()); - } - if (inputData_ != null) { - output.writeMessage(5, getInputData()); + output.writeMessage(3, getOutputInterface()); } for (int i = 0; i < artifactIds_.size(); i++) { - output.writeMessage(6, artifactIds_.get(i)); + output.writeMessage(4, artifactIds_.get(i)); } if (!getPrincipalBytes().isEmpty()) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 7, principal_); + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, principal_); } if (launchPlanId_ != null) { - output.writeMessage(8, getLaunchPlanId()); + output.writeMessage(6, getLaunchPlanId()); } unknownFields.writeTo(output); } @@ -3033,28 +2521,20 @@ public int getSerializedSize() { size 
+= com.google.protobuf.CodedOutputStream .computeMessageSize(2, getTaskExecId()); } - if (outputData_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(3, getOutputData()); - } if (outputInterface_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(4, getOutputInterface()); - } - if (inputData_ != null) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, getInputData()); + .computeMessageSize(3, getOutputInterface()); } for (int i = 0; i < artifactIds_.size(); i++) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(6, artifactIds_.get(i)); + .computeMessageSize(4, artifactIds_.get(i)); } if (!getPrincipalBytes().isEmpty()) { - size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, principal_); + size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, principal_); } if (launchPlanId_ != null) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(8, getLaunchPlanId()); + .computeMessageSize(6, getLaunchPlanId()); } size += unknownFields.getSerializedSize(); memoizedSize = size; @@ -3081,21 +2561,11 @@ public boolean equals(final java.lang.Object obj) { if (!getTaskExecId() .equals(other.getTaskExecId())) return false; } - if (hasOutputData() != other.hasOutputData()) return false; - if (hasOutputData()) { - if (!getOutputData() - .equals(other.getOutputData())) return false; - } if (hasOutputInterface() != other.hasOutputInterface()) return false; if (hasOutputInterface()) { if (!getOutputInterface() .equals(other.getOutputInterface())) return false; } - if (hasInputData() != other.hasInputData()) return false; - if (hasInputData()) { - if (!getInputData() - .equals(other.getInputData())) return false; - } if (!getArtifactIdsList() .equals(other.getArtifactIdsList())) return false; if (!getPrincipal() @@ -3124,18 +2594,10 @@ public int hashCode() { hash = (37 * hash) + TASK_EXEC_ID_FIELD_NUMBER; hash = (53 * hash) + 
getTaskExecId().hashCode(); } - if (hasOutputData()) { - hash = (37 * hash) + OUTPUT_DATA_FIELD_NUMBER; - hash = (53 * hash) + getOutputData().hashCode(); - } if (hasOutputInterface()) { hash = (37 * hash) + OUTPUT_INTERFACE_FIELD_NUMBER; hash = (53 * hash) + getOutputInterface().hashCode(); } - if (hasInputData()) { - hash = (37 * hash) + INPUT_DATA_FIELD_NUMBER; - hash = (53 * hash) + getInputData().hashCode(); - } if (getArtifactIdsCount() > 0) { hash = (37 * hash) + ARTIFACT_IDS_FIELD_NUMBER; hash = (53 * hash) + getArtifactIdsList().hashCode(); @@ -3292,27 +2754,15 @@ public Builder clear() { taskExecId_ = null; taskExecIdBuilder_ = null; } - if (outputDataBuilder_ == null) { - outputData_ = null; - } else { - outputData_ = null; - outputDataBuilder_ = null; - } if (outputInterfaceBuilder_ == null) { outputInterface_ = null; } else { outputInterface_ = null; outputInterfaceBuilder_ = null; } - if (inputDataBuilder_ == null) { - inputData_ = null; - } else { - inputData_ = null; - inputDataBuilder_ = null; - } if (artifactIdsBuilder_ == null) { artifactIds_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000020); + bitField0_ = (bitField0_ & ~0x00000008); } else { artifactIdsBuilder_.clear(); } @@ -3362,25 +2812,15 @@ public flyteidl.event.Cloudevents.CloudEventNodeExecution buildPartial() { } else { result.taskExecId_ = taskExecIdBuilder_.build(); } - if (outputDataBuilder_ == null) { - result.outputData_ = outputData_; - } else { - result.outputData_ = outputDataBuilder_.build(); - } if (outputInterfaceBuilder_ == null) { result.outputInterface_ = outputInterface_; } else { result.outputInterface_ = outputInterfaceBuilder_.build(); } - if (inputDataBuilder_ == null) { - result.inputData_ = inputData_; - } else { - result.inputData_ = inputDataBuilder_.build(); - } if (artifactIdsBuilder_ == null) { - if (((bitField0_ & 0x00000020) != 0)) { + if (((bitField0_ & 0x00000008) != 0)) { artifactIds_ = 
java.util.Collections.unmodifiableList(artifactIds_); - bitField0_ = (bitField0_ & ~0x00000020); + bitField0_ = (bitField0_ & ~0x00000008); } result.artifactIds_ = artifactIds_; } else { @@ -3447,20 +2887,14 @@ public Builder mergeFrom(flyteidl.event.Cloudevents.CloudEventNodeExecution othe if (other.hasTaskExecId()) { mergeTaskExecId(other.getTaskExecId()); } - if (other.hasOutputData()) { - mergeOutputData(other.getOutputData()); - } if (other.hasOutputInterface()) { mergeOutputInterface(other.getOutputInterface()); } - if (other.hasInputData()) { - mergeInputData(other.getInputData()); - } if (artifactIdsBuilder_ == null) { if (!other.artifactIds_.isEmpty()) { if (artifactIds_.isEmpty()) { artifactIds_ = other.artifactIds_; - bitField0_ = (bitField0_ & ~0x00000020); + bitField0_ = (bitField0_ & ~0x00000008); } else { ensureArtifactIdsIsMutable(); artifactIds_.addAll(other.artifactIds_); @@ -3473,7 +2907,7 @@ public Builder mergeFrom(flyteidl.event.Cloudevents.CloudEventNodeExecution othe artifactIdsBuilder_.dispose(); artifactIdsBuilder_ = null; artifactIds_ = other.artifactIds_; - bitField0_ = (bitField0_ & ~0x00000020); + bitField0_ = (bitField0_ & ~0x00000008); artifactIdsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getArtifactIdsFieldBuilder() : null; @@ -3789,159 +3223,6 @@ public flyteidl.core.IdentifierOuterClass.TaskExecutionIdentifierOrBuilder getTa return taskExecIdBuilder_; } - private flyteidl.core.Literals.LiteralMap outputData_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder> outputDataBuilder_; - /** - *
-       * Hydrated output
-       * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - public boolean hasOutputData() { - return outputDataBuilder_ != null || outputData_ != null; - } - /** - *
-       * Hydrated output
-       * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - public flyteidl.core.Literals.LiteralMap getOutputData() { - if (outputDataBuilder_ == null) { - return outputData_ == null ? flyteidl.core.Literals.LiteralMap.getDefaultInstance() : outputData_; - } else { - return outputDataBuilder_.getMessage(); - } - } - /** - *
-       * Hydrated output
-       * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - public Builder setOutputData(flyteidl.core.Literals.LiteralMap value) { - if (outputDataBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - outputData_ = value; - onChanged(); - } else { - outputDataBuilder_.setMessage(value); - } - - return this; - } - /** - *
-       * Hydrated output
-       * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - public Builder setOutputData( - flyteidl.core.Literals.LiteralMap.Builder builderForValue) { - if (outputDataBuilder_ == null) { - outputData_ = builderForValue.build(); - onChanged(); - } else { - outputDataBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - *
-       * Hydrated output
-       * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - public Builder mergeOutputData(flyteidl.core.Literals.LiteralMap value) { - if (outputDataBuilder_ == null) { - if (outputData_ != null) { - outputData_ = - flyteidl.core.Literals.LiteralMap.newBuilder(outputData_).mergeFrom(value).buildPartial(); - } else { - outputData_ = value; - } - onChanged(); - } else { - outputDataBuilder_.mergeFrom(value); - } - - return this; - } - /** - *
-       * Hydrated output
-       * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - public Builder clearOutputData() { - if (outputDataBuilder_ == null) { - outputData_ = null; - onChanged(); - } else { - outputData_ = null; - outputDataBuilder_ = null; - } - - return this; - } - /** - *
-       * Hydrated output
-       * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - public flyteidl.core.Literals.LiteralMap.Builder getOutputDataBuilder() { - - onChanged(); - return getOutputDataFieldBuilder().getBuilder(); - } - /** - *
-       * Hydrated output
-       * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - public flyteidl.core.Literals.LiteralMapOrBuilder getOutputDataOrBuilder() { - if (outputDataBuilder_ != null) { - return outputDataBuilder_.getMessageOrBuilder(); - } else { - return outputData_ == null ? - flyteidl.core.Literals.LiteralMap.getDefaultInstance() : outputData_; - } - } - /** - *
-       * Hydrated output
-       * 
- * - * .flyteidl.core.LiteralMap output_data = 3; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder> - getOutputDataFieldBuilder() { - if (outputDataBuilder_ == null) { - outputDataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder>( - getOutputData(), - getParentForChildren(), - isClean()); - outputData_ = null; - } - return outputDataBuilder_; - } - private flyteidl.core.Interface.TypedInterface outputInterface_; private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.Interface.TypedInterface, flyteidl.core.Interface.TypedInterface.Builder, flyteidl.core.Interface.TypedInterfaceOrBuilder> outputInterfaceBuilder_; @@ -3950,7 +3231,7 @@ public flyteidl.core.Literals.LiteralMapOrBuilder getOutputDataOrBuilder() { * The typed interface for the task that produced the event. * * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ public boolean hasOutputInterface() { return outputInterfaceBuilder_ != null || outputInterface_ != null; @@ -3960,7 +3241,7 @@ public boolean hasOutputInterface() { * The typed interface for the task that produced the event. * * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ public flyteidl.core.Interface.TypedInterface getOutputInterface() { if (outputInterfaceBuilder_ == null) { @@ -3974,7 +3255,7 @@ public flyteidl.core.Interface.TypedInterface getOutputInterface() { * The typed interface for the task that produced the event. 
* * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ public Builder setOutputInterface(flyteidl.core.Interface.TypedInterface value) { if (outputInterfaceBuilder_ == null) { @@ -3994,7 +3275,7 @@ public Builder setOutputInterface(flyteidl.core.Interface.TypedInterface value) * The typed interface for the task that produced the event. * * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ public Builder setOutputInterface( flyteidl.core.Interface.TypedInterface.Builder builderForValue) { @@ -4012,7 +3293,7 @@ public Builder setOutputInterface( * The typed interface for the task that produced the event. * * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ public Builder mergeOutputInterface(flyteidl.core.Interface.TypedInterface value) { if (outputInterfaceBuilder_ == null) { @@ -4034,7 +3315,7 @@ public Builder mergeOutputInterface(flyteidl.core.Interface.TypedInterface value * The typed interface for the task that produced the event. * * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ public Builder clearOutputInterface() { if (outputInterfaceBuilder_ == null) { @@ -4052,7 +3333,7 @@ public Builder clearOutputInterface() { * The typed interface for the task that produced the event. * * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ public flyteidl.core.Interface.TypedInterface.Builder getOutputInterfaceBuilder() { @@ -4064,7 +3345,7 @@ public flyteidl.core.Interface.TypedInterface.Builder getOutputInterfaceBuilder( * The typed interface for the task that produced the event. 
* * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ public flyteidl.core.Interface.TypedInterfaceOrBuilder getOutputInterfaceOrBuilder() { if (outputInterfaceBuilder_ != null) { @@ -4079,7 +3360,7 @@ public flyteidl.core.Interface.TypedInterfaceOrBuilder getOutputInterfaceOrBuild * The typed interface for the task that produced the event. * * - * .flyteidl.core.TypedInterface output_interface = 4; + * .flyteidl.core.TypedInterface output_interface = 3; */ private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.Interface.TypedInterface, flyteidl.core.Interface.TypedInterface.Builder, flyteidl.core.Interface.TypedInterfaceOrBuilder> @@ -4095,129 +3376,12 @@ public flyteidl.core.Interface.TypedInterfaceOrBuilder getOutputInterfaceOrBuild return outputInterfaceBuilder_; } - private flyteidl.core.Literals.LiteralMap inputData_; - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder> inputDataBuilder_; - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - public boolean hasInputData() { - return inputDataBuilder_ != null || inputData_ != null; - } - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - public flyteidl.core.Literals.LiteralMap getInputData() { - if (inputDataBuilder_ == null) { - return inputData_ == null ? 
flyteidl.core.Literals.LiteralMap.getDefaultInstance() : inputData_; - } else { - return inputDataBuilder_.getMessage(); - } - } - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - public Builder setInputData(flyteidl.core.Literals.LiteralMap value) { - if (inputDataBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - inputData_ = value; - onChanged(); - } else { - inputDataBuilder_.setMessage(value); - } - - return this; - } - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - public Builder setInputData( - flyteidl.core.Literals.LiteralMap.Builder builderForValue) { - if (inputDataBuilder_ == null) { - inputData_ = builderForValue.build(); - onChanged(); - } else { - inputDataBuilder_.setMessage(builderForValue.build()); - } - - return this; - } - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - public Builder mergeInputData(flyteidl.core.Literals.LiteralMap value) { - if (inputDataBuilder_ == null) { - if (inputData_ != null) { - inputData_ = - flyteidl.core.Literals.LiteralMap.newBuilder(inputData_).mergeFrom(value).buildPartial(); - } else { - inputData_ = value; - } - onChanged(); - } else { - inputDataBuilder_.mergeFrom(value); - } - - return this; - } - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - public Builder clearInputData() { - if (inputDataBuilder_ == null) { - inputData_ = null; - onChanged(); - } else { - inputData_ = null; - inputDataBuilder_ = null; - } - - return this; - } - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - public flyteidl.core.Literals.LiteralMap.Builder getInputDataBuilder() { - - onChanged(); - return getInputDataFieldBuilder().getBuilder(); - } - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - public flyteidl.core.Literals.LiteralMapOrBuilder getInputDataOrBuilder() { - if (inputDataBuilder_ != null) { - return inputDataBuilder_.getMessageOrBuilder(); - } else { - return inputData_ == null ? 
- flyteidl.core.Literals.LiteralMap.getDefaultInstance() : inputData_; - } - } - /** - * .flyteidl.core.LiteralMap input_data = 5; - */ - private com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder> - getInputDataFieldBuilder() { - if (inputDataBuilder_ == null) { - inputDataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< - flyteidl.core.Literals.LiteralMap, flyteidl.core.Literals.LiteralMap.Builder, flyteidl.core.Literals.LiteralMapOrBuilder>( - getInputData(), - getParentForChildren(), - isClean()); - inputData_ = null; - } - return inputDataBuilder_; - } - private java.util.List artifactIds_ = java.util.Collections.emptyList(); private void ensureArtifactIdsIsMutable() { - if (!((bitField0_ & 0x00000020) != 0)) { + if (!((bitField0_ & 0x00000008) != 0)) { artifactIds_ = new java.util.ArrayList(artifactIds_); - bitField0_ |= 0x00000020; + bitField0_ |= 0x00000008; } } @@ -4230,7 +3394,7 @@ private void ensureArtifactIdsIsMutable() { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public java.util.List getArtifactIdsList() { if (artifactIdsBuilder_ == null) { @@ -4245,7 +3409,7 @@ public java.util.List getArtifactIdsList() * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public int getArtifactIdsCount() { if (artifactIdsBuilder_ == null) { @@ -4260,7 +3424,7 @@ public int getArtifactIdsCount() { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public flyteidl.core.ArtifactId.ArtifactID 
getArtifactIds(int index) { if (artifactIdsBuilder_ == null) { @@ -4275,7 +3439,7 @@ public flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index) { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public Builder setArtifactIds( int index, flyteidl.core.ArtifactId.ArtifactID value) { @@ -4297,7 +3461,7 @@ public Builder setArtifactIds( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public Builder setArtifactIds( int index, flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { @@ -4316,7 +3480,7 @@ public Builder setArtifactIds( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public Builder addArtifactIds(flyteidl.core.ArtifactId.ArtifactID value) { if (artifactIdsBuilder_ == null) { @@ -4337,7 +3501,7 @@ public Builder addArtifactIds(flyteidl.core.ArtifactId.ArtifactID value) { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public Builder addArtifactIds( int index, flyteidl.core.ArtifactId.ArtifactID value) { @@ -4359,7 +3523,7 @@ public Builder addArtifactIds( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public Builder addArtifactIds( flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { @@ -4378,7 +3542,7 @@ public Builder addArtifactIds( * We can't have the ExecutionMetadata object directly 
because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public Builder addArtifactIds( int index, flyteidl.core.ArtifactId.ArtifactID.Builder builderForValue) { @@ -4397,7 +3561,7 @@ public Builder addArtifactIds( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public Builder addAllArtifactIds( java.lang.Iterable values) { @@ -4417,12 +3581,12 @@ public Builder addAllArtifactIds( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public Builder clearArtifactIds() { if (artifactIdsBuilder_ == null) { artifactIds_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000020); + bitField0_ = (bitField0_ & ~0x00000008); onChanged(); } else { artifactIdsBuilder_.clear(); @@ -4435,7 +3599,7 @@ public Builder clearArtifactIds() { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public Builder removeArtifactIds(int index) { if (artifactIdsBuilder_ == null) { @@ -4453,7 +3617,7 @@ public Builder removeArtifactIds(int index) { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public flyteidl.core.ArtifactId.ArtifactID.Builder getArtifactIdsBuilder( int index) { @@ -4465,7 +3629,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder getArtifactIdsBuilder( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID 
artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( int index) { @@ -4480,7 +3644,7 @@ public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public java.util.List getArtifactIdsOrBuilderList() { @@ -4496,7 +3660,7 @@ public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder() { return getArtifactIdsFieldBuilder().addBuilder( @@ -4508,7 +3672,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder() { * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder( int index) { @@ -4521,7 +3685,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder( * We can't have the ExecutionMetadata object directly because of import cycle * * - * repeated .flyteidl.core.ArtifactID artifact_ids = 6; + * repeated .flyteidl.core.ArtifactID artifact_ids = 4; */ public java.util.List getArtifactIdsBuilderList() { @@ -4534,7 +3698,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder( artifactIdsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< flyteidl.core.ArtifactId.ArtifactID, flyteidl.core.ArtifactId.ArtifactID.Builder, flyteidl.core.ArtifactId.ArtifactIDOrBuilder>( artifactIds_, - ((bitField0_ 
& 0x00000020) != 0), + ((bitField0_ & 0x00000008) != 0), getParentForChildren(), isClean()); artifactIds_ = null; @@ -4544,7 +3708,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder( private java.lang.Object principal_ = ""; /** - * string principal = 7; + * string principal = 5; */ public java.lang.String getPrincipal() { java.lang.Object ref = principal_; @@ -4559,7 +3723,7 @@ public java.lang.String getPrincipal() { } } /** - * string principal = 7; + * string principal = 5; */ public com.google.protobuf.ByteString getPrincipalBytes() { @@ -4575,7 +3739,7 @@ public java.lang.String getPrincipal() { } } /** - * string principal = 7; + * string principal = 5; */ public Builder setPrincipal( java.lang.String value) { @@ -4588,7 +3752,7 @@ public Builder setPrincipal( return this; } /** - * string principal = 7; + * string principal = 5; */ public Builder clearPrincipal() { @@ -4597,7 +3761,7 @@ public Builder clearPrincipal() { return this; } /** - * string principal = 7; + * string principal = 5; */ public Builder setPrincipalBytes( com.google.protobuf.ByteString value) { @@ -4621,7 +3785,7 @@ public Builder setPrincipalBytes( * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public boolean hasLaunchPlanId() { return launchPlanIdBuilder_ != null || launchPlanId_ != null; @@ -4633,7 +3797,7 @@ public boolean hasLaunchPlanId() { * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public flyteidl.core.IdentifierOuterClass.Identifier getLaunchPlanId() { if (launchPlanIdBuilder_ == null) { @@ -4649,7 +3813,7 @@ public flyteidl.core.IdentifierOuterClass.Identifier getLaunchPlanId() { * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
* * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public Builder setLaunchPlanId(flyteidl.core.IdentifierOuterClass.Identifier value) { if (launchPlanIdBuilder_ == null) { @@ -4671,7 +3835,7 @@ public Builder setLaunchPlanId(flyteidl.core.IdentifierOuterClass.Identifier val * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public Builder setLaunchPlanId( flyteidl.core.IdentifierOuterClass.Identifier.Builder builderForValue) { @@ -4691,7 +3855,7 @@ public Builder setLaunchPlanId( * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public Builder mergeLaunchPlanId(flyteidl.core.IdentifierOuterClass.Identifier value) { if (launchPlanIdBuilder_ == null) { @@ -4715,7 +3879,7 @@ public Builder mergeLaunchPlanId(flyteidl.core.IdentifierOuterClass.Identifier v * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public Builder clearLaunchPlanId() { if (launchPlanIdBuilder_ == null) { @@ -4735,7 +3899,7 @@ public Builder clearLaunchPlanId() { * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public flyteidl.core.IdentifierOuterClass.Identifier.Builder getLaunchPlanIdBuilder() { @@ -4749,7 +3913,7 @@ public flyteidl.core.IdentifierOuterClass.Identifier.Builder getLaunchPlanIdBuil * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
* * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getLaunchPlanIdOrBuilder() { if (launchPlanIdBuilder_ != null) { @@ -4766,7 +3930,7 @@ public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getLaunchPlanIdOrB * Launch plan IDs are easier to get than workflow IDs so we'll use these for now. * * - * .flyteidl.core.Identifier launch_plan_id = 8; + * .flyteidl.core.Identifier launch_plan_id = 6; */ private com.google.protobuf.SingleFieldBuilderV3< flyteidl.core.IdentifierOuterClass.Identifier, flyteidl.core.IdentifierOuterClass.Identifier.Builder, flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder> @@ -5509,7 +4673,7 @@ public interface CloudEventExecutionStartOrBuilder extends /** *
-     * Artifact IDs found
+     * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
      * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -5518,7 +4682,7 @@ public interface CloudEventExecutionStartOrBuilder extends getArtifactIdsList(); /** *
-     * Artifact IDs found
+     * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
      * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -5526,7 +4690,7 @@ public interface CloudEventExecutionStartOrBuilder extends flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index); /** *
-     * Artifact IDs found
+     * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
      * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -5534,7 +4698,7 @@ public interface CloudEventExecutionStartOrBuilder extends int getArtifactIdsCount(); /** *
-     * Artifact IDs found
+     * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
      * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -5543,7 +4707,7 @@ public interface CloudEventExecutionStartOrBuilder extends getArtifactIdsOrBuilderList(); /** *
-     * Artifact IDs found
+     * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
      * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -5553,38 +4717,38 @@ flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( /** *
-     * Artifact keys found.
+     * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
      * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ java.util.List - getArtifactKeysList(); + getArtifactTrackersList(); /** *
-     * Artifact keys found.
+     * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
      * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ - int getArtifactKeysCount(); + int getArtifactTrackersCount(); /** *
-     * Artifact keys found.
+     * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
      * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ - java.lang.String getArtifactKeys(int index); + java.lang.String getArtifactTrackers(int index); /** *
-     * Artifact keys found.
+     * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
      * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ com.google.protobuf.ByteString - getArtifactKeysBytes(int index); + getArtifactTrackersBytes(int index); /** * string principal = 6; @@ -5614,7 +4778,7 @@ private CloudEventExecutionStart(com.google.protobuf.GeneratedMessageV3.Builder< } private CloudEventExecutionStart() { artifactIds_ = java.util.Collections.emptyList(); - artifactKeys_ = com.google.protobuf.LazyStringArrayList.EMPTY; + artifactTrackers_ = com.google.protobuf.LazyStringArrayList.EMPTY; principal_ = ""; } @@ -5693,10 +4857,10 @@ private CloudEventExecutionStart( case 42: { java.lang.String s = input.readStringRequireUtf8(); if (!((mutable_bitField0_ & 0x00000010) != 0)) { - artifactKeys_ = new com.google.protobuf.LazyStringArrayList(); + artifactTrackers_ = new com.google.protobuf.LazyStringArrayList(); mutable_bitField0_ |= 0x00000010; } - artifactKeys_.add(s); + artifactTrackers_.add(s); break; } case 50: { @@ -5724,7 +4888,7 @@ private CloudEventExecutionStart( artifactIds_ = java.util.Collections.unmodifiableList(artifactIds_); } if (((mutable_bitField0_ & 0x00000010) != 0)) { - artifactKeys_ = artifactKeys_.getUnmodifiableView(); + artifactTrackers_ = artifactTrackers_.getUnmodifiableView(); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -5835,7 +4999,7 @@ public flyteidl.core.IdentifierOuterClass.IdentifierOrBuilder getWorkflowIdOrBui private java.util.List artifactIds_; /** *
-     * Artifact IDs found
+     * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
      * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -5845,7 +5009,7 @@ public java.util.List getArtifactIdsList() } /** *
-     * Artifact IDs found
+     * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
      * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -5856,7 +5020,7 @@ public java.util.List getArtifactIdsList() } /** *
-     * Artifact IDs found
+     * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
      * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -5866,7 +5030,7 @@ public int getArtifactIdsCount() { } /** *
-     * Artifact IDs found
+     * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
      * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -5876,7 +5040,7 @@ public flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index) { } /** *
-     * Artifact IDs found
+     * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
      * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -5886,49 +5050,49 @@ public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( return artifactIds_.get(index); } - public static final int ARTIFACT_KEYS_FIELD_NUMBER = 5; - private com.google.protobuf.LazyStringList artifactKeys_; + public static final int ARTIFACT_TRACKERS_FIELD_NUMBER = 5; + private com.google.protobuf.LazyStringList artifactTrackers_; /** *
-     * Artifact keys found.
+     * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
      * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ public com.google.protobuf.ProtocolStringList - getArtifactKeysList() { - return artifactKeys_; + getArtifactTrackersList() { + return artifactTrackers_; } /** *
-     * Artifact keys found.
+     * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
      * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ - public int getArtifactKeysCount() { - return artifactKeys_.size(); + public int getArtifactTrackersCount() { + return artifactTrackers_.size(); } /** *
-     * Artifact keys found.
+     * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
      * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ - public java.lang.String getArtifactKeys(int index) { - return artifactKeys_.get(index); + public java.lang.String getArtifactTrackers(int index) { + return artifactTrackers_.get(index); } /** *
-     * Artifact keys found.
+     * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
      * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ public com.google.protobuf.ByteString - getArtifactKeysBytes(int index) { - return artifactKeys_.getByteString(index); + getArtifactTrackersBytes(int index) { + return artifactTrackers_.getByteString(index); } public static final int PRINCIPAL_FIELD_NUMBER = 6; @@ -5991,8 +5155,8 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) for (int i = 0; i < artifactIds_.size(); i++) { output.writeMessage(4, artifactIds_.get(i)); } - for (int i = 0; i < artifactKeys_.size(); i++) { - com.google.protobuf.GeneratedMessageV3.writeString(output, 5, artifactKeys_.getRaw(i)); + for (int i = 0; i < artifactTrackers_.size(); i++) { + com.google.protobuf.GeneratedMessageV3.writeString(output, 5, artifactTrackers_.getRaw(i)); } if (!getPrincipalBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, principal_); @@ -6024,11 +5188,11 @@ public int getSerializedSize() { } { int dataSize = 0; - for (int i = 0; i < artifactKeys_.size(); i++) { - dataSize += computeStringSizeNoTag(artifactKeys_.getRaw(i)); + for (int i = 0; i < artifactTrackers_.size(); i++) { + dataSize += computeStringSizeNoTag(artifactTrackers_.getRaw(i)); } size += dataSize; - size += 1 * getArtifactKeysList().size(); + size += 1 * getArtifactTrackersList().size(); } if (!getPrincipalBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, principal_); @@ -6065,8 +5229,8 @@ public boolean equals(final java.lang.Object obj) { } if (!getArtifactIdsList() .equals(other.getArtifactIdsList())) return false; - if (!getArtifactKeysList() - .equals(other.getArtifactKeysList())) return false; + if (!getArtifactTrackersList() + .equals(other.getArtifactTrackersList())) return false; if (!getPrincipal() .equals(other.getPrincipal())) return false; if (!unknownFields.equals(other.unknownFields)) return false; @@ -6096,9 +5260,9 @@ public int hashCode() { hash = (37 * 
hash) + ARTIFACT_IDS_FIELD_NUMBER; hash = (53 * hash) + getArtifactIdsList().hashCode(); } - if (getArtifactKeysCount() > 0) { - hash = (37 * hash) + ARTIFACT_KEYS_FIELD_NUMBER; - hash = (53 * hash) + getArtifactKeysList().hashCode(); + if (getArtifactTrackersCount() > 0) { + hash = (37 * hash) + ARTIFACT_TRACKERS_FIELD_NUMBER; + hash = (53 * hash) + getArtifactTrackersList().hashCode(); } hash = (37 * hash) + PRINCIPAL_FIELD_NUMBER; hash = (53 * hash) + getPrincipal().hashCode(); @@ -6264,7 +5428,7 @@ public Builder clear() { } else { artifactIdsBuilder_.clear(); } - artifactKeys_ = com.google.protobuf.LazyStringArrayList.EMPTY; + artifactTrackers_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); principal_ = ""; @@ -6321,10 +5485,10 @@ public flyteidl.event.Cloudevents.CloudEventExecutionStart buildPartial() { result.artifactIds_ = artifactIdsBuilder_.build(); } if (((bitField0_ & 0x00000010) != 0)) { - artifactKeys_ = artifactKeys_.getUnmodifiableView(); + artifactTrackers_ = artifactTrackers_.getUnmodifiableView(); bitField0_ = (bitField0_ & ~0x00000010); } - result.artifactKeys_ = artifactKeys_; + result.artifactTrackers_ = artifactTrackers_; result.principal_ = principal_; result.bitField0_ = to_bitField0_; onBuilt(); @@ -6410,13 +5574,13 @@ public Builder mergeFrom(flyteidl.event.Cloudevents.CloudEventExecutionStart oth } } } - if (!other.artifactKeys_.isEmpty()) { - if (artifactKeys_.isEmpty()) { - artifactKeys_ = other.artifactKeys_; + if (!other.artifactTrackers_.isEmpty()) { + if (artifactTrackers_.isEmpty()) { + artifactTrackers_ = other.artifactTrackers_; bitField0_ = (bitField0_ & ~0x00000010); } else { - ensureArtifactKeysIsMutable(); - artifactKeys_.addAll(other.artifactKeys_); + ensureArtifactTrackersIsMutable(); + artifactTrackers_.addAll(other.artifactTrackers_); } onChanged(); } @@ -6891,7 +6055,7 @@ private void ensureArtifactIdsIsMutable() { /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -6905,7 +6069,7 @@ public java.util.List getArtifactIdsList() } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -6919,7 +6083,7 @@ public int getArtifactIdsCount() { } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -6933,7 +6097,7 @@ public flyteidl.core.ArtifactId.ArtifactID getArtifactIds(int index) { } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -6954,7 +6118,7 @@ public Builder setArtifactIds( } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -6972,7 +6136,7 @@ public Builder setArtifactIds( } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -6992,7 +6156,7 @@ public Builder addArtifactIds(flyteidl.core.ArtifactId.ArtifactID value) { } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7013,7 +6177,7 @@ public Builder addArtifactIds( } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7031,7 +6195,7 @@ public Builder addArtifactIds( } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7049,7 +6213,7 @@ public Builder addArtifactIds( } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7068,7 +6232,7 @@ public Builder addAllArtifactIds( } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7085,7 +6249,7 @@ public Builder clearArtifactIds() { } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7102,7 +6266,7 @@ public Builder removeArtifactIds(int index) { } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7113,7 +6277,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder getArtifactIdsBuilder( } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7127,7 +6291,7 @@ public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7142,7 +6306,7 @@ public flyteidl.core.ArtifactId.ArtifactIDOrBuilder getArtifactIdsOrBuilder( } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7153,7 +6317,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder() { } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7165,7 +6329,7 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder( } /** *
-       * Artifact IDs found
+       * Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run.
        * 
* * repeated .flyteidl.core.ArtifactID artifact_ids = 4; @@ -7189,132 +6353,132 @@ public flyteidl.core.ArtifactId.ArtifactID.Builder addArtifactIdsBuilder( return artifactIdsBuilder_; } - private com.google.protobuf.LazyStringList artifactKeys_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureArtifactKeysIsMutable() { + private com.google.protobuf.LazyStringList artifactTrackers_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureArtifactTrackersIsMutable() { if (!((bitField0_ & 0x00000010) != 0)) { - artifactKeys_ = new com.google.protobuf.LazyStringArrayList(artifactKeys_); + artifactTrackers_ = new com.google.protobuf.LazyStringArrayList(artifactTrackers_); bitField0_ |= 0x00000010; } } /** *
-       * Artifact keys found.
+       * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
        * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ public com.google.protobuf.ProtocolStringList - getArtifactKeysList() { - return artifactKeys_.getUnmodifiableView(); + getArtifactTrackersList() { + return artifactTrackers_.getUnmodifiableView(); } /** *
-       * Artifact keys found.
+       * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
        * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ - public int getArtifactKeysCount() { - return artifactKeys_.size(); + public int getArtifactTrackersCount() { + return artifactTrackers_.size(); } /** *
-       * Artifact keys found.
+       * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
        * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ - public java.lang.String getArtifactKeys(int index) { - return artifactKeys_.get(index); + public java.lang.String getArtifactTrackers(int index) { + return artifactTrackers_.get(index); } /** *
-       * Artifact keys found.
+       * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
        * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ public com.google.protobuf.ByteString - getArtifactKeysBytes(int index) { - return artifactKeys_.getByteString(index); + getArtifactTrackersBytes(int index) { + return artifactTrackers_.getByteString(index); } /** *
-       * Artifact keys found.
+       * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
        * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ - public Builder setArtifactKeys( + public Builder setArtifactTrackers( int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } - ensureArtifactKeysIsMutable(); - artifactKeys_.set(index, value); + ensureArtifactTrackersIsMutable(); + artifactTrackers_.set(index, value); onChanged(); return this; } /** *
-       * Artifact keys found.
+       * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
        * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ - public Builder addArtifactKeys( + public Builder addArtifactTrackers( java.lang.String value) { if (value == null) { throw new NullPointerException(); } - ensureArtifactKeysIsMutable(); - artifactKeys_.add(value); + ensureArtifactTrackersIsMutable(); + artifactTrackers_.add(value); onChanged(); return this; } /** *
-       * Artifact keys found.
+       * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
        * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ - public Builder addAllArtifactKeys( + public Builder addAllArtifactTrackers( java.lang.Iterable values) { - ensureArtifactKeysIsMutable(); + ensureArtifactTrackersIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( - values, artifactKeys_); + values, artifactTrackers_); onChanged(); return this; } /** *
-       * Artifact keys found.
+       * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
        * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ - public Builder clearArtifactKeys() { - artifactKeys_ = com.google.protobuf.LazyStringArrayList.EMPTY; + public Builder clearArtifactTrackers() { + artifactTrackers_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000010); onChanged(); return this; } /** *
-       * Artifact keys found.
+       * Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service.
        * 
* - * repeated string artifact_keys = 5; + * repeated string artifact_trackers = 5; */ - public Builder addArtifactKeysBytes( + public Builder addArtifactTrackersBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); - ensureArtifactKeysIsMutable(); - artifactKeys_.add(value); + ensureArtifactTrackersIsMutable(); + artifactTrackers_.add(value); onChanged(); return this; } @@ -7474,40 +6638,35 @@ public flyteidl.event.Cloudevents.CloudEventExecutionStart getDefaultInstanceFor "flyteidl/core/literals.proto\032\035flyteidl/c" + "ore/interface.proto\032\037flyteidl/core/artif" + "act_id.proto\032\036flyteidl/core/identifier.p" + - "roto\032\037google/protobuf/timestamp.proto\"\260\003" + + "roto\032\037google/protobuf/timestamp.proto\"\321\002" + "\n\033CloudEventWorkflowExecution\0229\n\traw_eve" + "nt\030\001 \001(\0132&.flyteidl.event.WorkflowExecut" + - "ionEvent\022.\n\013output_data\030\002 \001(\0132\031.flyteidl" + - ".core.LiteralMap\0227\n\020output_interface\030\003 \001" + - "(\0132\035.flyteidl.core.TypedInterface\022-\n\ninp" + - "ut_data\030\004 \001(\0132\031.flyteidl.core.LiteralMap" + - "\022/\n\014artifact_ids\030\005 \003(\0132\031.flyteidl.core.A" + - "rtifactID\022G\n\023reference_execution\030\006 \001(\0132*" + - ".flyteidl.core.WorkflowExecutionIdentifi" + - "er\022\021\n\tprincipal\030\007 \001(\t\0221\n\016launch_plan_id\030" + - "\010 \001(\0132\031.flyteidl.core.Identifier\"\235\003\n\027Clo" + - "udEventNodeExecution\0225\n\traw_event\030\001 \001(\0132" + - "\".flyteidl.event.NodeExecutionEvent\022<\n\014t" + - "ask_exec_id\030\002 \001(\0132&.flyteidl.core.TaskEx" + - "ecutionIdentifier\022.\n\013output_data\030\003 \001(\0132\031" + - ".flyteidl.core.LiteralMap\0227\n\020output_inte" + - "rface\030\004 \001(\0132\035.flyteidl.core.TypedInterfa" + - "ce\022-\n\ninput_data\030\005 \001(\0132\031.flyteidl.core.L" + - "iteralMap\022/\n\014artifact_ids\030\006 
\003(\0132\031.flytei" + - "dl.core.ArtifactID\022\021\n\tprincipal\030\007 \001(\t\0221\n" + - "\016launch_plan_id\030\010 \001(\0132\031.flyteidl.core.Id" + - "entifier\"P\n\027CloudEventTaskExecution\0225\n\tr" + - "aw_event\030\001 \001(\0132\".flyteidl.event.TaskExec" + - "utionEvent\"\232\002\n\030CloudEventExecutionStart\022" + - "@\n\014execution_id\030\001 \001(\0132*.flyteidl.core.Wo" + - "rkflowExecutionIdentifier\0221\n\016launch_plan" + - "_id\030\002 \001(\0132\031.flyteidl.core.Identifier\022.\n\013" + - "workflow_id\030\003 \001(\0132\031.flyteidl.core.Identi" + - "fier\022/\n\014artifact_ids\030\004 \003(\0132\031.flyteidl.co" + - "re.ArtifactID\022\025\n\rartifact_keys\030\005 \003(\t\022\021\n\t" + - "principal\030\006 \001(\tB=Z;github.com/flyteorg/f" + - "lyte/flyteidl/gen/pb-go/flyteidl/eventb\006" + - "proto3" + "ionEvent\0227\n\020output_interface\030\002 \001(\0132\035.fly" + + "teidl.core.TypedInterface\022/\n\014artifact_id" + + "s\030\003 \003(\0132\031.flyteidl.core.ArtifactID\022G\n\023re" + + "ference_execution\030\004 \001(\0132*.flyteidl.core." + + "WorkflowExecutionIdentifier\022\021\n\tprincipal" + + "\030\005 \001(\t\0221\n\016launch_plan_id\030\006 \001(\0132\031.flyteid" + + "l.core.Identifier\"\276\002\n\027CloudEventNodeExec" + + "ution\0225\n\traw_event\030\001 \001(\0132\".flyteidl.even" + + "t.NodeExecutionEvent\022<\n\014task_exec_id\030\002 \001" + + "(\0132&.flyteidl.core.TaskExecutionIdentifi" + + "er\0227\n\020output_interface\030\003 \001(\0132\035.flyteidl." 
+ + "core.TypedInterface\022/\n\014artifact_ids\030\004 \003(" + + "\0132\031.flyteidl.core.ArtifactID\022\021\n\tprincipa" + + "l\030\005 \001(\t\0221\n\016launch_plan_id\030\006 \001(\0132\031.flytei" + + "dl.core.Identifier\"P\n\027CloudEventTaskExec" + + "ution\0225\n\traw_event\030\001 \001(\0132\".flyteidl.even" + + "t.TaskExecutionEvent\"\236\002\n\030CloudEventExecu" + + "tionStart\022@\n\014execution_id\030\001 \001(\0132*.flytei" + + "dl.core.WorkflowExecutionIdentifier\0221\n\016l" + + "aunch_plan_id\030\002 \001(\0132\031.flyteidl.core.Iden" + + "tifier\022.\n\013workflow_id\030\003 \001(\0132\031.flyteidl.c" + + "ore.Identifier\022/\n\014artifact_ids\030\004 \003(\0132\031.f" + + "lyteidl.core.ArtifactID\022\031\n\021artifact_trac" + + "kers\030\005 \003(\t\022\021\n\tprincipal\030\006 \001(\tB=Z;github." + + "com/flyteorg/flyte/flyteidl/gen/pb-go/fl" + + "yteidl/eventb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -7532,13 +6691,13 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_event_CloudEventWorkflowExecution_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_event_CloudEventWorkflowExecution_descriptor, - new java.lang.String[] { "RawEvent", "OutputData", "OutputInterface", "InputData", "ArtifactIds", "ReferenceExecution", "Principal", "LaunchPlanId", }); + new java.lang.String[] { "RawEvent", "OutputInterface", "ArtifactIds", "ReferenceExecution", "Principal", "LaunchPlanId", }); internal_static_flyteidl_event_CloudEventNodeExecution_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_flyteidl_event_CloudEventNodeExecution_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_event_CloudEventNodeExecution_descriptor, - new java.lang.String[] { "RawEvent", "TaskExecId", "OutputData", "OutputInterface", "InputData", "ArtifactIds", "Principal", "LaunchPlanId", }); + new java.lang.String[] { "RawEvent", "TaskExecId", "OutputInterface", "ArtifactIds", "Principal", "LaunchPlanId", }); internal_static_flyteidl_event_CloudEventTaskExecution_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_flyteidl_event_CloudEventTaskExecution_fieldAccessorTable = new @@ -7550,7 +6709,7 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( internal_static_flyteidl_event_CloudEventExecutionStart_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_flyteidl_event_CloudEventExecutionStart_descriptor, - new java.lang.String[] { "ExecutionId", "LaunchPlanId", "WorkflowId", "ArtifactIds", "ArtifactKeys", "Principal", }); + new java.lang.String[] { "ExecutionId", "LaunchPlanId", "WorkflowId", "ArtifactIds", "ArtifactTrackers", "Principal", }); flyteidl.event.Event.getDescriptor(); 
flyteidl.core.Literals.getDescriptor(); flyteidl.core.Interface.getDescriptor(); diff --git a/flyteidl/gen/pb-java/flyteidl/service/Admin.java b/flyteidl/gen/pb-java/flyteidl/service/Admin.java index 80e46d6475..b98591274e 100644 --- a/flyteidl/gen/pb-java/flyteidl/service/Admin.java +++ b/flyteidl/gen/pb-java/flyteidl/service/Admin.java @@ -38,258 +38,397 @@ public static void registerAllExtensions( "admin/task_execution.proto\032\034flyteidl/adm" + "in/version.proto\032\033flyteidl/admin/common." + "proto\032\'flyteidl/admin/description_entity" + - ".proto2\204N\n\014AdminService\022m\n\nCreateTask\022!." + - "flyteidl.admin.TaskCreateRequest\032\".flyte" + - "idl.admin.TaskCreateResponse\"\030\202\323\344\223\002\022\"\r/a" + - "pi/v1/tasks:\001*\022\210\001\n\007GetTask\022 .flyteidl.ad" + - "min.ObjectGetRequest\032\024.flyteidl.admin.Ta" + - "sk\"E\202\323\344\223\002?\022=/api/v1/tasks/{id.project}/{" + - "id.domain}/{id.name}/{id.version}\022\227\001\n\013Li" + - "stTaskIds\0220.flyteidl.admin.NamedEntityId" + - "entifierListRequest\032).flyteidl.admin.Nam" + - "edEntityIdentifierList\"+\202\323\344\223\002%\022#/api/v1/" + - "task_ids/{project}/{domain}\022\256\001\n\tListTask" + + ".proto2\270y\n\014AdminService\022\216\001\n\nCreateTask\022!" 
+ + ".flyteidl.admin.TaskCreateRequest\032\".flyt" + + "eidl.admin.TaskCreateResponse\"9\202\323\344\223\0023\"\r/" + + "api/v1/tasks:\001*Z\037\"\032/api/v1/tasks/org/{id" + + ".org}:\001*\022\330\001\n\007GetTask\022 .flyteidl.admin.Ob" + + "jectGetRequest\032\024.flyteidl.admin.Task\"\224\001\202" + + "\323\344\223\002\215\001\022=/api/v1/tasks/{id.project}/{id.d" + + "omain}/{id.name}/{id.version}ZL\022J/api/v1" + + "/tasks/org/{id.org}/{id.project}/{id.dom" + + "ain}/{id.name}/{id.version}\022\305\001\n\013ListTask" + + "Ids\0220.flyteidl.admin.NamedEntityIdentifi" + + "erListRequest\032).flyteidl.admin.NamedEnti" + + "tyIdentifierList\"Y\202\323\344\223\002S\022#/api/v1/task_i" + + "ds/{project}/{domain}Z,\022*/api/v1/tasks/o" + + "rg/{org}/{project}/{domain}\022\250\002\n\tListTask" + "s\022#.flyteidl.admin.ResourceListRequest\032\030" + - ".flyteidl.admin.TaskList\"b\202\323\344\223\002\\\0220/api/v" + - "1/tasks/{id.project}/{id.domain}/{id.nam" + - "e}Z(\022&/api/v1/tasks/{id.project}/{id.dom" + - "ain}\022}\n\016CreateWorkflow\022%.flyteidl.admin." 
+ - "WorkflowCreateRequest\032&.flyteidl.admin.W" + - "orkflowCreateResponse\"\034\202\323\344\223\002\026\"\021/api/v1/w" + - "orkflows:\001*\022\224\001\n\013GetWorkflow\022 .flyteidl.a" + + ".flyteidl.admin.TaskList\"\333\001\202\323\344\223\002\324\001\0220/api" + + "/v1/tasks/{id.project}/{id.domain}/{id.n" + + "ame}Z?\022=/api/v1/tasks/org/{id.org}/{id.p" + + "roject}/{id.domain}/{id.name}Z(\022&/api/v1" + + "/tasks/{id.project}/{id.domain}Z5\0223/api/" + + "v1/tasks/org/{id.org}/{id.project}/{id.d" + + "omain}\022\242\001\n\016CreateWorkflow\022%.flyteidl.adm" + + "in.WorkflowCreateRequest\032&.flyteidl.admi" + + "n.WorkflowCreateResponse\"A\202\323\344\223\002;\"\021/api/v" + + "1/workflows:\001*Z#\"\036/api/v1/workflows/org/" + + "{id.org}:\001*\022\350\001\n\013GetWorkflow\022 .flyteidl.a" + "dmin.ObjectGetRequest\032\030.flyteidl.admin.W" + - "orkflow\"I\202\323\344\223\002C\022A/api/v1/workflows/{id.p" + - "roject}/{id.domain}/{id.name}/{id.versio" + - "n}\022\237\001\n\017ListWorkflowIds\0220.flyteidl.admin." + - "NamedEntityIdentifierListRequest\032).flyte" + - "idl.admin.NamedEntityIdentifierList\"/\202\323\344" + - "\223\002)\022\'/api/v1/workflow_ids/{project}/{dom" + - "ain}\022\276\001\n\rListWorkflows\022#.flyteidl.admin." 
+ - "ResourceListRequest\032\034.flyteidl.admin.Wor" + - "kflowList\"j\202\323\344\223\002d\0224/api/v1/workflows/{id" + - ".project}/{id.domain}/{id.name}Z,\022*/api/" + - "v1/workflows/{id.project}/{id.domain}\022\206\001" + - "\n\020CreateLaunchPlan\022\'.flyteidl.admin.Laun" + - "chPlanCreateRequest\032(.flyteidl.admin.Lau" + - "nchPlanCreateResponse\"\037\202\323\344\223\002\031\"\024/api/v1/l" + - "aunch_plans:\001*\022\233\001\n\rGetLaunchPlan\022 .flyte" + - "idl.admin.ObjectGetRequest\032\032.flyteidl.ad" + - "min.LaunchPlan\"L\202\323\344\223\002F\022D/api/v1/launch_p" + - "lans/{id.project}/{id.domain}/{id.name}/" + - "{id.version}\022\242\001\n\023GetActiveLaunchPlan\022\'.f" + - "lyteidl.admin.ActiveLaunchPlanRequest\032\032." + - "flyteidl.admin.LaunchPlan\"F\202\323\344\223\002@\022>/api/" + - "v1/active_launch_plans/{id.project}/{id." + - "domain}/{id.name}\022\234\001\n\025ListActiveLaunchPl" + - "ans\022+.flyteidl.admin.ActiveLaunchPlanLis" + - "tRequest\032\036.flyteidl.admin.LaunchPlanList" + - "\"6\202\323\344\223\0020\022./api/v1/active_launch_plans/{p" + - "roject}/{domain}\022\244\001\n\021ListLaunchPlanIds\0220" + - ".flyteidl.admin.NamedEntityIdentifierLis" + - "tRequest\032).flyteidl.admin.NamedEntityIde" + - "ntifierList\"2\202\323\344\223\002,\022*/api/v1/launch_plan" + - "_ids/{project}/{domain}\022\310\001\n\017ListLaunchPl" + - "ans\022#.flyteidl.admin.ResourceListRequest" + - "\032\036.flyteidl.admin.LaunchPlanList\"p\202\323\344\223\002j" + - "\0227/api/v1/launch_plans/{id.project}/{id." + - "domain}/{id.name}Z/\022-/api/v1/launch_plan" + - "s/{id.project}/{id.domain}\022\266\001\n\020UpdateLau" + - "nchPlan\022\'.flyteidl.admin.LaunchPlanUpdat" + - "eRequest\032(.flyteidl.admin.LaunchPlanUpda" + - "teResponse\"O\202\323\344\223\002I\032D/api/v1/launch_plans" + - "/{id.project}/{id.domain}/{id.name}/{id." 
+ - "version}:\001*\022\201\001\n\017CreateExecution\022&.flytei" + - "dl.admin.ExecutionCreateRequest\032\'.flytei" + - "dl.admin.ExecutionCreateResponse\"\035\202\323\344\223\002\027" + - "\"\022/api/v1/executions:\001*\022\216\001\n\021RelaunchExec" + - "ution\022(.flyteidl.admin.ExecutionRelaunch" + - "Request\032\'.flyteidl.admin.ExecutionCreate" + - "Response\"&\202\323\344\223\002 \"\033/api/v1/executions/rel" + - "aunch:\001*\022\213\001\n\020RecoverExecution\022\'.flyteidl" + - ".admin.ExecutionRecoverRequest\032\'.flyteid" + - "l.admin.ExecutionCreateResponse\"%\202\323\344\223\002\037\"" + - "\032/api/v1/executions/recover:\001*\022\225\001\n\014GetEx" + - "ecution\022+.flyteidl.admin.WorkflowExecuti" + - "onGetRequest\032\031.flyteidl.admin.Execution\"" + - "=\202\323\344\223\0027\0225/api/v1/executions/{id.project}" + - "/{id.domain}/{id.name}\022\244\001\n\017UpdateExecuti" + - "on\022&.flyteidl.admin.ExecutionUpdateReque" + - "st\032\'.flyteidl.admin.ExecutionUpdateRespo" + - "nse\"@\202\323\344\223\002:\0325/api/v1/executions/{id.proj" + - "ect}/{id.domain}/{id.name}:\001*\022\271\001\n\020GetExe" + - "cutionData\022/.flyteidl.admin.WorkflowExec" + - "utionGetDataRequest\0320.flyteidl.admin.Wor" + - "kflowExecutionGetDataResponse\"B\202\323\344\223\002<\022:/" + - "api/v1/data/executions/{id.project}/{id." 
+ - "domain}/{id.name}\022\211\001\n\016ListExecutions\022#.f" + - "lyteidl.admin.ResourceListRequest\032\035.flyt" + - "eidl.admin.ExecutionList\"3\202\323\344\223\002-\022+/api/v" + - "1/executions/{id.project}/{id.domain}\022\255\001" + - "\n\022TerminateExecution\022).flyteidl.admin.Ex" + - "ecutionTerminateRequest\032*.flyteidl.admin" + - ".ExecutionTerminateResponse\"@\202\323\344\223\002:*5/ap" + - "i/v1/executions/{id.project}/{id.domain}" + - "/{id.name}:\001*\022\322\001\n\020GetNodeExecution\022\'.fly" + - "teidl.admin.NodeExecutionGetRequest\032\035.fl" + - "yteidl.admin.NodeExecution\"v\202\323\344\223\002p\022n/api" + - "/v1/node_executions/{id.execution_id.pro" + - "ject}/{id.execution_id.domain}/{id.execu" + - "tion_id.name}/{id.node_id}\022\336\001\n\022ListNodeE" + - "xecutions\022(.flyteidl.admin.NodeExecution" + - "ListRequest\032!.flyteidl.admin.NodeExecuti" + - "onList\"{\202\323\344\223\002u\022s/api/v1/node_executions/" + - "{workflow_execution_id.project}/{workflo" + - "w_execution_id.domain}/{workflow_executi" + - "on_id.name}\022\245\004\n\031ListNodeExecutionsForTas" + - "k\022/.flyteidl.admin.NodeExecutionForTaskL" + - "istRequest\032!.flyteidl.admin.NodeExecutio" + - "nList\"\263\003\202\323\344\223\002\254\003\022\251\003/api/v1/children/task_" + - "executions/{task_execution_id.node_execu" + - "tion_id.execution_id.project}/{task_exec" + - "ution_id.node_execution_id.execution_id." + - "domain}/{task_execution_id.node_executio" + - "n_id.execution_id.name}/{task_execution_" + - "id.node_execution_id.node_id}/{task_exec" + - "ution_id.task_id.project}/{task_executio" + - "n_id.task_id.domain}/{task_execution_id." + - "task_id.name}/{task_execution_id.task_id" + - ".version}/{task_execution_id.retry_attem" + - "pt}\022\356\001\n\024GetNodeExecutionData\022+.flyteidl." 
+ - "admin.NodeExecutionGetDataRequest\032,.flyt" + - "eidl.admin.NodeExecutionGetDataResponse\"" + - "{\202\323\344\223\002u\022s/api/v1/data/node_executions/{i" + - "d.execution_id.project}/{id.execution_id" + - ".domain}/{id.execution_id.name}/{id.node" + - "_id}\022\177\n\017RegisterProject\022&.flyteidl.admin" + - ".ProjectRegisterRequest\032\'.flyteidl.admin" + - ".ProjectRegisterResponse\"\033\202\323\344\223\002\025\"\020/api/v" + - "1/projects:\001*\022q\n\rUpdateProject\022\027.flyteid" + - "l.admin.Project\032%.flyteidl.admin.Project" + - "UpdateResponse\" \202\323\344\223\002\032\032\025/api/v1/projects" + - "/{id}:\001*\022f\n\014ListProjects\022\".flyteidl.admi" + - "n.ProjectListRequest\032\030.flyteidl.admin.Pr" + - "ojects\"\030\202\323\344\223\002\022\022\020/api/v1/projects\022\231\001\n\023Cre" + - "ateWorkflowEvent\022-.flyteidl.admin.Workfl" + - "owExecutionEventRequest\032..flyteidl.admin" + - ".WorkflowExecutionEventResponse\"#\202\323\344\223\002\035\"" + - "\030/api/v1/events/workflows:\001*\022\211\001\n\017CreateN" + - "odeEvent\022).flyteidl.admin.NodeExecutionE" + - "ventRequest\032*.flyteidl.admin.NodeExecuti" + - "onEventResponse\"\037\202\323\344\223\002\031\"\024/api/v1/events/" + - "nodes:\001*\022\211\001\n\017CreateTaskEvent\022).flyteidl." + - "admin.TaskExecutionEventRequest\032*.flytei" + - "dl.admin.TaskExecutionEventResponse\"\037\202\323\344" + - "\223\002\031\"\024/api/v1/events/tasks:\001*\022\200\003\n\020GetTask" + - "Execution\022\'.flyteidl.admin.TaskExecution" + - "GetRequest\032\035.flyteidl.admin.TaskExecutio" + - "n\"\243\002\202\323\344\223\002\234\002\022\231\002/api/v1/task_executions/{i" + - "d.node_execution_id.execution_id.project" + - "}/{id.node_execution_id.execution_id.dom" + - "ain}/{id.node_execution_id.execution_id." 
+ - "name}/{id.node_execution_id.node_id}/{id" + - ".task_id.project}/{id.task_id.domain}/{i" + - "d.task_id.name}/{id.task_id.version}/{id" + - ".retry_attempt}\022\230\002\n\022ListTaskExecutions\022(" + - ".flyteidl.admin.TaskExecutionListRequest" + - "\032!.flyteidl.admin.TaskExecutionList\"\264\001\202\323" + - "\344\223\002\255\001\022\252\001/api/v1/task_executions/{node_ex" + - "ecution_id.execution_id.project}/{node_e" + - "xecution_id.execution_id.domain}/{node_e" + - "xecution_id.execution_id.name}/{node_exe" + - "cution_id.node_id}\022\234\003\n\024GetTaskExecutionD" + - "ata\022+.flyteidl.admin.TaskExecutionGetDat" + - "aRequest\032,.flyteidl.admin.TaskExecutionG" + - "etDataResponse\"\250\002\202\323\344\223\002\241\002\022\236\002/api/v1/data/" + - "task_executions/{id.node_execution_id.ex" + - "ecution_id.project}/{id.node_execution_i" + - "d.execution_id.domain}/{id.node_executio" + - "n_id.execution_id.name}/{id.node_executi" + - "on_id.node_id}/{id.task_id.project}/{id." 
+ - "task_id.domain}/{id.task_id.name}/{id.ta" + - "sk_id.version}/{id.retry_attempt}\022\343\001\n\035Up" + - "dateProjectDomainAttributes\0224.flyteidl.a" + - "dmin.ProjectDomainAttributesUpdateReques" + - "t\0325.flyteidl.admin.ProjectDomainAttribut" + - "esUpdateResponse\"U\202\323\344\223\002O\032J/api/v1/projec" + - "t_domain_attributes/{attributes.project}" + - "/{attributes.domain}:\001*\022\301\001\n\032GetProjectDo" + - "mainAttributes\0221.flyteidl.admin.ProjectD" + - "omainAttributesGetRequest\0322.flyteidl.adm" + - "in.ProjectDomainAttributesGetResponse\"<\202" + - "\323\344\223\0026\0224/api/v1/project_domain_attributes" + - "/{project}/{domain}\022\315\001\n\035DeleteProjectDom" + - "ainAttributes\0224.flyteidl.admin.ProjectDo" + - "mainAttributesDeleteRequest\0325.flyteidl.a" + - "dmin.ProjectDomainAttributesDeleteRespon" + - "se\"?\202\323\344\223\0029*4/api/v1/project_domain_attri" + - "butes/{project}/{domain}:\001*\022\266\001\n\027UpdatePr" + - "ojectAttributes\022..flyteidl.admin.Project" + - "AttributesUpdateRequest\032/.flyteidl.admin" + - ".ProjectAttributesUpdateResponse\":\202\323\344\223\0024" + - "\032//api/v1/project_attributes/{attributes" + - ".project}:\001*\022\237\001\n\024GetProjectAttributes\022+." 
+ - "flyteidl.admin.ProjectAttributesGetReque" + - "st\032,.flyteidl.admin.ProjectAttributesGet" + - "Response\",\202\323\344\223\002&\022$/api/v1/project_attrib" + - "utes/{project}\022\253\001\n\027DeleteProjectAttribut" + - "es\022..flyteidl.admin.ProjectAttributesDel" + - "eteRequest\032/.flyteidl.admin.ProjectAttri" + - "butesDeleteResponse\"/\202\323\344\223\002)*$/api/v1/pro" + - "ject_attributes/{project}:\001*\022\344\001\n\030UpdateW" + - "orkflowAttributes\022/.flyteidl.admin.Workf" + - "lowAttributesUpdateRequest\0320.flyteidl.ad" + - "min.WorkflowAttributesUpdateResponse\"e\202\323" + - "\344\223\002_\032Z/api/v1/workflow_attributes/{attri" + - "butes.project}/{attributes.domain}/{attr" + - "ibutes.workflow}:\001*\022\267\001\n\025GetWorkflowAttri" + - "butes\022,.flyteidl.admin.WorkflowAttribute" + - "sGetRequest\032-.flyteidl.admin.WorkflowAtt" + - "ributesGetResponse\"A\202\323\344\223\002;\0229/api/v1/work" + - "flow_attributes/{project}/{domain}/{work" + - "flow}\022\303\001\n\030DeleteWorkflowAttributes\022/.fly" + - "teidl.admin.WorkflowAttributesDeleteRequ" + - "est\0320.flyteidl.admin.WorkflowAttributesD" + - "eleteResponse\"D\202\323\344\223\002>*9/api/v1/workflow_" + - "attributes/{project}/{domain}/{workflow}" + - ":\001*\022\240\001\n\027ListMatchableAttributes\022..flytei" + - "dl.admin.ListMatchableAttributesRequest\032" + - "/.flyteidl.admin.ListMatchableAttributes" + - "Response\"$\202\323\344\223\002\036\022\034/api/v1/matchable_attr" + - "ibutes\022\237\001\n\021ListNamedEntities\022&.flyteidl." 
+ + "orkflow\"\234\001\202\323\344\223\002\225\001\022A/api/v1/workflows/{id" + + ".project}/{id.domain}/{id.name}/{id.vers" + + "ion}ZP\022N/api/v1/workflows/org/{id.org}/{" + + "id.project}/{id.domain}/{id.name}/{id.ve" + + "rsion}\022\321\001\n\017ListWorkflowIds\0220.flyteidl.ad" + + "min.NamedEntityIdentifierListRequest\032).f" + + "lyteidl.admin.NamedEntityIdentifierList\"" + + "a\202\323\344\223\002[\022\'/api/v1/workflow_ids/{project}/" + + "{domain}Z0\022./api/v1/workflows/org/{org}/" + + "{project}/{domain}\022\300\002\n\rListWorkflows\022#.f" + + "lyteidl.admin.ResourceListRequest\032\034.flyt" + + "eidl.admin.WorkflowList\"\353\001\202\323\344\223\002\344\001\0224/api/" + + "v1/workflows/{id.project}/{id.domain}/{i" + + "d.name}ZC\022A/api/v1/workflows/org/{id.org" + + "}/{id.project}/{id.domain}/{id.name}Z,\022*" + + "/api/v1/workflows/{id.project}/{id.domai" + + "n}Z9\0227/api/v1/workflows/org/{id.org}/{id" + + ".project}/{id.domain}\022\256\001\n\020CreateLaunchPl" + + "an\022\'.flyteidl.admin.LaunchPlanCreateRequ" + + "est\032(.flyteidl.admin.LaunchPlanCreateRes" + + "ponse\"G\202\323\344\223\002A\"\024/api/v1/launch_plans:\001*Z&" + + "\"!/api/v1/launch_plans/org/{id.org}:\001*\022\362" + + "\001\n\rGetLaunchPlan\022 .flyteidl.admin.Object" + + "GetRequest\032\032.flyteidl.admin.LaunchPlan\"\242" + + "\001\202\323\344\223\002\233\001\022D/api/v1/launch_plans/{id.proje" + + "ct}/{id.domain}/{id.name}/{id.version}ZS" + + "\022Q/api/v1/launch_plans/org/{id.org}/{id." + + "project}/{id.domain}/{id.name}/{id.versi" + + "on}\022\363\001\n\023GetActiveLaunchPlan\022\'.flyteidl.a" + + "dmin.ActiveLaunchPlanRequest\032\032.flyteidl." 
+ + "admin.LaunchPlan\"\226\001\202\323\344\223\002\217\001\022>/api/v1/acti" + + "ve_launch_plans/{id.project}/{id.domain}" + + "/{id.name}ZM\022K/api/v1/active_launch_plan" + + "s/org/{id.org}/{id.project}/{id.domain}/" + + "{id.name}\022\330\001\n\025ListActiveLaunchPlans\022+.fl" + + "yteidl.admin.ActiveLaunchPlanListRequest" + + "\032\036.flyteidl.admin.LaunchPlanList\"r\202\323\344\223\002l" + + "\022./api/v1/active_launch_plans/{project}/" + + "{domain}Z:\0228/api/v1/active_launch_plans/" + + "org/{org}/{project}/{domain}\022\334\001\n\021ListLau" + + "nchPlanIds\0220.flyteidl.admin.NamedEntityI" + + "dentifierListRequest\032).flyteidl.admin.Na" + + "medEntityIdentifierList\"j\202\323\344\223\002d\022*/api/v1" + + "/launch_plan_ids/{project}/{domain}Z6\0224/" + + "api/v1/launch_plan_ids/org/{org}/{projec" + + "t}/{domain}\022\320\002\n\017ListLaunchPlans\022#.flytei" + + "dl.admin.ResourceListRequest\032\036.flyteidl." + + "admin.LaunchPlanList\"\367\001\202\323\344\223\002\360\001\0227/api/v1/" + + "launch_plans/{id.project}/{id.domain}/{i" + + "d.name}ZF\022D/api/v1/launch_plans/org/{id." 
+ + "org}/{id.project}/{id.domain}/{id.name}Z" + + "/\022-/api/v1/launch_plans/{id.project}/{id" + + ".domain}Z<\022:/api/v1/launch_plans/org/{id" + + ".org}/{id.project}/{id.domain}\022\215\002\n\020Updat" + + "eLaunchPlan\022\'.flyteidl.admin.LaunchPlanU" + + "pdateRequest\032(.flyteidl.admin.LaunchPlan" + + "UpdateResponse\"\245\001\202\323\344\223\002\236\001\032D/api/v1/launch" + + "_plans/{id.project}/{id.domain}/{id.name" + + "}/{id.version}:\001*ZS\032Q/api/v1/launch_plan" + + "s/org/{id.org}/{id.project}/{id.domain}/" + + "{id.name}/{id.version}\022\244\001\n\017CreateExecuti" + + "on\022&.flyteidl.admin.ExecutionCreateReque" + + "st\032\'.flyteidl.admin.ExecutionCreateRespo" + + "nse\"@\202\323\344\223\002:\"\022/api/v1/executions:\001*Z!\032\034/a" + + "pi/v1/executions/org/{org}:\001*\022\275\001\n\021Relaun" + + "chExecution\022(.flyteidl.admin.ExecutionRe" + + "launchRequest\032\'.flyteidl.admin.Execution" + + "CreateResponse\"U\202\323\344\223\002O\"\033/api/v1/executio" + + "ns/relaunch:\001*Z-\"(/api/v1/executions/org" + + "/{id.org}/relaunch:\001*\022\271\001\n\020RecoverExecuti" + + "on\022\'.flyteidl.admin.ExecutionRecoverRequ" + + "est\032\'.flyteidl.admin.ExecutionCreateResp" + + "onse\"S\202\323\344\223\002M\"\032/api/v1/executions/recover" + + ":\001*Z,\"\'/api/v1/executions/org/{id.org}/r" + + "ecover:\001*\022\334\001\n\014GetExecution\022+.flyteidl.ad" + + "min.WorkflowExecutionGetRequest\032\031.flytei" + + "dl.admin.Execution\"\203\001\202\323\344\223\002}\0225/api/v1/exe" + + "cutions/{id.project}/{id.domain}/{id.nam" + + "e}ZD\022B/api/v1/executions/org/{id.org}/{i" + + "d.project}/{id.domain}/{id.name}\022\357\001\n\017Upd" + + "ateExecution\022&.flyteidl.admin.ExecutionU" + + "pdateRequest\032\'.flyteidl.admin.ExecutionU" + + "pdateResponse\"\212\001\202\323\344\223\002\203\001\0325/api/v1/executi" + + "ons/{id.project}/{id.domain}/{id.name}:\001" + + "*ZG\032B/api/v1/executions/org/{id.org}/{id" + + 
".project}/{id.domain}/{id.name}:\001*\022\206\002\n\020G" + + "etExecutionData\022/.flyteidl.admin.Workflo" + + "wExecutionGetDataRequest\0320.flyteidl.admi" + + "n.WorkflowExecutionGetDataResponse\"\216\001\202\323\344" + + "\223\002\207\001\022:/api/v1/data/executions/{id.projec" + + "t}/{id.domain}/{id.name}ZI\022G/api/v1/data" + + "/executions/org/{id.org}/{id.project}/{i" + + "d.domain}/{id.name}\022\305\001\n\016ListExecutions\022#" + + ".flyteidl.admin.ResourceListRequest\032\035.fl" + + "yteidl.admin.ExecutionList\"o\202\323\344\223\002i\022+/api" + + "/v1/executions/{id.project}/{id.domain}Z" + + ":\0228/api/v1/executions/org/{id.org}/{id.p" + + "roject}/{id.domain}\022\370\001\n\022TerminateExecuti" + + "on\022).flyteidl.admin.ExecutionTerminateRe" + + "quest\032*.flyteidl.admin.ExecutionTerminat" + + "eResponse\"\212\001\202\323\344\223\002\203\001*5/api/v1/executions/" + + "{id.project}/{id.domain}/{id.name}:\001*ZG*" + + "B/api/v1/executions/org/{id.org}/{id.pro" + + "ject}/{id.domain}/{id.name}:\001*\022\342\002\n\020GetNo" + + "deExecution\022\'.flyteidl.admin.NodeExecuti" + + "onGetRequest\032\035.flyteidl.admin.NodeExecut" + + "ion\"\205\002\202\323\344\223\002\376\001\022n/api/v1/node_executions/{" + + "id.execution_id.project}/{id.execution_i" + + "d.domain}/{id.execution_id.name}/{id.nod" + + "e_id}Z\213\001\022\210\001/api/v1/node_executions/org/{" + + "id.execution_id.org}/{id.execution_id.pr" + + "oject}/{id.execution_id.domain}/{id.exec" + + "ution_id.name}/{id.node_id}\022\236\003\n\026GetDynam" + + "icNodeWorkflow\022-.flyteidl.admin.GetDynam" + + "icNodeWorkflowRequest\032+.flyteidl.admin.D" + + "ynamicNodeWorkflowResponse\"\247\002\202\323\344\223\002\240\002\022\177/a" + + "pi/v1/node_executions/{id.execution_id.p" + + "roject}/{id.execution_id.domain}/{id.exe" + + "cution_id.name}/{id.node_id}/dynamic_wor" + + "kflowZ\234\001\022\231\001/api/v1/node_executions/org/{" + + "id.execution_id.org}/{id.execution_id.pr" + + 
"oject}/{id.execution_id.domain}/{id.exec" + + "ution_id.name}/{id.node_id}/dynamic_work" + + "flow\022\371\002\n\022ListNodeExecutions\022(.flyteidl.a" + + "dmin.NodeExecutionListRequest\032!.flyteidl" + + ".admin.NodeExecutionList\"\225\002\202\323\344\223\002\216\002\022s/api" + + "/v1/node_executions/{workflow_execution_" + + "id.project}/{workflow_execution_id.domai" + + "n}/{workflow_execution_id.name}Z\226\001\022\223\001/ap" + + "i/v1/node_executions/org/{workflow_execu" + + "tion_id.org}/{workflow_execution_id.proj" + + "ect}/{workflow_execution_id.domain}/{wor" + + "kflow_execution_id.name}\022\217\010\n\031ListNodeExe" + + "cutionsForTask\022/.flyteidl.admin.NodeExec" + + "utionForTaskListRequest\032!.flyteidl.admin" + + ".NodeExecutionList\"\235\007\202\323\344\223\002\226\007\022\251\003/api/v1/c" + + "hildren/task_executions/{task_execution_" + + "id.node_execution_id.execution_id.projec" + + "t}/{task_execution_id.node_execution_id." + + "execution_id.domain}/{task_execution_id." + + "node_execution_id.execution_id.name}/{ta" + + "sk_execution_id.node_execution_id.node_i" + + "d}/{task_execution_id.task_id.project}/{" + + "task_execution_id.task_id.domain}/{task_" + + "execution_id.task_id.name}/{task_executi" + + "on_id.task_id.version}/{task_execution_i" + + "d.retry_attempt}Z\347\003\022\344\003/api/v1/children/o" + + "rg/{task_execution_id.node_execution_id." + + "execution_id.org}/task_executions/{task_" + + "execution_id.node_execution_id.execution" + + "_id.project}/{task_execution_id.node_exe" + + "cution_id.execution_id.domain}/{task_exe" + + "cution_id.node_execution_id.execution_id" + + ".name}/{task_execution_id.node_execution" + + "_id.node_id}/{task_execution_id.task_id." 
+ + "project}/{task_execution_id.task_id.doma" + + "in}/{task_execution_id.task_id.name}/{ta" + + "sk_execution_id.task_id.version}/{task_e" + + "xecution_id.retry_attempt}\022\203\003\n\024GetNodeEx" + + "ecutionData\022+.flyteidl.admin.NodeExecuti" + + "onGetDataRequest\032,.flyteidl.admin.NodeEx" + + "ecutionGetDataResponse\"\217\002\202\323\344\223\002\210\002\022s/api/v" + + "1/data/node_executions/{id.execution_id." + + "project}/{id.execution_id.domain}/{id.ex" + + "ecution_id.name}/{id.node_id}Z\220\001\022\215\001/api/" + + "v1/data/org/{id.execution_id.org}/node_e" + + "xecutions/{id.execution_id.project}/{id." + + "execution_id.domain}/{id.execution_id.na" + + "me}/{id.node_id}\022\250\001\n\017RegisterProject\022&.f" + + "lyteidl.admin.ProjectRegisterRequest\032\'.f" + + "lyteidl.admin.ProjectRegisterResponse\"D\202" + + "\323\344\223\002>\"\020/api/v1/projects:\001*Z\'\"\"/api/v1/pr" + + "ojects/org/{project.org}:\001*\022\227\001\n\rUpdatePr" + + "oject\022\027.flyteidl.admin.Project\032%.flyteid" + + "l.admin.ProjectUpdateResponse\"F\202\323\344\223\002@\032\025/" + + "api/v1/projects/{id}:\001*Z$\032\037/api/v1/proje" + + "cts/org/{org}/{id}:\001*\022\204\001\n\014ListProjects\022\"" + + ".flyteidl.admin.ProjectListRequest\032\030.fly" + + "teidl.admin.Projects\"6\202\323\344\223\0020\022\020/api/v1/pr" + + "ojectsZ\034\022\032/api/v1/projects/org/{org}\022\325\001\n" + + "\023CreateWorkflowEvent\022-.flyteidl.admin.Wo" + + "rkflowExecutionEventRequest\032..flyteidl.a" + + "dmin.WorkflowExecutionEventResponse\"_\202\323\344" + + "\223\002Y\"\030/api/v1/events/workflows:\001*Z:\"5/api" + + "/v1/events/org/{event.execution_id.org}/" + + "workflows:\001*\022\304\001\n\017CreateNodeEvent\022).flyte" + + "idl.admin.NodeExecutionEventRequest\032*.fl" + + "yteidl.admin.NodeExecutionEventResponse\"" + + "Z\202\323\344\223\002T\"\024/api/v1/events/nodes:\001*Z9\"4/api" + + "/v1/events/org/{event.id.execution_id.or" + + 
"g}/nodes:\001*\022\332\001\n\017CreateTaskEvent\022).flytei" + + "dl.admin.TaskExecutionEventRequest\032*.fly" + + "teidl.admin.TaskExecutionEventResponse\"p" + + "\202\323\344\223\002j\"\024/api/v1/events/tasks:\001*ZO\"J/api/" + + "v1/events/org/{event.parent_node_executi" + + "on_id.execution_id.org}/tasks:\001*\022\313\005\n\020Get" + + "TaskExecution\022\'.flyteidl.admin.TaskExecu" + + "tionGetRequest\032\035.flyteidl.admin.TaskExec" + + "ution\"\356\004\202\323\344\223\002\347\004\022\231\002/api/v1/task_execution" + + "s/{id.node_execution_id.execution_id.pro" + + "ject}/{id.node_execution_id.execution_id" + + ".domain}/{id.node_execution_id.execution" + + "_id.name}/{id.node_execution_id.node_id}" + + "/{id.task_id.project}/{id.task_id.domain" + + "}/{id.task_id.name}/{id.task_id.version}" + + "/{id.retry_attempt}Z\310\002\022\305\002/api/v1/task_ex" + + "ecutions/org/{id.node_execution_id.execu" + + "tion_id.org}/{id.node_execution_id.execu" + + "tion_id.project}/{id.node_execution_id.e" + + "xecution_id.domain}/{id.node_execution_i" + + "d.execution_id.name}/{id.node_execution_" + + "id.node_id}/{id.task_id.project}/{id.tas" + + "k_id.domain}/{id.task_id.name}/{id.task_" + + "id.version}/{id.retry_attempt}\022\361\003\n\022ListT" + + "askExecutions\022(.flyteidl.admin.TaskExecu" + + "tionListRequest\032!.flyteidl.admin.TaskExe" + + "cutionList\"\215\003\202\323\344\223\002\206\003\022\252\001/api/v1/task_exec" + + "utions/{node_execution_id.execution_id.p" + + "roject}/{node_execution_id.execution_id." + + "domain}/{node_execution_id.execution_id." 
+ + "name}/{node_execution_id.node_id}Z\326\001\022\323\001/" + + "api/v1/task_executions/org/{node_executi" + + "on_id.execution_id.org}/{node_execution_" + + "id.execution_id.project}/{node_execution" + + "_id.execution_id.domain}/{node_execution" + + "_id.execution_id.name}/{node_execution_i" + + "d.node_id}\022\354\005\n\024GetTaskExecutionData\022+.fl" + + "yteidl.admin.TaskExecutionGetDataRequest" + + "\032,.flyteidl.admin.TaskExecutionGetDataRe" + + "sponse\"\370\004\202\323\344\223\002\361\004\022\236\002/api/v1/data/task_exe" + + "cutions/{id.node_execution_id.execution_" + + "id.project}/{id.node_execution_id.execut" + + "ion_id.domain}/{id.node_execution_id.exe" + + "cution_id.name}/{id.node_execution_id.no" + + "de_id}/{id.task_id.project}/{id.task_id." + + "domain}/{id.task_id.name}/{id.task_id.ve" + + "rsion}/{id.retry_attempt}Z\315\002\022\312\002/api/v1/d" + + "ata/org/{id.node_execution_id.execution_" + + "id.org}/task_executions/{id.node_executi" + + "on_id.execution_id.project}/{id.node_exe" + + "cution_id.execution_id.domain}/{id.node_" + + "execution_id.execution_id.name}/{id.node" + + "_execution_id.node_id}/{id.task_id.proje" + + "ct}/{id.task_id.domain}/{id.task_id.name" + + "}/{id.task_id.version}/{id.retry_attempt" + + "}\022\313\002\n\035UpdateProjectDomainAttributes\0224.fl" + + "yteidl.admin.ProjectDomainAttributesUpda" + + "teRequest\0325.flyteidl.admin.ProjectDomain" + + "AttributesUpdateResponse\"\274\001\202\323\344\223\002\265\001\032J/api" + + "/v1/project_domain_attributes/{attribute" + + "s.project}/{attributes.domain}:\001*Zd\032_/ap" + + "i/v1/project_domain_attributes/org/{attr" + + "ibutes.org}/{attributes.project}/{attrib" + + "utes.domain}:\001*\022\203\002\n\032GetProjectDomainAttr" + + "ibutes\0221.flyteidl.admin.ProjectDomainAtt" + + "ributesGetRequest\0322.flyteidl.admin.Proje" + + "ctDomainAttributesGetResponse\"~\202\323\344\223\002x\0224/" + + "api/v1/project_domain_attributes/{projec" + + 
"t}/{domain}Z@\022>/api/v1/project_domain_at" + + "tributes/org/{org}/{project}/{domain}\022\223\002" + + "\n\035DeleteProjectDomainAttributes\0224.flytei" + + "dl.admin.ProjectDomainAttributesDeleteRe" + + "quest\0325.flyteidl.admin.ProjectDomainAttr" + + "ibutesDeleteResponse\"\204\001\202\323\344\223\002~*4/api/v1/p" + + "roject_domain_attributes/{project}/{doma" + + "in}:\001*ZC*>/api/v1/project_domain_attribu" + + "tes/org/{org}/{project}/{domain}:\001*\022\212\002\n\027" + + "UpdateProjectAttributes\022..flyteidl.admin" + + ".ProjectAttributesUpdateRequest\032/.flytei" + + "dl.admin.ProjectAttributesUpdateResponse" + + "\"\215\001\202\323\344\223\002\206\001\032//api/v1/project_attributes/{" + + "attributes.project}:\001*ZP\032K/api/v1/projec" + + "t_domain_attributes/org/{attributes.org}" + + "/{attributes.project}:\001*\022\330\001\n\024GetProjectA" + + "ttributes\022+.flyteidl.admin.ProjectAttrib" + + "utesGetRequest\032,.flyteidl.admin.ProjectA" + + "ttributesGetResponse\"e\202\323\344\223\002_\022$/api/v1/pr" + + "oject_attributes/{project}Z7\0225/api/v1/pr" + + "oject_domain_attributes/org/{org}/{proje" + + "ct}\022\347\001\n\027DeleteProjectAttributes\022..flytei" + + "dl.admin.ProjectAttributesDeleteRequest\032" + + "/.flyteidl.admin.ProjectAttributesDelete" + + "Response\"k\202\323\344\223\002e*$/api/v1/project_attrib" + + "utes/{project}:\001*Z:*5/api/v1/project_dom" + + "ain_attributes/org/{org}/{project}:\001*\022\334\002" + + "\n\030UpdateWorkflowAttributes\022/.flyteidl.ad" + + "min.WorkflowAttributesUpdateRequest\0320.fl" + + "yteidl.admin.WorkflowAttributesUpdateRes" + + "ponse\"\334\001\202\323\344\223\002\325\001\032Z/api/v1/workflow_attrib" + + "utes/{attributes.project}/{attributes.do" + + "main}/{attributes.workflow}:\001*Zt\032o/api/v" + + "1/workflow_attributes/org/{attributes.or" + + "g}/{attributes.project}/{attributes.doma" + + "in}/{attributes.workflow}:\001*\022\200\002\n\025GetWork" + + 
"flowAttributes\022,.flyteidl.admin.Workflow" + + "AttributesGetRequest\032-.flyteidl.admin.Wo" + + "rkflowAttributesGetResponse\"\211\001\202\323\344\223\002\202\001\0229/" + + "api/v1/workflow_attributes/{project}/{do" + + "main}/{workflow}ZE\022C/api/v1/workflow_att" + + "ributes/org/{org}/{project}/{domain}/{wo" + + "rkflow}\022\217\002\n\030DeleteWorkflowAttributes\022/.f" + + "lyteidl.admin.WorkflowAttributesDeleteRe" + + "quest\0320.flyteidl.admin.WorkflowAttribute" + + "sDeleteResponse\"\217\001\202\323\344\223\002\210\001*9/api/v1/workf" + + "low_attributes/{project}/{domain}/{workf" + + "low}:\001*ZH*C/api/v1/workflow_attributes/o" + + "rg/{org}/{project}/{domain}/{workflow}:\001" + + "*\022\312\001\n\027ListMatchableAttributes\022..flyteidl" + + ".admin.ListMatchableAttributesRequest\032/." + + "flyteidl.admin.ListMatchableAttributesRe" + + "sponse\"N\202\323\344\223\002H\022\034/api/v1/matchable_attrib" + + "utesZ(\022&/api/v1/matchable_attributes/org" + + "/{org}\022\350\001\n\021ListNamedEntities\022&.flyteidl." + "admin.NamedEntityListRequest\032\037.flyteidl." 
+ - "admin.NamedEntityList\"A\202\323\344\223\002;\0229/api/v1/n" + - "amed_entities/{resource_type}/{project}/" + - "{domain}\022\247\001\n\016GetNamedEntity\022%.flyteidl.a" + - "dmin.NamedEntityGetRequest\032\033.flyteidl.ad" + - "min.NamedEntity\"Q\202\323\344\223\002K\022I/api/v1/named_e" + + "admin.NamedEntityList\"\211\001\202\323\344\223\002\202\001\0229/api/v1" + + "/named_entities/{resource_type}/{project" + + "}/{domain}ZE\022C/api/v1/named_entities/org" + + "/{org}/{resource_type}/{project}/{domain" + + "}\022\203\002\n\016GetNamedEntity\022%.flyteidl.admin.Na" + + "medEntityGetRequest\032\033.flyteidl.admin.Nam" + + "edEntity\"\254\001\202\323\344\223\002\245\001\022I/api/v1/named_entiti" + + "es/{resource_type}/{id.project}/{id.doma" + + "in}/{id.name}ZX\022V/api/v1/named_entities/" + + "org/{id.org}/{resource_type}/{id.project" + + "}/{id.domain}/{id.name}\022\235\002\n\021UpdateNamedE" + + "ntity\022(.flyteidl.admin.NamedEntityUpdate" + + "Request\032).flyteidl.admin.NamedEntityUpda" + + "teResponse\"\262\001\202\323\344\223\002\253\001\032I/api/v1/named_enti" + + "ties/{resource_type}/{id.project}/{id.do" + + "main}/{id.name}:\001*Z[\032V/api/v1/named_enti" + + "ties/org/{id.org}/{resource_type}/{id.pr" + + "oject}/{id.domain}/{id.name}:\001*\022l\n\nGetVe" + + "rsion\022!.flyteidl.admin.GetVersionRequest" + + "\032\".flyteidl.admin.GetVersionResponse\"\027\202\323" + + "\344\223\002\021\022\017/api/v1/version\022\266\002\n\024GetDescription" + + "Entity\022 .flyteidl.admin.ObjectGetRequest" + + "\032!.flyteidl.admin.DescriptionEntity\"\330\001\202\323" + + "\344\223\002\321\001\022_/api/v1/description_entities/{id." 
+ + "resource_type}/{id.project}/{id.domain}/" + + "{id.name}/{id.version}Zn\022l/api/v1/descri" + + "ption_entities/org/{id.org}/{id.resource" + + "_type}/{id.project}/{id.domain}/{id.name" + + "}/{id.version}\022\310\003\n\027ListDescriptionEntiti" + + "es\022,.flyteidl.admin.DescriptionEntityLis" + + "tRequest\032%.flyteidl.admin.DescriptionEnt" + + "ityList\"\327\002\202\323\344\223\002\320\002\022O/api/v1/description_e" + "ntities/{resource_type}/{id.project}/{id" + - ".domain}/{id.name}\022\276\001\n\021UpdateNamedEntity" + - "\022(.flyteidl.admin.NamedEntityUpdateReque" + - "st\032).flyteidl.admin.NamedEntityUpdateRes" + - "ponse\"T\202\323\344\223\002N\032I/api/v1/named_entities/{r" + - "esource_type}/{id.project}/{id.domain}/{" + - "id.name}:\001*\022l\n\nGetVersion\022!.flyteidl.adm" + - "in.GetVersionRequest\032\".flyteidl.admin.Ge" + - "tVersionResponse\"\027\202\323\344\223\002\021\022\017/api/v1/versio" + - "n\022\304\001\n\024GetDescriptionEntity\022 .flyteidl.ad" + - "min.ObjectGetRequest\032!.flyteidl.admin.De" + - "scriptionEntity\"g\202\323\344\223\002a\022_/api/v1/descrip" + - "tion_entities/{id.resource_type}/{id.pro" + - "ject}/{id.domain}/{id.name}/{id.version}" + - "\022\222\002\n\027ListDescriptionEntities\022,.flyteidl." 
+ - "admin.DescriptionEntityListRequest\032%.fly" + - "teidl.admin.DescriptionEntityList\"\241\001\202\323\344\223" + - "\002\232\001\022O/api/v1/description_entities/{resou" + - "rce_type}/{id.project}/{id.domain}/{id.n" + - "ame}ZG\022E/api/v1/description_entities/{re" + - "source_type}/{id.project}/{id.domain}\022\305\001" + - "\n\023GetExecutionMetrics\0222.flyteidl.admin.W" + - "orkflowExecutionGetMetricsRequest\0323.flyt" + - "eidl.admin.WorkflowExecutionGetMetricsRe" + - "sponse\"E\202\323\344\223\002?\022=/api/v1/metrics/executio" + - "ns/{id.project}/{id.domain}/{id.name}B?Z" + - "=github.com/flyteorg/flyte/flyteidl/gen/" + - "pb-go/flyteidl/serviceb\006proto3" + ".domain}/{id.name}Z^\022\\/api/v1/descriptio" + + "n_entities/org/{id.org}/{resource_type}/" + + "{id.project}/{id.domain}/{id.name}ZG\022E/a" + + "pi/v1/description_entities/{resource_typ" + + "e}/{id.project}/{id.domain}ZT\022R/api/v1/d" + + "escription_entities/org/{id.org}/{resour" + + "ce_type}/{id.project}/{id.domain}\022\225\002\n\023Ge" + + "tExecutionMetrics\0222.flyteidl.admin.Workf" + + "lowExecutionGetMetricsRequest\0323.flyteidl" + + ".admin.WorkflowExecutionGetMetricsRespon", + "se\"\224\001\202\323\344\223\002\215\001\022=/api/v1/metrics/executions" + + "/{id.project}/{id.domain}/{id.name}ZL\022J/" + + "api/v1/metrics/executions/org/{id.org}/{" + + "id.project}/{id.domain}/{id.name}B?Z=git" + + "hub.com/flyteorg/flyte/flyteidl/gen/pb-g" + + "o/flyteidl/serviceb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { diff --git a/flyteidl/gen/pb-java/flyteidl/service/Agent.java b/flyteidl/gen/pb-java/flyteidl/service/Agent.java index a4d582fab0..7242f5ba36 100644 --- a/flyteidl/gen/pb-java/flyteidl/service/Agent.java +++ b/flyteidl/gen/pb-java/flyteidl/service/Agent.java @@ -25,22 +25,27 @@ public static void registerAllExtensions( java.lang.String[] descriptorData = { "\n\034flyteidl/service/agent.proto\022\020flyteidl" + ".service\032\034google/api/annotations.proto\032\032" + - "flyteidl/admin/agent.proto2\217\002\n\021AsyncAgen" + + "flyteidl/admin/agent.proto2\314\003\n\021AsyncAgen" + "tService\022U\n\nCreateTask\022!.flyteidl.admin." + "CreateTaskRequest\032\".flyteidl.admin.Creat" + "eTaskResponse\"\000\022L\n\007GetTask\022\036.flyteidl.ad" + "min.GetTaskRequest\032\037.flyteidl.admin.GetT" + "askResponse\"\000\022U\n\nDeleteTask\022!.flyteidl.a" + "dmin.DeleteTaskRequest\032\".flyteidl.admin." + - "DeleteTaskResponse\"\0002\360\001\n\024AgentMetadataSe" + - "rvice\022k\n\010GetAgent\022\037.flyteidl.admin.GetAg" + - "entRequest\032 .flyteidl.admin.GetAgentResp" + - "onse\"\034\202\323\344\223\002\026\022\024/api/v1/agent/{name}\022k\n\nLi" + - "stAgents\022!.flyteidl.admin.ListAgentsRequ" + - "est\032\".flyteidl.admin.ListAgentsResponse\"" + - "\026\202\323\344\223\002\020\022\016/api/v1/agentsB?Z=github.com/fl" + - "yteorg/flyte/flyteidl/gen/pb-go/flyteidl" + - "/serviceb\006proto3" + "DeleteTaskResponse\"\000\022a\n\016GetTaskMetrics\022%" + + ".flyteidl.admin.GetTaskMetricsRequest\032&." + + "flyteidl.admin.GetTaskMetricsResponse\"\000\022" + + "X\n\013GetTaskLogs\022\".flyteidl.admin.GetTaskL" + + "ogsRequest\032#.flyteidl.admin.GetTaskLogsR" + + "esponse\"\0002\360\001\n\024AgentMetadataService\022k\n\010Ge" + + "tAgent\022\037.flyteidl.admin.GetAgentRequest\032" + + " .flyteidl.admin.GetAgentResponse\"\034\202\323\344\223\002" + + "\026\022\024/api/v1/agent/{name}\022k\n\nListAgents\022!." 
+ + "flyteidl.admin.ListAgentsRequest\032\".flyte" + + "idl.admin.ListAgentsResponse\"\026\202\323\344\223\002\020\022\016/a" + + "pi/v1/agentsB?Z=github.com/flyteorg/flyt" + + "e/flyteidl/gen/pb-go/flyteidl/serviceb\006p" + + "roto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { diff --git a/flyteidl/gen/pb-java/flyteidl/service/ExternalPluginServiceOuterClass.java b/flyteidl/gen/pb-java/flyteidl/service/ExternalPluginServiceOuterClass.java index 2336dd1564..7fe6508f1a 100644 --- a/flyteidl/gen/pb-java/flyteidl/service/ExternalPluginServiceOuterClass.java +++ b/flyteidl/gen/pb-java/flyteidl/service/ExternalPluginServiceOuterClass.java @@ -4658,30 +4658,29 @@ public flyteidl.service.ExternalPluginServiceOuterClass.TaskDeleteResponse getDe "\n.flyteidl/service/external_plugin_servi" + "ce.proto\022\020flyteidl.service\032\034flyteidl/cor" + "e/literals.proto\032\031flyteidl/core/tasks.pr" + - "oto\032\035flyteidl/core/interface.proto\"\210\001\n\021T" + - "askCreateRequest\022)\n\006inputs\030\001 \001(\0132\031.flyte" + - "idl.core.LiteralMap\022-\n\010template\030\002 \001(\0132\033." + - "flyteidl.core.TaskTemplate\022\025\n\routput_pre" + - "fix\030\003 \001(\t:\002\030\001\"(\n\022TaskCreateResponse\022\016\n\006j" + - "ob_id\030\001 \001(\t:\002\030\001\"7\n\016TaskGetRequest\022\021\n\ttas" + - "k_type\030\001 \001(\t\022\016\n\006job_id\030\002 \001(\t:\002\030\001\"i\n\017Task" + - "GetResponse\022&\n\005state\030\001 \001(\0162\027.flyteidl.se" + - "rvice.State\022*\n\007outputs\030\002 \001(\0132\031.flyteidl." 
+ - "core.LiteralMap:\002\030\001\":\n\021TaskDeleteRequest" + - "\022\021\n\ttask_type\030\001 \001(\t\022\016\n\006job_id\030\002 \001(\t:\002\030\001\"" + - "\030\n\022TaskDeleteResponse:\002\030\001*b\n\005State\022\025\n\021RE" + - "TRYABLE_FAILURE\020\000\022\025\n\021PERMANENT_FAILURE\020\001" + - "\022\013\n\007PENDING\020\002\022\013\n\007RUNNING\020\003\022\r\n\tSUCCEEDED\020" + - "\004\032\002\030\0012\250\002\n\025ExternalPluginService\022\\\n\nCreat" + - "eTask\022#.flyteidl.service.TaskCreateReque" + - "st\032$.flyteidl.service.TaskCreateResponse" + - "\"\003\210\002\001\022S\n\007GetTask\022 .flyteidl.service.Task" + - "GetRequest\032!.flyteidl.service.TaskGetRes" + - "ponse\"\003\210\002\001\022\\\n\nDeleteTask\022#.flyteidl.serv" + - "ice.TaskDeleteRequest\032$.flyteidl.service" + - ".TaskDeleteResponse\"\003\210\002\001B?Z=github.com/f" + - "lyteorg/flyte/flyteidl/gen/pb-go/flyteid" + - "l/serviceb\006proto3" + "oto\"\210\001\n\021TaskCreateRequest\022)\n\006inputs\030\001 \001(" + + "\0132\031.flyteidl.core.LiteralMap\022-\n\010template" + + "\030\002 \001(\0132\033.flyteidl.core.TaskTemplate\022\025\n\ro" + + "utput_prefix\030\003 \001(\t:\002\030\001\"(\n\022TaskCreateResp" + + "onse\022\016\n\006job_id\030\001 \001(\t:\002\030\001\"7\n\016TaskGetReque" + + "st\022\021\n\ttask_type\030\001 \001(\t\022\016\n\006job_id\030\002 \001(\t:\002\030" + + "\001\"i\n\017TaskGetResponse\022&\n\005state\030\001 \001(\0162\027.fl" + + "yteidl.service.State\022*\n\007outputs\030\002 \001(\0132\031." 
+ + "flyteidl.core.LiteralMap:\002\030\001\":\n\021TaskDele" + + "teRequest\022\021\n\ttask_type\030\001 \001(\t\022\016\n\006job_id\030\002" + + " \001(\t:\002\030\001\"\030\n\022TaskDeleteResponse:\002\030\001*b\n\005St" + + "ate\022\025\n\021RETRYABLE_FAILURE\020\000\022\025\n\021PERMANENT_" + + "FAILURE\020\001\022\013\n\007PENDING\020\002\022\013\n\007RUNNING\020\003\022\r\n\tS" + + "UCCEEDED\020\004\032\002\030\0012\250\002\n\025ExternalPluginService" + + "\022\\\n\nCreateTask\022#.flyteidl.service.TaskCr" + + "eateRequest\032$.flyteidl.service.TaskCreat" + + "eResponse\"\003\210\002\001\022S\n\007GetTask\022 .flyteidl.ser" + + "vice.TaskGetRequest\032!.flyteidl.service.T" + + "askGetResponse\"\003\210\002\001\022\\\n\nDeleteTask\022#.flyt" + + "eidl.service.TaskDeleteRequest\032$.flyteid" + + "l.service.TaskDeleteResponse\"\003\210\002\001B?Z=git" + + "hub.com/flyteorg/flyte/flyteidl/gen/pb-g" + + "o/flyteidl/serviceb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. 
InternalDescriptorAssigner() { @@ -4696,7 +4695,6 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( new com.google.protobuf.Descriptors.FileDescriptor[] { flyteidl.core.Literals.getDescriptor(), flyteidl.core.Tasks.getDescriptor(), - flyteidl.core.Interface.getDescriptor(), }, assigner); internal_static_flyteidl_service_TaskCreateRequest_descriptor = getDescriptor().getMessageTypes().get(0); @@ -4736,7 +4734,6 @@ public com.google.protobuf.ExtensionRegistry assignDescriptors( new java.lang.String[] { }); flyteidl.core.Literals.getDescriptor(); flyteidl.core.Tasks.getDescriptor(); - flyteidl.core.Interface.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) diff --git a/flyteidl/gen/pb-java/flyteidl/service/Signal.java b/flyteidl/gen/pb-java/flyteidl/service/Signal.java index bf83c2a8b0..46fcf38caa 100644 --- a/flyteidl/gen/pb-java/flyteidl/service/Signal.java +++ b/flyteidl/gen/pb-java/flyteidl/service/Signal.java @@ -25,19 +25,24 @@ public static void registerAllExtensions( java.lang.String[] descriptorData = { "\n\035flyteidl/service/signal.proto\022\020flyteid" + "l.service\032\034google/api/annotations.proto\032" + - "\033flyteidl/admin/signal.proto2\232\003\n\rSignalS" + + "\033flyteidl/admin/signal.proto2\336\004\n\rSignalS" + "ervice\022W\n\021GetOrCreateSignal\022(.flyteidl.a" + "dmin.SignalGetOrCreateRequest\032\026.flyteidl" + - ".admin.Signal\"\000\022\301\001\n\013ListSignals\022!.flytei" + + ".admin.Signal\"\000\022\324\002\n\013ListSignals\022!.flytei" + "dl.admin.SignalListRequest\032\032.flyteidl.ad" + - "min.SignalList\"s\202\323\344\223\002m\022k/api/v1/signals/" + - "{workflow_execution_id.project}/{workflo" + - "w_execution_id.domain}/{workflow_executi" + - "on_id.name}\022l\n\tSetSignal\022 .flyteidl.admi" + - "n.SignalSetRequest\032!.flyteidl.admin.Sign" + - "alSetResponse\"\032\202\323\344\223\002\024\"\017/api/v1/signals:\001" + - "*B?Z=github.com/flyteorg/flyte/flyteidl/" + - 
"gen/pb-go/flyteidl/serviceb\006proto3" + "min.SignalList\"\205\002\202\323\344\223\002\376\001\022k/api/v1/signal" + + "s/{workflow_execution_id.project}/{workf" + + "low_execution_id.domain}/{workflow_execu" + + "tion_id.name}Z\216\001\022\213\001/api/v1/signals/org/{" + + "workflow_execution_id.org}/{workflow_exe" + + "cution_id.project}/{workflow_execution_i" + + "d.domain}/{workflow_execution_id.name}\022\234" + + "\001\n\tSetSignal\022 .flyteidl.admin.SignalSetR" + + "equest\032!.flyteidl.admin.SignalSetRespons" + + "e\"J\202\323\344\223\002D\"\017/api/v1/signals:\001*Z.\")/api/v1" + + "/signals/org/{id.execution_id.org}:\001*B?Z" + + "=github.com/flyteorg/flyte/flyteidl/gen/" + + "pb-go/flyteidl/serviceb\006proto3" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() { diff --git a/flyteidl/gen/pb-js/flyteidl.d.ts b/flyteidl/gen/pb-js/flyteidl.d.ts index af564753d4..5fe0c697d8 100644 --- a/flyteidl/gen/pb-js/flyteidl.d.ts +++ b/flyteidl/gen/pb-js/flyteidl.d.ts @@ -78,6 +78,9 @@ export namespace flyteidl { /** ArtifactBindingData partitionKey */ partitionKey?: (string|null); + /** ArtifactBindingData bindToTimePartition */ + bindToTimePartition?: (boolean|null); + /** ArtifactBindingData transform */ transform?: (string|null); } @@ -97,9 +100,15 @@ export namespace flyteidl { /** ArtifactBindingData partitionKey. */ public partitionKey: string; + /** ArtifactBindingData bindToTimePartition. */ + public bindToTimePartition: boolean; + /** ArtifactBindingData transform. */ public transform: string; + /** ArtifactBindingData partitionData. */ + public partitionData?: ("partitionKey"|"bindToTimePartition"); + /** * Creates a new ArtifactBindingData instance using the specified properties. 
* @param [properties] Properties to set @@ -191,6 +200,9 @@ export namespace flyteidl { /** LabelValue staticValue */ staticValue?: (string|null); + /** LabelValue timeValue */ + timeValue?: (google.protobuf.ITimestamp|null); + /** LabelValue triggeredBinding */ triggeredBinding?: (flyteidl.core.IArtifactBindingData|null); @@ -210,6 +222,9 @@ export namespace flyteidl { /** LabelValue staticValue. */ public staticValue: string; + /** LabelValue timeValue. */ + public timeValue?: (google.protobuf.ITimestamp|null); + /** LabelValue triggeredBinding. */ public triggeredBinding?: (flyteidl.core.IArtifactBindingData|null); @@ -217,7 +232,7 @@ export namespace flyteidl { public inputBinding?: (flyteidl.core.IInputBindingData|null); /** LabelValue value. */ - public value?: ("staticValue"|"triggeredBinding"|"inputBinding"); + public value?: ("staticValue"|"timeValue"|"triggeredBinding"|"inputBinding"); /** * Creates a new LabelValue instance using the specified properties. @@ -304,6 +319,58 @@ export namespace flyteidl { public static verify(message: { [k: string]: any }): (string|null); } + /** Properties of a TimePartition. */ + interface ITimePartition { + + /** TimePartition value */ + value?: (flyteidl.core.ILabelValue|null); + } + + /** Represents a TimePartition. */ + class TimePartition implements ITimePartition { + + /** + * Constructs a new TimePartition. + * @param [properties] Properties to set + */ + constructor(properties?: flyteidl.core.ITimePartition); + + /** TimePartition value. */ + public value?: (flyteidl.core.ILabelValue|null); + + /** + * Creates a new TimePartition instance using the specified properties. + * @param [properties] Properties to set + * @returns TimePartition instance + */ + public static create(properties?: flyteidl.core.ITimePartition): flyteidl.core.TimePartition; + + /** + * Encodes the specified TimePartition message. Does not implicitly {@link flyteidl.core.TimePartition.verify|verify} messages. 
+ * @param message TimePartition message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: flyteidl.core.ITimePartition, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a TimePartition message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns TimePartition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): flyteidl.core.TimePartition; + + /** + * Verifies a TimePartition message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + } + /** Properties of an ArtifactID. */ interface IArtifactID { @@ -315,6 +382,9 @@ export namespace flyteidl { /** ArtifactID partitions */ partitions?: (flyteidl.core.IPartitions|null); + + /** ArtifactID timePartition */ + timePartition?: (flyteidl.core.ITimePartition|null); } /** Represents an ArtifactID. */ @@ -335,8 +405,8 @@ export namespace flyteidl { /** ArtifactID partitions. */ public partitions?: (flyteidl.core.IPartitions|null); - /** ArtifactID dimensions. */ - public dimensions?: "partitions"; + /** ArtifactID timePartition. */ + public timePartition?: (flyteidl.core.ITimePartition|null); /** * Creates a new ArtifactID instance using the specified properties. @@ -502,64 +572,6 @@ export namespace flyteidl { public static verify(message: { [k: string]: any }): (string|null); } - /** Properties of a Trigger. */ - interface ITrigger { - - /** Trigger triggerId */ - triggerId?: (flyteidl.core.IIdentifier|null); - - /** Trigger triggers */ - triggers?: (flyteidl.core.IArtifactID[]|null); - } - - /** Represents a Trigger. 
*/ - class Trigger implements ITrigger { - - /** - * Constructs a new Trigger. - * @param [properties] Properties to set - */ - constructor(properties?: flyteidl.core.ITrigger); - - /** Trigger triggerId. */ - public triggerId?: (flyteidl.core.IIdentifier|null); - - /** Trigger triggers. */ - public triggers: flyteidl.core.IArtifactID[]; - - /** - * Creates a new Trigger instance using the specified properties. - * @param [properties] Properties to set - * @returns Trigger instance - */ - public static create(properties?: flyteidl.core.ITrigger): flyteidl.core.Trigger; - - /** - * Encodes the specified Trigger message. Does not implicitly {@link flyteidl.core.Trigger.verify|verify} messages. - * @param message Trigger message or plain object to encode - * @param [writer] Writer to encode to - * @returns Writer - */ - public static encode(message: flyteidl.core.ITrigger, writer?: $protobuf.Writer): $protobuf.Writer; - - /** - * Decodes a Trigger message from the specified reader or buffer. - * @param reader Reader or buffer to decode from - * @param [length] Message length if known beforehand - * @returns Trigger - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): flyteidl.core.Trigger; - - /** - * Verifies a Trigger message. - * @param message Plain object to verify - * @returns `null` if valid, otherwise the reason why it is not - */ - public static verify(message: { [k: string]: any }): (string|null); - } - /** ResourceType enum. */ enum ResourceType { UNSPECIFIED = 0, @@ -586,6 +598,9 @@ export namespace flyteidl { /** Identifier version */ version?: (string|null); + + /** Identifier org */ + org?: (string|null); } /** Represents an Identifier. */ @@ -612,6 +627,9 @@ export namespace flyteidl { /** Identifier version. */ public version: string; + /** Identifier org. 
*/ + public org: string; + /** * Creates a new Identifier instance using the specified properties. * @param [properties] Properties to set @@ -656,6 +674,9 @@ export namespace flyteidl { /** WorkflowExecutionIdentifier name */ name?: (string|null); + + /** WorkflowExecutionIdentifier org */ + org?: (string|null); } /** Represents a WorkflowExecutionIdentifier. */ @@ -676,6 +697,9 @@ export namespace flyteidl { /** WorkflowExecutionIdentifier name. */ public name: string; + /** WorkflowExecutionIdentifier org. */ + public org: string; + /** * Creates a new WorkflowExecutionIdentifier instance using the specified properties. * @param [properties] Properties to set @@ -897,7 +921,8 @@ export namespace flyteidl { CACHE_POPULATED = 3, CACHE_LOOKUP_FAILURE = 4, CACHE_PUT_FAILURE = 5, - CACHE_SKIPPED = 6 + CACHE_SKIPPED = 6, + CACHE_EVICTED = 7 } /** Properties of a CatalogArtifactTag. */ @@ -7352,6 +7377,64 @@ export namespace flyteidl { public static verify(message: { [k: string]: any }): (string|null); } + /** Properties of an ExecutionMetricResult. */ + interface IExecutionMetricResult { + + /** ExecutionMetricResult metric */ + metric?: (string|null); + + /** ExecutionMetricResult data */ + data?: (google.protobuf.IStruct|null); + } + + /** Represents an ExecutionMetricResult. */ + class ExecutionMetricResult implements IExecutionMetricResult { + + /** + * Constructs a new ExecutionMetricResult. + * @param [properties] Properties to set + */ + constructor(properties?: flyteidl.core.IExecutionMetricResult); + + /** ExecutionMetricResult metric. */ + public metric: string; + + /** ExecutionMetricResult data. */ + public data?: (google.protobuf.IStruct|null); + + /** + * Creates a new ExecutionMetricResult instance using the specified properties. 
+ * @param [properties] Properties to set + * @returns ExecutionMetricResult instance + */ + public static create(properties?: flyteidl.core.IExecutionMetricResult): flyteidl.core.ExecutionMetricResult; + + /** + * Encodes the specified ExecutionMetricResult message. Does not implicitly {@link flyteidl.core.ExecutionMetricResult.verify|verify} messages. + * @param message ExecutionMetricResult message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: flyteidl.core.IExecutionMetricResult, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes an ExecutionMetricResult message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns ExecutionMetricResult + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): flyteidl.core.ExecutionMetricResult; + + /** + * Verifies an ExecutionMetricResult message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + } + /** Properties of a WorkflowClosure. 
*/ interface IWorkflowClosure { @@ -7420,15 +7503,9 @@ export namespace flyteidl { /** CloudEventWorkflowExecution rawEvent */ rawEvent?: (flyteidl.event.IWorkflowExecutionEvent|null); - /** CloudEventWorkflowExecution outputData */ - outputData?: (flyteidl.core.ILiteralMap|null); - /** CloudEventWorkflowExecution outputInterface */ outputInterface?: (flyteidl.core.ITypedInterface|null); - /** CloudEventWorkflowExecution inputData */ - inputData?: (flyteidl.core.ILiteralMap|null); - /** CloudEventWorkflowExecution artifactIds */ artifactIds?: (flyteidl.core.IArtifactID[]|null); @@ -7454,15 +7531,9 @@ export namespace flyteidl { /** CloudEventWorkflowExecution rawEvent. */ public rawEvent?: (flyteidl.event.IWorkflowExecutionEvent|null); - /** CloudEventWorkflowExecution outputData. */ - public outputData?: (flyteidl.core.ILiteralMap|null); - /** CloudEventWorkflowExecution outputInterface. */ public outputInterface?: (flyteidl.core.ITypedInterface|null); - /** CloudEventWorkflowExecution inputData. */ - public inputData?: (flyteidl.core.ILiteralMap|null); - /** CloudEventWorkflowExecution artifactIds. */ public artifactIds: flyteidl.core.IArtifactID[]; @@ -7517,15 +7588,9 @@ export namespace flyteidl { /** CloudEventNodeExecution taskExecId */ taskExecId?: (flyteidl.core.ITaskExecutionIdentifier|null); - /** CloudEventNodeExecution outputData */ - outputData?: (flyteidl.core.ILiteralMap|null); - /** CloudEventNodeExecution outputInterface */ outputInterface?: (flyteidl.core.ITypedInterface|null); - /** CloudEventNodeExecution inputData */ - inputData?: (flyteidl.core.ILiteralMap|null); - /** CloudEventNodeExecution artifactIds */ artifactIds?: (flyteidl.core.IArtifactID[]|null); @@ -7551,15 +7616,9 @@ export namespace flyteidl { /** CloudEventNodeExecution taskExecId. */ public taskExecId?: (flyteidl.core.ITaskExecutionIdentifier|null); - /** CloudEventNodeExecution outputData. 
*/ - public outputData?: (flyteidl.core.ILiteralMap|null); - /** CloudEventNodeExecution outputInterface. */ public outputInterface?: (flyteidl.core.ITypedInterface|null); - /** CloudEventNodeExecution inputData. */ - public inputData?: (flyteidl.core.ILiteralMap|null); - /** CloudEventNodeExecution artifactIds. */ public artifactIds: flyteidl.core.IArtifactID[]; @@ -7669,8 +7728,8 @@ export namespace flyteidl { /** CloudEventExecutionStart artifactIds */ artifactIds?: (flyteidl.core.IArtifactID[]|null); - /** CloudEventExecutionStart artifactKeys */ - artifactKeys?: (string[]|null); + /** CloudEventExecutionStart artifactTrackers */ + artifactTrackers?: (string[]|null); /** CloudEventExecutionStart principal */ principal?: (string|null); @@ -7697,8 +7756,8 @@ export namespace flyteidl { /** CloudEventExecutionStart artifactIds. */ public artifactIds: flyteidl.core.IArtifactID[]; - /** CloudEventExecutionStart artifactKeys. */ - public artifactKeys: string[]; + /** CloudEventExecutionStart artifactTrackers. */ + public artifactTrackers: string[]; /** CloudEventExecutionStart principal. */ public principal: string; @@ -9121,6 +9180,9 @@ export namespace flyteidl { /** Resource logLinks */ logLinks?: (flyteidl.core.ITaskLog[]|null); + + /** Resource phase */ + phase?: (flyteidl.core.TaskExecution.Phase|null); } /** Represents a Resource. */ @@ -9144,6 +9206,9 @@ export namespace flyteidl { /** Resource logLinks. */ public logLinks: flyteidl.core.ITaskLog[]; + /** Resource phase. */ + public phase: flyteidl.core.TaskExecution.Phase; + /** * Creates a new Resource instance using the specified properties. * @param [properties] Properties to set @@ -9541,6 +9606,268 @@ export namespace flyteidl { public static verify(message: { [k: string]: any }): (string|null); } + /** Properties of a GetTaskMetricsRequest. 
*/ + interface IGetTaskMetricsRequest { + + /** GetTaskMetricsRequest taskType */ + taskType?: (string|null); + + /** GetTaskMetricsRequest resourceMeta */ + resourceMeta?: (Uint8Array|null); + + /** GetTaskMetricsRequest queries */ + queries?: (string[]|null); + + /** GetTaskMetricsRequest startTime */ + startTime?: (google.protobuf.ITimestamp|null); + + /** GetTaskMetricsRequest endTime */ + endTime?: (google.protobuf.ITimestamp|null); + + /** GetTaskMetricsRequest step */ + step?: (google.protobuf.IDuration|null); + } + + /** Represents a GetTaskMetricsRequest. */ + class GetTaskMetricsRequest implements IGetTaskMetricsRequest { + + /** + * Constructs a new GetTaskMetricsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: flyteidl.admin.IGetTaskMetricsRequest); + + /** GetTaskMetricsRequest taskType. */ + public taskType: string; + + /** GetTaskMetricsRequest resourceMeta. */ + public resourceMeta: Uint8Array; + + /** GetTaskMetricsRequest queries. */ + public queries: string[]; + + /** GetTaskMetricsRequest startTime. */ + public startTime?: (google.protobuf.ITimestamp|null); + + /** GetTaskMetricsRequest endTime. */ + public endTime?: (google.protobuf.ITimestamp|null); + + /** GetTaskMetricsRequest step. */ + public step?: (google.protobuf.IDuration|null); + + /** + * Creates a new GetTaskMetricsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns GetTaskMetricsRequest instance + */ + public static create(properties?: flyteidl.admin.IGetTaskMetricsRequest): flyteidl.admin.GetTaskMetricsRequest; + + /** + * Encodes the specified GetTaskMetricsRequest message. Does not implicitly {@link flyteidl.admin.GetTaskMetricsRequest.verify|verify} messages. 
+ * @param message GetTaskMetricsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: flyteidl.admin.IGetTaskMetricsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GetTaskMetricsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GetTaskMetricsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): flyteidl.admin.GetTaskMetricsRequest; + + /** + * Verifies a GetTaskMetricsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + } + + /** Properties of a GetTaskMetricsResponse. */ + interface IGetTaskMetricsResponse { + + /** GetTaskMetricsResponse results */ + results?: (flyteidl.core.IExecutionMetricResult[]|null); + } + + /** Represents a GetTaskMetricsResponse. */ + class GetTaskMetricsResponse implements IGetTaskMetricsResponse { + + /** + * Constructs a new GetTaskMetricsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: flyteidl.admin.IGetTaskMetricsResponse); + + /** GetTaskMetricsResponse results. */ + public results: flyteidl.core.IExecutionMetricResult[]; + + /** + * Creates a new GetTaskMetricsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns GetTaskMetricsResponse instance + */ + public static create(properties?: flyteidl.admin.IGetTaskMetricsResponse): flyteidl.admin.GetTaskMetricsResponse; + + /** + * Encodes the specified GetTaskMetricsResponse message. 
Does not implicitly {@link flyteidl.admin.GetTaskMetricsResponse.verify|verify} messages. + * @param message GetTaskMetricsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: flyteidl.admin.IGetTaskMetricsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GetTaskMetricsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GetTaskMetricsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): flyteidl.admin.GetTaskMetricsResponse; + + /** + * Verifies a GetTaskMetricsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + } + + /** Properties of a GetTaskLogsRequest. */ + interface IGetTaskLogsRequest { + + /** GetTaskLogsRequest taskType */ + taskType?: (string|null); + + /** GetTaskLogsRequest resourceMeta */ + resourceMeta?: (Uint8Array|null); + + /** GetTaskLogsRequest lines */ + lines?: (Long|null); + + /** GetTaskLogsRequest token */ + token?: (string|null); + } + + /** Represents a GetTaskLogsRequest. */ + class GetTaskLogsRequest implements IGetTaskLogsRequest { + + /** + * Constructs a new GetTaskLogsRequest. + * @param [properties] Properties to set + */ + constructor(properties?: flyteidl.admin.IGetTaskLogsRequest); + + /** GetTaskLogsRequest taskType. */ + public taskType: string; + + /** GetTaskLogsRequest resourceMeta. */ + public resourceMeta: Uint8Array; + + /** GetTaskLogsRequest lines. */ + public lines: Long; + + /** GetTaskLogsRequest token. 
*/ + public token: string; + + /** + * Creates a new GetTaskLogsRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns GetTaskLogsRequest instance + */ + public static create(properties?: flyteidl.admin.IGetTaskLogsRequest): flyteidl.admin.GetTaskLogsRequest; + + /** + * Encodes the specified GetTaskLogsRequest message. Does not implicitly {@link flyteidl.admin.GetTaskLogsRequest.verify|verify} messages. + * @param message GetTaskLogsRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: flyteidl.admin.IGetTaskLogsRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GetTaskLogsRequest message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GetTaskLogsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): flyteidl.admin.GetTaskLogsRequest; + + /** + * Verifies a GetTaskLogsRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + } + + /** Properties of a GetTaskLogsResponse. */ + interface IGetTaskLogsResponse { + + /** GetTaskLogsResponse results */ + results?: (string[]|null); + + /** GetTaskLogsResponse token */ + token?: (string|null); + } + + /** Represents a GetTaskLogsResponse. */ + class GetTaskLogsResponse implements IGetTaskLogsResponse { + + /** + * Constructs a new GetTaskLogsResponse. + * @param [properties] Properties to set + */ + constructor(properties?: flyteidl.admin.IGetTaskLogsResponse); + + /** GetTaskLogsResponse results. 
*/ + public results: string[]; + + /** GetTaskLogsResponse token. */ + public token: string; + + /** + * Creates a new GetTaskLogsResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns GetTaskLogsResponse instance + */ + public static create(properties?: flyteidl.admin.IGetTaskLogsResponse): flyteidl.admin.GetTaskLogsResponse; + + /** + * Encodes the specified GetTaskLogsResponse message. Does not implicitly {@link flyteidl.admin.GetTaskLogsResponse.verify|verify} messages. + * @param message GetTaskLogsResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: flyteidl.admin.IGetTaskLogsResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GetTaskLogsResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GetTaskLogsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): flyteidl.admin.GetTaskLogsResponse; + + /** + * Verifies a GetTaskLogsResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + } + /** Properties of a ClusterAssignment. */ interface IClusterAssignment { @@ -9604,6 +9931,9 @@ export namespace flyteidl { /** NamedEntityIdentifier name */ name?: (string|null); + + /** NamedEntityIdentifier org */ + org?: (string|null); } /** Represents a NamedEntityIdentifier. */ @@ -9624,6 +9954,9 @@ export namespace flyteidl { /** NamedEntityIdentifier name. */ public name: string; + /** NamedEntityIdentifier org. 
*/ + public org: string; + /** * Creates a new NamedEntityIdentifier instance using the specified properties. * @param [properties] Properties to set @@ -9873,6 +10206,9 @@ export namespace flyteidl { /** NamedEntityIdentifierListRequest filters */ filters?: (string|null); + + /** NamedEntityIdentifierListRequest org */ + org?: (string|null); } /** Represents a NamedEntityIdentifierListRequest. */ @@ -9902,6 +10238,9 @@ export namespace flyteidl { /** NamedEntityIdentifierListRequest filters. */ public filters: string; + /** NamedEntityIdentifierListRequest org. */ + public org: string; + /** * Creates a new NamedEntityIdentifierListRequest instance using the specified properties. * @param [properties] Properties to set @@ -9958,6 +10297,9 @@ export namespace flyteidl { /** NamedEntityListRequest filters */ filters?: (string|null); + + /** NamedEntityListRequest org */ + org?: (string|null); } /** Represents a NamedEntityListRequest. */ @@ -9990,6 +10332,9 @@ export namespace flyteidl { /** NamedEntityListRequest filters. */ public filters: string; + /** NamedEntityListRequest org. */ + public org: string; + /** * Creates a new NamedEntityListRequest instance using the specified properties. * @param [properties] Properties to set @@ -11895,6 +12240,9 @@ export namespace flyteidl { /** ExecutionCreateRequest inputs */ inputs?: (flyteidl.core.ILiteralMap|null); + + /** ExecutionCreateRequest org */ + org?: (string|null); } /** Represents an ExecutionCreateRequest. */ @@ -11921,6 +12269,9 @@ export namespace flyteidl { /** ExecutionCreateRequest inputs. */ public inputs?: (flyteidl.core.ILiteralMap|null); + /** ExecutionCreateRequest org. */ + public org: string; + /** * Creates a new ExecutionCreateRequest instance using the specified properties. 
* @param [properties] Properties to set @@ -14183,6 +14534,9 @@ export namespace flyteidl { /** ActiveLaunchPlanListRequest sortBy */ sortBy?: (flyteidl.admin.ISort|null); + + /** ActiveLaunchPlanListRequest org */ + org?: (string|null); } /** Represents an ActiveLaunchPlanListRequest. */ @@ -14209,6 +14563,9 @@ export namespace flyteidl { /** ActiveLaunchPlanListRequest sortBy. */ public sortBy?: (flyteidl.admin.ISort|null); + /** ActiveLaunchPlanListRequest org. */ + public org: string; + /** * Creates a new ActiveLaunchPlanListRequest instance using the specified properties. * @param [properties] Properties to set @@ -15073,6 +15430,9 @@ export namespace flyteidl { /** MatchableAttributesConfiguration launchPlan */ launchPlan?: (string|null); + + /** MatchableAttributesConfiguration org */ + org?: (string|null); } /** Represents a MatchableAttributesConfiguration. */ @@ -15099,6 +15459,9 @@ export namespace flyteidl { /** MatchableAttributesConfiguration launchPlan. */ public launchPlan: string; + /** MatchableAttributesConfiguration org. */ + public org: string; + /** * Creates a new MatchableAttributesConfiguration instance using the specified properties. * @param [properties] Properties to set @@ -15137,6 +15500,9 @@ export namespace flyteidl { /** ListMatchableAttributesRequest resourceType */ resourceType?: (flyteidl.admin.MatchableResource|null); + + /** ListMatchableAttributesRequest org */ + org?: (string|null); } /** Represents a ListMatchableAttributesRequest. */ @@ -15151,6 +15517,9 @@ export namespace flyteidl { /** ListMatchableAttributesRequest resourceType. */ public resourceType: flyteidl.admin.MatchableResource; + /** ListMatchableAttributesRequest org. */ + public org: string; + /** * Creates a new ListMatchableAttributesRequest instance using the specified properties. 
* @param [properties] Properties to set @@ -16088,6 +16457,110 @@ export namespace flyteidl { public static verify(message: { [k: string]: any }): (string|null); } + /** Properties of a GetDynamicNodeWorkflowRequest. */ + interface IGetDynamicNodeWorkflowRequest { + + /** GetDynamicNodeWorkflowRequest id */ + id?: (flyteidl.core.INodeExecutionIdentifier|null); + } + + /** Represents a GetDynamicNodeWorkflowRequest. */ + class GetDynamicNodeWorkflowRequest implements IGetDynamicNodeWorkflowRequest { + + /** + * Constructs a new GetDynamicNodeWorkflowRequest. + * @param [properties] Properties to set + */ + constructor(properties?: flyteidl.admin.IGetDynamicNodeWorkflowRequest); + + /** GetDynamicNodeWorkflowRequest id. */ + public id?: (flyteidl.core.INodeExecutionIdentifier|null); + + /** + * Creates a new GetDynamicNodeWorkflowRequest instance using the specified properties. + * @param [properties] Properties to set + * @returns GetDynamicNodeWorkflowRequest instance + */ + public static create(properties?: flyteidl.admin.IGetDynamicNodeWorkflowRequest): flyteidl.admin.GetDynamicNodeWorkflowRequest; + + /** + * Encodes the specified GetDynamicNodeWorkflowRequest message. Does not implicitly {@link flyteidl.admin.GetDynamicNodeWorkflowRequest.verify|verify} messages. + * @param message GetDynamicNodeWorkflowRequest message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: flyteidl.admin.IGetDynamicNodeWorkflowRequest, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a GetDynamicNodeWorkflowRequest message from the specified reader or buffer. 
+ * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns GetDynamicNodeWorkflowRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): flyteidl.admin.GetDynamicNodeWorkflowRequest; + + /** + * Verifies a GetDynamicNodeWorkflowRequest message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + } + + /** Properties of a DynamicNodeWorkflowResponse. */ + interface IDynamicNodeWorkflowResponse { + + /** DynamicNodeWorkflowResponse compiledWorkflow */ + compiledWorkflow?: (flyteidl.core.ICompiledWorkflowClosure|null); + } + + /** Represents a DynamicNodeWorkflowResponse. */ + class DynamicNodeWorkflowResponse implements IDynamicNodeWorkflowResponse { + + /** + * Constructs a new DynamicNodeWorkflowResponse. + * @param [properties] Properties to set + */ + constructor(properties?: flyteidl.admin.IDynamicNodeWorkflowResponse); + + /** DynamicNodeWorkflowResponse compiledWorkflow. */ + public compiledWorkflow?: (flyteidl.core.ICompiledWorkflowClosure|null); + + /** + * Creates a new DynamicNodeWorkflowResponse instance using the specified properties. + * @param [properties] Properties to set + * @returns DynamicNodeWorkflowResponse instance + */ + public static create(properties?: flyteidl.admin.IDynamicNodeWorkflowResponse): flyteidl.admin.DynamicNodeWorkflowResponse; + + /** + * Encodes the specified DynamicNodeWorkflowResponse message. Does not implicitly {@link flyteidl.admin.DynamicNodeWorkflowResponse.verify|verify} messages. 
+ * @param message DynamicNodeWorkflowResponse message or plain object to encode + * @param [writer] Writer to encode to + * @returns Writer + */ + public static encode(message: flyteidl.admin.IDynamicNodeWorkflowResponse, writer?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a DynamicNodeWorkflowResponse message from the specified reader or buffer. + * @param reader Reader or buffer to decode from + * @param [length] Message length if known beforehand + * @returns DynamicNodeWorkflowResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(reader: ($protobuf.Reader|Uint8Array), length?: number): flyteidl.admin.DynamicNodeWorkflowResponse; + + /** + * Verifies a DynamicNodeWorkflowResponse message. + * @param message Plain object to verify + * @returns `null` if valid, otherwise the reason why it is not + */ + public static verify(message: { [k: string]: any }): (string|null); + } + /** Properties of an EmailMessage. */ interface IEmailMessage { @@ -16236,6 +16709,9 @@ export namespace flyteidl { /** Project state */ state?: (flyteidl.admin.Project.ProjectState|null); + + /** Project org */ + org?: (string|null); } /** Represents a Project. */ @@ -16265,6 +16741,9 @@ export namespace flyteidl { /** Project state. */ public state: flyteidl.admin.Project.ProjectState; + /** Project org. */ + public org: string; + /** * Creates a new Project instance using the specified properties. * @param [properties] Properties to set @@ -16380,6 +16859,9 @@ export namespace flyteidl { /** ProjectListRequest sortBy */ sortBy?: (flyteidl.admin.ISort|null); + + /** ProjectListRequest org */ + org?: (string|null); } /** Represents a ProjectListRequest. */ @@ -16403,6 +16885,9 @@ export namespace flyteidl { /** ProjectListRequest sortBy. */ public sortBy?: (flyteidl.admin.ISort|null); + /** ProjectListRequest org. 
*/ + public org: string; + /** * Creates a new ProjectListRequest instance using the specified properties. * @param [properties] Properties to set @@ -16588,6 +17073,9 @@ export namespace flyteidl { /** ProjectAttributes matchingAttributes */ matchingAttributes?: (flyteidl.admin.IMatchingAttributes|null); + + /** ProjectAttributes org */ + org?: (string|null); } /** Represents a ProjectAttributes. */ @@ -16605,6 +17093,9 @@ export namespace flyteidl { /** ProjectAttributes matchingAttributes. */ public matchingAttributes?: (flyteidl.admin.IMatchingAttributes|null); + /** ProjectAttributes org. */ + public org: string; + /** * Creates a new ProjectAttributes instance using the specified properties. * @param [properties] Properties to set @@ -16744,6 +17235,9 @@ export namespace flyteidl { /** ProjectAttributesGetRequest resourceType */ resourceType?: (flyteidl.admin.MatchableResource|null); + + /** ProjectAttributesGetRequest org */ + org?: (string|null); } /** Represents a ProjectAttributesGetRequest. */ @@ -16761,6 +17255,9 @@ export namespace flyteidl { /** ProjectAttributesGetRequest resourceType. */ public resourceType: flyteidl.admin.MatchableResource; + /** ProjectAttributesGetRequest org. */ + public org: string; + /** * Creates a new ProjectAttributesGetRequest instance using the specified properties. * @param [properties] Properties to set @@ -16854,6 +17351,9 @@ export namespace flyteidl { /** ProjectAttributesDeleteRequest resourceType */ resourceType?: (flyteidl.admin.MatchableResource|null); + + /** ProjectAttributesDeleteRequest org */ + org?: (string|null); } /** Represents a ProjectAttributesDeleteRequest. */ @@ -16871,6 +17371,9 @@ export namespace flyteidl { /** ProjectAttributesDeleteRequest resourceType. */ public resourceType: flyteidl.admin.MatchableResource; + /** ProjectAttributesDeleteRequest org. */ + public org: string; + /** * Creates a new ProjectAttributesDeleteRequest instance using the specified properties. 
* @param [properties] Properties to set @@ -16961,6 +17464,9 @@ export namespace flyteidl { /** ProjectDomainAttributes matchingAttributes */ matchingAttributes?: (flyteidl.admin.IMatchingAttributes|null); + + /** ProjectDomainAttributes org */ + org?: (string|null); } /** Represents a ProjectDomainAttributes. */ @@ -16981,6 +17487,9 @@ export namespace flyteidl { /** ProjectDomainAttributes matchingAttributes. */ public matchingAttributes?: (flyteidl.admin.IMatchingAttributes|null); + /** ProjectDomainAttributes org. */ + public org: string; + /** * Creates a new ProjectDomainAttributes instance using the specified properties. * @param [properties] Properties to set @@ -17123,6 +17632,9 @@ export namespace flyteidl { /** ProjectDomainAttributesGetRequest resourceType */ resourceType?: (flyteidl.admin.MatchableResource|null); + + /** ProjectDomainAttributesGetRequest org */ + org?: (string|null); } /** Represents a ProjectDomainAttributesGetRequest. */ @@ -17143,6 +17655,9 @@ export namespace flyteidl { /** ProjectDomainAttributesGetRequest resourceType. */ public resourceType: flyteidl.admin.MatchableResource; + /** ProjectDomainAttributesGetRequest org. */ + public org: string; + /** * Creates a new ProjectDomainAttributesGetRequest instance using the specified properties. * @param [properties] Properties to set @@ -17239,6 +17754,9 @@ export namespace flyteidl { /** ProjectDomainAttributesDeleteRequest resourceType */ resourceType?: (flyteidl.admin.MatchableResource|null); + + /** ProjectDomainAttributesDeleteRequest org */ + org?: (string|null); } /** Represents a ProjectDomainAttributesDeleteRequest. */ @@ -17259,6 +17777,9 @@ export namespace flyteidl { /** ProjectDomainAttributesDeleteRequest resourceType. */ public resourceType: flyteidl.admin.MatchableResource; + /** ProjectDomainAttributesDeleteRequest org. */ + public org: string; + /** * Creates a new ProjectDomainAttributesDeleteRequest instance using the specified properties. 
* @param [properties] Properties to set @@ -19310,6 +19831,9 @@ export namespace flyteidl { /** WorkflowAttributes matchingAttributes */ matchingAttributes?: (flyteidl.admin.IMatchingAttributes|null); + + /** WorkflowAttributes org */ + org?: (string|null); } /** Represents a WorkflowAttributes. */ @@ -19333,6 +19857,9 @@ export namespace flyteidl { /** WorkflowAttributes matchingAttributes. */ public matchingAttributes?: (flyteidl.admin.IMatchingAttributes|null); + /** WorkflowAttributes org. */ + public org: string; + /** * Creates a new WorkflowAttributes instance using the specified properties. * @param [properties] Properties to set @@ -19478,6 +20005,9 @@ export namespace flyteidl { /** WorkflowAttributesGetRequest resourceType */ resourceType?: (flyteidl.admin.MatchableResource|null); + + /** WorkflowAttributesGetRequest org */ + org?: (string|null); } /** Represents a WorkflowAttributesGetRequest. */ @@ -19501,6 +20031,9 @@ export namespace flyteidl { /** WorkflowAttributesGetRequest resourceType. */ public resourceType: flyteidl.admin.MatchableResource; + /** WorkflowAttributesGetRequest org. */ + public org: string; + /** * Creates a new WorkflowAttributesGetRequest instance using the specified properties. * @param [properties] Properties to set @@ -19600,6 +20133,9 @@ export namespace flyteidl { /** WorkflowAttributesDeleteRequest resourceType */ resourceType?: (flyteidl.admin.MatchableResource|null); + + /** WorkflowAttributesDeleteRequest org */ + org?: (string|null); } /** Represents a WorkflowAttributesDeleteRequest. */ @@ -19623,6 +20159,9 @@ export namespace flyteidl { /** WorkflowAttributesDeleteRequest resourceType. */ public resourceType: flyteidl.admin.MatchableResource; + /** WorkflowAttributesDeleteRequest org. */ + public org: string; + /** * Creates a new WorkflowAttributesDeleteRequest instance using the specified properties. 
* @param [properties] Properties to set @@ -20062,6 +20601,20 @@ export namespace flyteidl { */ public getNodeExecution(request: flyteidl.admin.INodeExecutionGetRequest): Promise; + /** + * Calls GetDynamicNodeWorkflow. + * @param request GetDynamicNodeWorkflowRequest message or plain object + * @param callback Node-style callback called with the error, if any, and DynamicNodeWorkflowResponse + */ + public getDynamicNodeWorkflow(request: flyteidl.admin.IGetDynamicNodeWorkflowRequest, callback: flyteidl.service.AdminService.GetDynamicNodeWorkflowCallback): void; + + /** + * Calls GetDynamicNodeWorkflow. + * @param request GetDynamicNodeWorkflowRequest message or plain object + * @returns Promise + */ + public getDynamicNodeWorkflow(request: flyteidl.admin.IGetDynamicNodeWorkflowRequest): Promise; + /** * Calls ListNodeExecutions. * @param request NodeExecutionListRequest message or plain object @@ -20639,6 +21192,13 @@ export namespace flyteidl { */ type GetNodeExecutionCallback = (error: (Error|null), response?: flyteidl.admin.NodeExecution) => void; + /** + * Callback as used by {@link flyteidl.service.AdminService#getDynamicNodeWorkflow}. + * @param error Error, if any + * @param [response] DynamicNodeWorkflowResponse + */ + type GetDynamicNodeWorkflowCallback = (error: (Error|null), response?: flyteidl.admin.DynamicNodeWorkflowResponse) => void; + /** * Callback as used by {@link flyteidl.service.AdminService#listNodeExecutions}. * @param error Error, if any @@ -20904,6 +21464,34 @@ export namespace flyteidl { * @returns Promise */ public deleteTask(request: flyteidl.admin.IDeleteTaskRequest): Promise; + + /** + * Calls GetTaskMetrics. 
+ * @param request GetTaskMetricsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and GetTaskMetricsResponse + */ + public getTaskMetrics(request: flyteidl.admin.IGetTaskMetricsRequest, callback: flyteidl.service.AsyncAgentService.GetTaskMetricsCallback): void; + + /** + * Calls GetTaskMetrics. + * @param request GetTaskMetricsRequest message or plain object + * @returns Promise + */ + public getTaskMetrics(request: flyteidl.admin.IGetTaskMetricsRequest): Promise; + + /** + * Calls GetTaskLogs. + * @param request GetTaskLogsRequest message or plain object + * @param callback Node-style callback called with the error, if any, and GetTaskLogsResponse + */ + public getTaskLogs(request: flyteidl.admin.IGetTaskLogsRequest, callback: flyteidl.service.AsyncAgentService.GetTaskLogsCallback): void; + + /** + * Calls GetTaskLogs. + * @param request GetTaskLogsRequest message or plain object + * @returns Promise + */ + public getTaskLogs(request: flyteidl.admin.IGetTaskLogsRequest): Promise; } namespace AsyncAgentService { @@ -20928,6 +21516,20 @@ export namespace flyteidl { * @param [response] DeleteTaskResponse */ type DeleteTaskCallback = (error: (Error|null), response?: flyteidl.admin.DeleteTaskResponse) => void; + + /** + * Callback as used by {@link flyteidl.service.AsyncAgentService#getTaskMetrics}. + * @param error Error, if any + * @param [response] GetTaskMetricsResponse + */ + type GetTaskMetricsCallback = (error: (Error|null), response?: flyteidl.admin.GetTaskMetricsResponse) => void; + + /** + * Callback as used by {@link flyteidl.service.AsyncAgentService#getTaskLogs}. 
+ * @param error Error, if any + * @param [response] GetTaskLogsResponse + */ + type GetTaskLogsCallback = (error: (Error|null), response?: flyteidl.admin.GetTaskLogsResponse) => void; } /** Represents an AgentMetadataService */ diff --git a/flyteidl/gen/pb-js/flyteidl.js b/flyteidl/gen/pb-js/flyteidl.js index 8375c630e7..7a5c668347 100644 --- a/flyteidl/gen/pb-js/flyteidl.js +++ b/flyteidl/gen/pb-js/flyteidl.js @@ -186,6 +186,7 @@ * @interface IArtifactBindingData * @property {number|null} [index] ArtifactBindingData index * @property {string|null} [partitionKey] ArtifactBindingData partitionKey + * @property {boolean|null} [bindToTimePartition] ArtifactBindingData bindToTimePartition * @property {string|null} [transform] ArtifactBindingData transform */ @@ -220,6 +221,14 @@ */ ArtifactBindingData.prototype.partitionKey = ""; + /** + * ArtifactBindingData bindToTimePartition. + * @member {boolean} bindToTimePartition + * @memberof flyteidl.core.ArtifactBindingData + * @instance + */ + ArtifactBindingData.prototype.bindToTimePartition = false; + /** * ArtifactBindingData transform. * @member {string} transform @@ -228,6 +237,20 @@ */ ArtifactBindingData.prototype.transform = ""; + // OneOf field names bound to virtual getters and setters + var $oneOfFields; + + /** + * ArtifactBindingData partitionData. + * @member {"partitionKey"|"bindToTimePartition"|undefined} partitionData + * @memberof flyteidl.core.ArtifactBindingData + * @instance + */ + Object.defineProperty(ArtifactBindingData.prototype, "partitionData", { + get: $util.oneOfGetter($oneOfFields = ["partitionKey", "bindToTimePartition"]), + set: $util.oneOfSetter($oneOfFields) + }); + /** * Creates a new ArtifactBindingData instance using the specified properties. 
* @function create @@ -256,8 +279,10 @@ writer.uint32(/* id 1, wireType 0 =*/8).uint32(message.index); if (message.partitionKey != null && message.hasOwnProperty("partitionKey")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.partitionKey); + if (message.bindToTimePartition != null && message.hasOwnProperty("bindToTimePartition")) + writer.uint32(/* id 3, wireType 0 =*/24).bool(message.bindToTimePartition); if (message.transform != null && message.hasOwnProperty("transform")) - writer.uint32(/* id 3, wireType 2 =*/26).string(message.transform); + writer.uint32(/* id 4, wireType 2 =*/34).string(message.transform); return writer; }; @@ -286,6 +311,9 @@ message.partitionKey = reader.string(); break; case 3: + message.bindToTimePartition = reader.bool(); + break; + case 4: message.transform = reader.string(); break; default: @@ -307,12 +335,22 @@ ArtifactBindingData.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; + var properties = {}; if (message.index != null && message.hasOwnProperty("index")) if (!$util.isInteger(message.index)) return "index: integer expected"; - if (message.partitionKey != null && message.hasOwnProperty("partitionKey")) + if (message.partitionKey != null && message.hasOwnProperty("partitionKey")) { + properties.partitionData = 1; if (!$util.isString(message.partitionKey)) return "partitionKey: string expected"; + } + if (message.bindToTimePartition != null && message.hasOwnProperty("bindToTimePartition")) { + if (properties.partitionData === 1) + return "partitionData: multiple values"; + properties.partitionData = 1; + if (typeof message.bindToTimePartition !== "boolean") + return "bindToTimePartition: boolean expected"; + } if (message.transform != null && message.hasOwnProperty("transform")) if (!$util.isString(message.transform)) return "transform: string expected"; @@ -439,6 +477,7 @@ * @memberof flyteidl.core * @interface ILabelValue * @property {string|null} 
[staticValue] LabelValue staticValue + * @property {google.protobuf.ITimestamp|null} [timeValue] LabelValue timeValue * @property {flyteidl.core.IArtifactBindingData|null} [triggeredBinding] LabelValue triggeredBinding * @property {flyteidl.core.IInputBindingData|null} [inputBinding] LabelValue inputBinding */ @@ -466,6 +505,14 @@ */ LabelValue.prototype.staticValue = ""; + /** + * LabelValue timeValue. + * @member {google.protobuf.ITimestamp|null|undefined} timeValue + * @memberof flyteidl.core.LabelValue + * @instance + */ + LabelValue.prototype.timeValue = null; + /** * LabelValue triggeredBinding. * @member {flyteidl.core.IArtifactBindingData|null|undefined} triggeredBinding @@ -487,12 +534,12 @@ /** * LabelValue value. - * @member {"staticValue"|"triggeredBinding"|"inputBinding"|undefined} value + * @member {"staticValue"|"timeValue"|"triggeredBinding"|"inputBinding"|undefined} value * @memberof flyteidl.core.LabelValue * @instance */ Object.defineProperty(LabelValue.prototype, "value", { - get: $util.oneOfGetter($oneOfFields = ["staticValue", "triggeredBinding", "inputBinding"]), + get: $util.oneOfGetter($oneOfFields = ["staticValue", "timeValue", "triggeredBinding", "inputBinding"]), set: $util.oneOfSetter($oneOfFields) }); @@ -522,10 +569,12 @@ writer = $Writer.create(); if (message.staticValue != null && message.hasOwnProperty("staticValue")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.staticValue); + if (message.timeValue != null && message.hasOwnProperty("timeValue")) + $root.google.protobuf.Timestamp.encode(message.timeValue, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.triggeredBinding != null && message.hasOwnProperty("triggeredBinding")) - $root.flyteidl.core.ArtifactBindingData.encode(message.triggeredBinding, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + $root.flyteidl.core.ArtifactBindingData.encode(message.triggeredBinding, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); if 
(message.inputBinding != null && message.hasOwnProperty("inputBinding")) - $root.flyteidl.core.InputBindingData.encode(message.inputBinding, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + $root.flyteidl.core.InputBindingData.encode(message.inputBinding, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); return writer; }; @@ -551,9 +600,12 @@ message.staticValue = reader.string(); break; case 2: - message.triggeredBinding = $root.flyteidl.core.ArtifactBindingData.decode(reader, reader.uint32()); + message.timeValue = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); break; case 3: + message.triggeredBinding = $root.flyteidl.core.ArtifactBindingData.decode(reader, reader.uint32()); + break; + case 4: message.inputBinding = $root.flyteidl.core.InputBindingData.decode(reader, reader.uint32()); break; default: @@ -581,6 +633,16 @@ if (!$util.isString(message.staticValue)) return "staticValue: string expected"; } + if (message.timeValue != null && message.hasOwnProperty("timeValue")) { + if (properties.value === 1) + return "value: multiple values"; + properties.value = 1; + { + var error = $root.google.protobuf.Timestamp.verify(message.timeValue); + if (error) + return "timeValue." + error; + } + } if (message.triggeredBinding != null && message.hasOwnProperty("triggeredBinding")) { if (properties.value === 1) return "value: multiple values"; @@ -733,6 +795,118 @@ return Partitions; })(); + core.TimePartition = (function() { + + /** + * Properties of a TimePartition. + * @memberof flyteidl.core + * @interface ITimePartition + * @property {flyteidl.core.ILabelValue|null} [value] TimePartition value + */ + + /** + * Constructs a new TimePartition. + * @memberof flyteidl.core + * @classdesc Represents a TimePartition. 
+ * @implements ITimePartition + * @constructor + * @param {flyteidl.core.ITimePartition=} [properties] Properties to set + */ + function TimePartition(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * TimePartition value. + * @member {flyteidl.core.ILabelValue|null|undefined} value + * @memberof flyteidl.core.TimePartition + * @instance + */ + TimePartition.prototype.value = null; + + /** + * Creates a new TimePartition instance using the specified properties. + * @function create + * @memberof flyteidl.core.TimePartition + * @static + * @param {flyteidl.core.ITimePartition=} [properties] Properties to set + * @returns {flyteidl.core.TimePartition} TimePartition instance + */ + TimePartition.create = function create(properties) { + return new TimePartition(properties); + }; + + /** + * Encodes the specified TimePartition message. Does not implicitly {@link flyteidl.core.TimePartition.verify|verify} messages. + * @function encode + * @memberof flyteidl.core.TimePartition + * @static + * @param {flyteidl.core.ITimePartition} message TimePartition message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + TimePartition.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.value != null && message.hasOwnProperty("value")) + $root.flyteidl.core.LabelValue.encode(message.value, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Decodes a TimePartition message from the specified reader or buffer. 
+ * @function decode + * @memberof flyteidl.core.TimePartition + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {flyteidl.core.TimePartition} TimePartition + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + TimePartition.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.core.TimePartition(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.value = $root.flyteidl.core.LabelValue.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Verifies a TimePartition message. + * @function verify + * @memberof flyteidl.core.TimePartition + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + TimePartition.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.value != null && message.hasOwnProperty("value")) { + var error = $root.flyteidl.core.LabelValue.verify(message.value); + if (error) + return "value." 
+ error; + } + return null; + }; + + return TimePartition; + })(); + core.ArtifactID = (function() { /** @@ -742,6 +916,7 @@ * @property {flyteidl.core.IArtifactKey|null} [artifactKey] ArtifactID artifactKey * @property {string|null} [version] ArtifactID version * @property {flyteidl.core.IPartitions|null} [partitions] ArtifactID partitions + * @property {flyteidl.core.ITimePartition|null} [timePartition] ArtifactID timePartition */ /** @@ -783,19 +958,13 @@ */ ArtifactID.prototype.partitions = null; - // OneOf field names bound to virtual getters and setters - var $oneOfFields; - /** - * ArtifactID dimensions. - * @member {"partitions"|undefined} dimensions + * ArtifactID timePartition. + * @member {flyteidl.core.ITimePartition|null|undefined} timePartition * @memberof flyteidl.core.ArtifactID * @instance */ - Object.defineProperty(ArtifactID.prototype, "dimensions", { - get: $util.oneOfGetter($oneOfFields = ["partitions"]), - set: $util.oneOfSetter($oneOfFields) - }); + ArtifactID.prototype.timePartition = null; /** * Creates a new ArtifactID instance using the specified properties. 
@@ -827,6 +996,8 @@ writer.uint32(/* id 2, wireType 2 =*/18).string(message.version); if (message.partitions != null && message.hasOwnProperty("partitions")) $root.flyteidl.core.Partitions.encode(message.partitions, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.timePartition != null && message.hasOwnProperty("timePartition")) + $root.flyteidl.core.TimePartition.encode(message.timePartition, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); return writer; }; @@ -857,6 +1028,9 @@ case 3: message.partitions = $root.flyteidl.core.Partitions.decode(reader, reader.uint32()); break; + case 4: + message.timePartition = $root.flyteidl.core.TimePartition.decode(reader, reader.uint32()); + break; default: reader.skipType(tag & 7); break; @@ -876,7 +1050,6 @@ ArtifactID.verify = function verify(message) { if (typeof message !== "object" || message === null) return "object expected"; - var properties = {}; if (message.artifactKey != null && message.hasOwnProperty("artifactKey")) { var error = $root.flyteidl.core.ArtifactKey.verify(message.artifactKey); if (error) @@ -886,12 +1059,14 @@ if (!$util.isString(message.version)) return "version: string expected"; if (message.partitions != null && message.hasOwnProperty("partitions")) { - properties.dimensions = 1; - { - var error = $root.flyteidl.core.Partitions.verify(message.partitions); - if (error) - return "partitions." + error; - } + var error = $root.flyteidl.core.Partitions.verify(message.partitions); + if (error) + return "partitions." + error; + } + if (message.timePartition != null && message.hasOwnProperty("timePartition")) { + var error = $root.flyteidl.core.TimePartition.verify(message.timePartition); + if (error) + return "timePartition." + error; } return null; }; @@ -1229,145 +1404,6 @@ return ArtifactQuery; })(); - core.Trigger = (function() { - - /** - * Properties of a Trigger. 
- * @memberof flyteidl.core - * @interface ITrigger - * @property {flyteidl.core.IIdentifier|null} [triggerId] Trigger triggerId - * @property {Array.|null} [triggers] Trigger triggers - */ - - /** - * Constructs a new Trigger. - * @memberof flyteidl.core - * @classdesc Represents a Trigger. - * @implements ITrigger - * @constructor - * @param {flyteidl.core.ITrigger=} [properties] Properties to set - */ - function Trigger(properties) { - this.triggers = []; - if (properties) - for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) - if (properties[keys[i]] != null) - this[keys[i]] = properties[keys[i]]; - } - - /** - * Trigger triggerId. - * @member {flyteidl.core.IIdentifier|null|undefined} triggerId - * @memberof flyteidl.core.Trigger - * @instance - */ - Trigger.prototype.triggerId = null; - - /** - * Trigger triggers. - * @member {Array.} triggers - * @memberof flyteidl.core.Trigger - * @instance - */ - Trigger.prototype.triggers = $util.emptyArray; - - /** - * Creates a new Trigger instance using the specified properties. - * @function create - * @memberof flyteidl.core.Trigger - * @static - * @param {flyteidl.core.ITrigger=} [properties] Properties to set - * @returns {flyteidl.core.Trigger} Trigger instance - */ - Trigger.create = function create(properties) { - return new Trigger(properties); - }; - - /** - * Encodes the specified Trigger message. Does not implicitly {@link flyteidl.core.Trigger.verify|verify} messages. 
- * @function encode - * @memberof flyteidl.core.Trigger - * @static - * @param {flyteidl.core.ITrigger} message Trigger message or plain object to encode - * @param {$protobuf.Writer} [writer] Writer to encode to - * @returns {$protobuf.Writer} Writer - */ - Trigger.encode = function encode(message, writer) { - if (!writer) - writer = $Writer.create(); - if (message.triggerId != null && message.hasOwnProperty("triggerId")) - $root.flyteidl.core.Identifier.encode(message.triggerId, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.triggers != null && message.triggers.length) - for (var i = 0; i < message.triggers.length; ++i) - $root.flyteidl.core.ArtifactID.encode(message.triggers[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - return writer; - }; - - /** - * Decodes a Trigger message from the specified reader or buffer. - * @function decode - * @memberof flyteidl.core.Trigger - * @static - * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from - * @param {number} [length] Message length if known beforehand - * @returns {flyteidl.core.Trigger} Trigger - * @throws {Error} If the payload is not a reader or valid buffer - * @throws {$protobuf.util.ProtocolError} If required fields are missing - */ - Trigger.decode = function decode(reader, length) { - if (!(reader instanceof $Reader)) - reader = $Reader.create(reader); - var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.flyteidl.core.Trigger(); - while (reader.pos < end) { - var tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - message.triggerId = $root.flyteidl.core.Identifier.decode(reader, reader.uint32()); - break; - case 2: - if (!(message.triggers && message.triggers.length)) - message.triggers = []; - message.triggers.push($root.flyteidl.core.ArtifactID.decode(reader, reader.uint32())); - break; - default: - reader.skipType(tag & 7); - break; - } - } - return message; - }; - - /** - * Verifies a Trigger message. - * @function verify - * @memberof flyteidl.core.Trigger - * @static - * @param {Object.} message Plain object to verify - * @returns {string|null} `null` if valid, otherwise the reason why it is not - */ - Trigger.verify = function verify(message) { - if (typeof message !== "object" || message === null) - return "object expected"; - if (message.triggerId != null && message.hasOwnProperty("triggerId")) { - var error = $root.flyteidl.core.Identifier.verify(message.triggerId); - if (error) - return "triggerId." + error; - } - if (message.triggers != null && message.hasOwnProperty("triggers")) { - if (!Array.isArray(message.triggers)) - return "triggers: array expected"; - for (var i = 0; i < message.triggers.length; ++i) { - var error = $root.flyteidl.core.ArtifactID.verify(message.triggers[i]); - if (error) - return "triggers." + error; - } - } - return null; - }; - - return Trigger; - })(); - /** * ResourceType enum. * @name flyteidl.core.ResourceType @@ -1399,6 +1435,7 @@ * @property {string|null} [domain] Identifier domain * @property {string|null} [name] Identifier name * @property {string|null} [version] Identifier version + * @property {string|null} [org] Identifier org */ /** @@ -1456,6 +1493,14 @@ */ Identifier.prototype.version = ""; + /** + * Identifier org. 
+ * @member {string} org + * @memberof flyteidl.core.Identifier + * @instance + */ + Identifier.prototype.org = ""; + /** * Creates a new Identifier instance using the specified properties. * @function create @@ -1490,6 +1535,8 @@ writer.uint32(/* id 4, wireType 2 =*/34).string(message.name); if (message.version != null && message.hasOwnProperty("version")) writer.uint32(/* id 5, wireType 2 =*/42).string(message.version); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.org); return writer; }; @@ -1526,6 +1573,9 @@ case 5: message.version = reader.string(); break; + case 6: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -1568,6 +1618,9 @@ if (message.version != null && message.hasOwnProperty("version")) if (!$util.isString(message.version)) return "version: string expected"; + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -1583,6 +1636,7 @@ * @property {string|null} [project] WorkflowExecutionIdentifier project * @property {string|null} [domain] WorkflowExecutionIdentifier domain * @property {string|null} [name] WorkflowExecutionIdentifier name + * @property {string|null} [org] WorkflowExecutionIdentifier org */ /** @@ -1624,6 +1678,14 @@ */ WorkflowExecutionIdentifier.prototype.name = ""; + /** + * WorkflowExecutionIdentifier org. + * @member {string} org + * @memberof flyteidl.core.WorkflowExecutionIdentifier + * @instance + */ + WorkflowExecutionIdentifier.prototype.org = ""; + /** * Creates a new WorkflowExecutionIdentifier instance using the specified properties. 
* @function create @@ -1654,6 +1716,8 @@ writer.uint32(/* id 2, wireType 2 =*/18).string(message.domain); if (message.name != null && message.hasOwnProperty("name")) writer.uint32(/* id 4, wireType 2 =*/34).string(message.name); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.org); return writer; }; @@ -1684,6 +1748,9 @@ case 4: message.name = reader.string(); break; + case 5: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -1712,6 +1779,9 @@ if (message.name != null && message.hasOwnProperty("name")) if (!$util.isString(message.name)) return "name: string expected"; + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -2135,6 +2205,7 @@ * @property {number} CACHE_LOOKUP_FAILURE=4 CACHE_LOOKUP_FAILURE value * @property {number} CACHE_PUT_FAILURE=5 CACHE_PUT_FAILURE value * @property {number} CACHE_SKIPPED=6 CACHE_SKIPPED value + * @property {number} CACHE_EVICTED=7 CACHE_EVICTED value */ core.CatalogCacheStatus = (function() { var valuesById = {}, values = Object.create(valuesById); @@ -2145,6 +2216,7 @@ values[valuesById[4] = "CACHE_LOOKUP_FAILURE"] = 4; values[valuesById[5] = "CACHE_PUT_FAILURE"] = 5; values[valuesById[6] = "CACHE_SKIPPED"] = 6; + values[valuesById[7] = "CACHE_EVICTED"] = 7; return values; })(); @@ -17789,6 +17861,135 @@ return Span; })(); + core.ExecutionMetricResult = (function() { + + /** + * Properties of an ExecutionMetricResult. + * @memberof flyteidl.core + * @interface IExecutionMetricResult + * @property {string|null} [metric] ExecutionMetricResult metric + * @property {google.protobuf.IStruct|null} [data] ExecutionMetricResult data + */ + + /** + * Constructs a new ExecutionMetricResult. + * @memberof flyteidl.core + * @classdesc Represents an ExecutionMetricResult. 
+ * @implements IExecutionMetricResult + * @constructor + * @param {flyteidl.core.IExecutionMetricResult=} [properties] Properties to set + */ + function ExecutionMetricResult(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * ExecutionMetricResult metric. + * @member {string} metric + * @memberof flyteidl.core.ExecutionMetricResult + * @instance + */ + ExecutionMetricResult.prototype.metric = ""; + + /** + * ExecutionMetricResult data. + * @member {google.protobuf.IStruct|null|undefined} data + * @memberof flyteidl.core.ExecutionMetricResult + * @instance + */ + ExecutionMetricResult.prototype.data = null; + + /** + * Creates a new ExecutionMetricResult instance using the specified properties. + * @function create + * @memberof flyteidl.core.ExecutionMetricResult + * @static + * @param {flyteidl.core.IExecutionMetricResult=} [properties] Properties to set + * @returns {flyteidl.core.ExecutionMetricResult} ExecutionMetricResult instance + */ + ExecutionMetricResult.create = function create(properties) { + return new ExecutionMetricResult(properties); + }; + + /** + * Encodes the specified ExecutionMetricResult message. Does not implicitly {@link flyteidl.core.ExecutionMetricResult.verify|verify} messages. 
+ * @function encode + * @memberof flyteidl.core.ExecutionMetricResult + * @static + * @param {flyteidl.core.IExecutionMetricResult} message ExecutionMetricResult message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + ExecutionMetricResult.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.metric != null && message.hasOwnProperty("metric")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.metric); + if (message.data != null && message.hasOwnProperty("data")) + $root.google.protobuf.Struct.encode(message.data, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + return writer; + }; + + /** + * Decodes an ExecutionMetricResult message from the specified reader or buffer. + * @function decode + * @memberof flyteidl.core.ExecutionMetricResult + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {flyteidl.core.ExecutionMetricResult} ExecutionMetricResult + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + ExecutionMetricResult.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.core.ExecutionMetricResult(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.metric = reader.string(); + break; + case 2: + message.data = $root.google.protobuf.Struct.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Verifies an ExecutionMetricResult message. 
+ * @function verify + * @memberof flyteidl.core.ExecutionMetricResult + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + ExecutionMetricResult.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.metric != null && message.hasOwnProperty("metric")) + if (!$util.isString(message.metric)) + return "metric: string expected"; + if (message.data != null && message.hasOwnProperty("data")) { + var error = $root.google.protobuf.Struct.verify(message.data); + if (error) + return "data." + error; + } + return null; + }; + + return ExecutionMetricResult; + })(); + core.WorkflowClosure = (function() { /** @@ -17947,9 +18148,7 @@ * @memberof flyteidl.event * @interface ICloudEventWorkflowExecution * @property {flyteidl.event.IWorkflowExecutionEvent|null} [rawEvent] CloudEventWorkflowExecution rawEvent - * @property {flyteidl.core.ILiteralMap|null} [outputData] CloudEventWorkflowExecution outputData * @property {flyteidl.core.ITypedInterface|null} [outputInterface] CloudEventWorkflowExecution outputInterface - * @property {flyteidl.core.ILiteralMap|null} [inputData] CloudEventWorkflowExecution inputData * @property {Array.|null} [artifactIds] CloudEventWorkflowExecution artifactIds * @property {flyteidl.core.IWorkflowExecutionIdentifier|null} [referenceExecution] CloudEventWorkflowExecution referenceExecution * @property {string|null} [principal] CloudEventWorkflowExecution principal @@ -17980,14 +18179,6 @@ */ CloudEventWorkflowExecution.prototype.rawEvent = null; - /** - * CloudEventWorkflowExecution outputData. - * @member {flyteidl.core.ILiteralMap|null|undefined} outputData - * @memberof flyteidl.event.CloudEventWorkflowExecution - * @instance - */ - CloudEventWorkflowExecution.prototype.outputData = null; - /** * CloudEventWorkflowExecution outputInterface. 
* @member {flyteidl.core.ITypedInterface|null|undefined} outputInterface @@ -17996,14 +18187,6 @@ */ CloudEventWorkflowExecution.prototype.outputInterface = null; - /** - * CloudEventWorkflowExecution inputData. - * @member {flyteidl.core.ILiteralMap|null|undefined} inputData - * @memberof flyteidl.event.CloudEventWorkflowExecution - * @instance - */ - CloudEventWorkflowExecution.prototype.inputData = null; - /** * CloudEventWorkflowExecution artifactIds. * @member {Array.} artifactIds @@ -18062,21 +18245,17 @@ writer = $Writer.create(); if (message.rawEvent != null && message.hasOwnProperty("rawEvent")) $root.flyteidl.event.WorkflowExecutionEvent.encode(message.rawEvent, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.outputData != null && message.hasOwnProperty("outputData")) - $root.flyteidl.core.LiteralMap.encode(message.outputData, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.outputInterface != null && message.hasOwnProperty("outputInterface")) - $root.flyteidl.core.TypedInterface.encode(message.outputInterface, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.inputData != null && message.hasOwnProperty("inputData")) - $root.flyteidl.core.LiteralMap.encode(message.inputData, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + $root.flyteidl.core.TypedInterface.encode(message.outputInterface, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); if (message.artifactIds != null && message.artifactIds.length) for (var i = 0; i < message.artifactIds.length; ++i) - $root.flyteidl.core.ArtifactID.encode(message.artifactIds[i], writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + $root.flyteidl.core.ArtifactID.encode(message.artifactIds[i], writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); if (message.referenceExecution != null && message.hasOwnProperty("referenceExecution")) - $root.flyteidl.core.WorkflowExecutionIdentifier.encode(message.referenceExecution, 
writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + $root.flyteidl.core.WorkflowExecutionIdentifier.encode(message.referenceExecution, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); if (message.principal != null && message.hasOwnProperty("principal")) - writer.uint32(/* id 7, wireType 2 =*/58).string(message.principal); + writer.uint32(/* id 5, wireType 2 =*/42).string(message.principal); if (message.launchPlanId != null && message.hasOwnProperty("launchPlanId")) - $root.flyteidl.core.Identifier.encode(message.launchPlanId, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + $root.flyteidl.core.Identifier.encode(message.launchPlanId, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); return writer; }; @@ -18102,26 +18281,20 @@ message.rawEvent = $root.flyteidl.event.WorkflowExecutionEvent.decode(reader, reader.uint32()); break; case 2: - message.outputData = $root.flyteidl.core.LiteralMap.decode(reader, reader.uint32()); - break; - case 3: message.outputInterface = $root.flyteidl.core.TypedInterface.decode(reader, reader.uint32()); break; - case 4: - message.inputData = $root.flyteidl.core.LiteralMap.decode(reader, reader.uint32()); - break; - case 5: + case 3: if (!(message.artifactIds && message.artifactIds.length)) message.artifactIds = []; message.artifactIds.push($root.flyteidl.core.ArtifactID.decode(reader, reader.uint32())); break; - case 6: + case 4: message.referenceExecution = $root.flyteidl.core.WorkflowExecutionIdentifier.decode(reader, reader.uint32()); break; - case 7: + case 5: message.principal = reader.string(); break; - case 8: + case 6: message.launchPlanId = $root.flyteidl.core.Identifier.decode(reader, reader.uint32()); break; default: @@ -18148,21 +18321,11 @@ if (error) return "rawEvent." + error; } - if (message.outputData != null && message.hasOwnProperty("outputData")) { - var error = $root.flyteidl.core.LiteralMap.verify(message.outputData); - if (error) - return "outputData." 
+ error; - } if (message.outputInterface != null && message.hasOwnProperty("outputInterface")) { var error = $root.flyteidl.core.TypedInterface.verify(message.outputInterface); if (error) return "outputInterface." + error; } - if (message.inputData != null && message.hasOwnProperty("inputData")) { - var error = $root.flyteidl.core.LiteralMap.verify(message.inputData); - if (error) - return "inputData." + error; - } if (message.artifactIds != null && message.hasOwnProperty("artifactIds")) { if (!Array.isArray(message.artifactIds)) return "artifactIds: array expected"; @@ -18199,9 +18362,7 @@ * @interface ICloudEventNodeExecution * @property {flyteidl.event.INodeExecutionEvent|null} [rawEvent] CloudEventNodeExecution rawEvent * @property {flyteidl.core.ITaskExecutionIdentifier|null} [taskExecId] CloudEventNodeExecution taskExecId - * @property {flyteidl.core.ILiteralMap|null} [outputData] CloudEventNodeExecution outputData * @property {flyteidl.core.ITypedInterface|null} [outputInterface] CloudEventNodeExecution outputInterface - * @property {flyteidl.core.ILiteralMap|null} [inputData] CloudEventNodeExecution inputData * @property {Array.|null} [artifactIds] CloudEventNodeExecution artifactIds * @property {string|null} [principal] CloudEventNodeExecution principal * @property {flyteidl.core.IIdentifier|null} [launchPlanId] CloudEventNodeExecution launchPlanId @@ -18239,14 +18400,6 @@ */ CloudEventNodeExecution.prototype.taskExecId = null; - /** - * CloudEventNodeExecution outputData. - * @member {flyteidl.core.ILiteralMap|null|undefined} outputData - * @memberof flyteidl.event.CloudEventNodeExecution - * @instance - */ - CloudEventNodeExecution.prototype.outputData = null; - /** * CloudEventNodeExecution outputInterface. * @member {flyteidl.core.ITypedInterface|null|undefined} outputInterface @@ -18255,14 +18408,6 @@ */ CloudEventNodeExecution.prototype.outputInterface = null; - /** - * CloudEventNodeExecution inputData. 
- * @member {flyteidl.core.ILiteralMap|null|undefined} inputData - * @memberof flyteidl.event.CloudEventNodeExecution - * @instance - */ - CloudEventNodeExecution.prototype.inputData = null; - /** * CloudEventNodeExecution artifactIds. * @member {Array.} artifactIds @@ -18315,19 +18460,15 @@ $root.flyteidl.event.NodeExecutionEvent.encode(message.rawEvent, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); if (message.taskExecId != null && message.hasOwnProperty("taskExecId")) $root.flyteidl.core.TaskExecutionIdentifier.encode(message.taskExecId, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.outputData != null && message.hasOwnProperty("outputData")) - $root.flyteidl.core.LiteralMap.encode(message.outputData, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); if (message.outputInterface != null && message.hasOwnProperty("outputInterface")) - $root.flyteidl.core.TypedInterface.encode(message.outputInterface, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.inputData != null && message.hasOwnProperty("inputData")) - $root.flyteidl.core.LiteralMap.encode(message.inputData, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + $root.flyteidl.core.TypedInterface.encode(message.outputInterface, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); if (message.artifactIds != null && message.artifactIds.length) for (var i = 0; i < message.artifactIds.length; ++i) - $root.flyteidl.core.ArtifactID.encode(message.artifactIds[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + $root.flyteidl.core.ArtifactID.encode(message.artifactIds[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); if (message.principal != null && message.hasOwnProperty("principal")) - writer.uint32(/* id 7, wireType 2 =*/58).string(message.principal); + writer.uint32(/* id 5, wireType 2 =*/42).string(message.principal); if (message.launchPlanId != null && message.hasOwnProperty("launchPlanId")) - 
$root.flyteidl.core.Identifier.encode(message.launchPlanId, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); + $root.flyteidl.core.Identifier.encode(message.launchPlanId, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); return writer; }; @@ -18356,23 +18497,17 @@ message.taskExecId = $root.flyteidl.core.TaskExecutionIdentifier.decode(reader, reader.uint32()); break; case 3: - message.outputData = $root.flyteidl.core.LiteralMap.decode(reader, reader.uint32()); - break; - case 4: message.outputInterface = $root.flyteidl.core.TypedInterface.decode(reader, reader.uint32()); break; - case 5: - message.inputData = $root.flyteidl.core.LiteralMap.decode(reader, reader.uint32()); - break; - case 6: + case 4: if (!(message.artifactIds && message.artifactIds.length)) message.artifactIds = []; message.artifactIds.push($root.flyteidl.core.ArtifactID.decode(reader, reader.uint32())); break; - case 7: + case 5: message.principal = reader.string(); break; - case 8: + case 6: message.launchPlanId = $root.flyteidl.core.Identifier.decode(reader, reader.uint32()); break; default: @@ -18404,21 +18539,11 @@ if (error) return "taskExecId." + error; } - if (message.outputData != null && message.hasOwnProperty("outputData")) { - var error = $root.flyteidl.core.LiteralMap.verify(message.outputData); - if (error) - return "outputData." + error; - } if (message.outputInterface != null && message.hasOwnProperty("outputInterface")) { var error = $root.flyteidl.core.TypedInterface.verify(message.outputInterface); if (error) return "outputInterface." + error; } - if (message.inputData != null && message.hasOwnProperty("inputData")) { - var error = $root.flyteidl.core.LiteralMap.verify(message.inputData); - if (error) - return "inputData." 
+ error; - } if (message.artifactIds != null && message.hasOwnProperty("artifactIds")) { if (!Array.isArray(message.artifactIds)) return "artifactIds: array expected"; @@ -18564,7 +18689,7 @@ * @property {flyteidl.core.IIdentifier|null} [launchPlanId] CloudEventExecutionStart launchPlanId * @property {flyteidl.core.IIdentifier|null} [workflowId] CloudEventExecutionStart workflowId * @property {Array.|null} [artifactIds] CloudEventExecutionStart artifactIds - * @property {Array.|null} [artifactKeys] CloudEventExecutionStart artifactKeys + * @property {Array.|null} [artifactTrackers] CloudEventExecutionStart artifactTrackers * @property {string|null} [principal] CloudEventExecutionStart principal */ @@ -18578,7 +18703,7 @@ */ function CloudEventExecutionStart(properties) { this.artifactIds = []; - this.artifactKeys = []; + this.artifactTrackers = []; if (properties) for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) if (properties[keys[i]] != null) @@ -18618,12 +18743,12 @@ CloudEventExecutionStart.prototype.artifactIds = $util.emptyArray; /** - * CloudEventExecutionStart artifactKeys. - * @member {Array.} artifactKeys + * CloudEventExecutionStart artifactTrackers. + * @member {Array.} artifactTrackers * @memberof flyteidl.event.CloudEventExecutionStart * @instance */ - CloudEventExecutionStart.prototype.artifactKeys = $util.emptyArray; + CloudEventExecutionStart.prototype.artifactTrackers = $util.emptyArray; /** * CloudEventExecutionStart principal. 
@@ -18666,9 +18791,9 @@ if (message.artifactIds != null && message.artifactIds.length) for (var i = 0; i < message.artifactIds.length; ++i) $root.flyteidl.core.ArtifactID.encode(message.artifactIds[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.artifactKeys != null && message.artifactKeys.length) - for (var i = 0; i < message.artifactKeys.length; ++i) - writer.uint32(/* id 5, wireType 2 =*/42).string(message.artifactKeys[i]); + if (message.artifactTrackers != null && message.artifactTrackers.length) + for (var i = 0; i < message.artifactTrackers.length; ++i) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.artifactTrackers[i]); if (message.principal != null && message.hasOwnProperty("principal")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.principal); return writer; @@ -18707,9 +18832,9 @@ message.artifactIds.push($root.flyteidl.core.ArtifactID.decode(reader, reader.uint32())); break; case 5: - if (!(message.artifactKeys && message.artifactKeys.length)) - message.artifactKeys = []; - message.artifactKeys.push(reader.string()); + if (!(message.artifactTrackers && message.artifactTrackers.length)) + message.artifactTrackers = []; + message.artifactTrackers.push(reader.string()); break; case 6: message.principal = reader.string(); @@ -18757,12 +18882,12 @@ return "artifactIds." 
+ error; } } - if (message.artifactKeys != null && message.hasOwnProperty("artifactKeys")) { - if (!Array.isArray(message.artifactKeys)) - return "artifactKeys: array expected"; - for (var i = 0; i < message.artifactKeys.length; ++i) - if (!$util.isString(message.artifactKeys[i])) - return "artifactKeys: string[] expected"; + if (message.artifactTrackers != null && message.hasOwnProperty("artifactTrackers")) { + if (!Array.isArray(message.artifactTrackers)) + return "artifactTrackers: array expected"; + for (var i = 0; i < message.artifactTrackers.length; ++i) + if (!$util.isString(message.artifactTrackers[i])) + return "artifactTrackers: string[] expected"; } if (message.principal != null && message.hasOwnProperty("principal")) if (!$util.isString(message.principal)) @@ -19878,6 +20003,7 @@ case 4: case 5: case 6: + case 7: break; } if (message.catalogKey != null && message.hasOwnProperty("catalogKey")) { @@ -21145,6 +21271,7 @@ case 4: case 5: case 6: + case 7: break; } if (message.logs != null && message.hasOwnProperty("logs")) { @@ -22365,6 +22492,7 @@ * @property {flyteidl.core.ILiteralMap|null} [outputs] Resource outputs * @property {string|null} [message] Resource message * @property {Array.|null} [logLinks] Resource logLinks + * @property {flyteidl.core.TaskExecution.Phase|null} [phase] Resource phase */ /** @@ -22415,6 +22543,14 @@ */ Resource.prototype.logLinks = $util.emptyArray; + /** + * Resource phase. + * @member {flyteidl.core.TaskExecution.Phase} phase + * @memberof flyteidl.admin.Resource + * @instance + */ + Resource.prototype.phase = 0; + /** * Creates a new Resource instance using the specified properties. 
* @function create @@ -22448,6 +22584,8 @@ if (message.logLinks != null && message.logLinks.length) for (var i = 0; i < message.logLinks.length; ++i) $root.flyteidl.core.TaskLog.encode(message.logLinks[i], writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.phase != null && message.hasOwnProperty("phase")) + writer.uint32(/* id 5, wireType 0 =*/40).int32(message.phase); return writer; }; @@ -22483,6 +22621,9 @@ message.logLinks = []; message.logLinks.push($root.flyteidl.core.TaskLog.decode(reader, reader.uint32())); break; + case 5: + message.phase = reader.int32(); + break; default: reader.skipType(tag & 7); break; @@ -22530,6 +22671,20 @@ return "logLinks." + error; } } + if (message.phase != null && message.hasOwnProperty("phase")) + switch (message.phase) { + default: + return "phase: enum value expected"; + case 0: + case 1: + case 2: + case 3: + case 4: + case 5: + case 6: + case 7: + break; + } return null; }; @@ -23326,6 +23481,631 @@ return ListAgentsResponse; })(); + admin.GetTaskMetricsRequest = (function() { + + /** + * Properties of a GetTaskMetricsRequest. + * @memberof flyteidl.admin + * @interface IGetTaskMetricsRequest + * @property {string|null} [taskType] GetTaskMetricsRequest taskType + * @property {Uint8Array|null} [resourceMeta] GetTaskMetricsRequest resourceMeta + * @property {Array.|null} [queries] GetTaskMetricsRequest queries + * @property {google.protobuf.ITimestamp|null} [startTime] GetTaskMetricsRequest startTime + * @property {google.protobuf.ITimestamp|null} [endTime] GetTaskMetricsRequest endTime + * @property {google.protobuf.IDuration|null} [step] GetTaskMetricsRequest step + */ + + /** + * Constructs a new GetTaskMetricsRequest. + * @memberof flyteidl.admin + * @classdesc Represents a GetTaskMetricsRequest. 
+ * @implements IGetTaskMetricsRequest + * @constructor + * @param {flyteidl.admin.IGetTaskMetricsRequest=} [properties] Properties to set + */ + function GetTaskMetricsRequest(properties) { + this.queries = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * GetTaskMetricsRequest taskType. + * @member {string} taskType + * @memberof flyteidl.admin.GetTaskMetricsRequest + * @instance + */ + GetTaskMetricsRequest.prototype.taskType = ""; + + /** + * GetTaskMetricsRequest resourceMeta. + * @member {Uint8Array} resourceMeta + * @memberof flyteidl.admin.GetTaskMetricsRequest + * @instance + */ + GetTaskMetricsRequest.prototype.resourceMeta = $util.newBuffer([]); + + /** + * GetTaskMetricsRequest queries. + * @member {Array.} queries + * @memberof flyteidl.admin.GetTaskMetricsRequest + * @instance + */ + GetTaskMetricsRequest.prototype.queries = $util.emptyArray; + + /** + * GetTaskMetricsRequest startTime. + * @member {google.protobuf.ITimestamp|null|undefined} startTime + * @memberof flyteidl.admin.GetTaskMetricsRequest + * @instance + */ + GetTaskMetricsRequest.prototype.startTime = null; + + /** + * GetTaskMetricsRequest endTime. + * @member {google.protobuf.ITimestamp|null|undefined} endTime + * @memberof flyteidl.admin.GetTaskMetricsRequest + * @instance + */ + GetTaskMetricsRequest.prototype.endTime = null; + + /** + * GetTaskMetricsRequest step. + * @member {google.protobuf.IDuration|null|undefined} step + * @memberof flyteidl.admin.GetTaskMetricsRequest + * @instance + */ + GetTaskMetricsRequest.prototype.step = null; + + /** + * Creates a new GetTaskMetricsRequest instance using the specified properties. 
+ * @function create + * @memberof flyteidl.admin.GetTaskMetricsRequest + * @static + * @param {flyteidl.admin.IGetTaskMetricsRequest=} [properties] Properties to set + * @returns {flyteidl.admin.GetTaskMetricsRequest} GetTaskMetricsRequest instance + */ + GetTaskMetricsRequest.create = function create(properties) { + return new GetTaskMetricsRequest(properties); + }; + + /** + * Encodes the specified GetTaskMetricsRequest message. Does not implicitly {@link flyteidl.admin.GetTaskMetricsRequest.verify|verify} messages. + * @function encode + * @memberof flyteidl.admin.GetTaskMetricsRequest + * @static + * @param {flyteidl.admin.IGetTaskMetricsRequest} message GetTaskMetricsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GetTaskMetricsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.taskType != null && message.hasOwnProperty("taskType")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.taskType); + if (message.resourceMeta != null && message.hasOwnProperty("resourceMeta")) + writer.uint32(/* id 2, wireType 2 =*/18).bytes(message.resourceMeta); + if (message.queries != null && message.queries.length) + for (var i = 0; i < message.queries.length; ++i) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.queries[i]); + if (message.startTime != null && message.hasOwnProperty("startTime")) + $root.google.protobuf.Timestamp.encode(message.startTime, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.endTime != null && message.hasOwnProperty("endTime")) + $root.google.protobuf.Timestamp.encode(message.endTime, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.step != null && message.hasOwnProperty("step")) + $root.google.protobuf.Duration.encode(message.step, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); + return writer; + }; + + /** + * Decodes 
a GetTaskMetricsRequest message from the specified reader or buffer. + * @function decode + * @memberof flyteidl.admin.GetTaskMetricsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {flyteidl.admin.GetTaskMetricsRequest} GetTaskMetricsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GetTaskMetricsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.admin.GetTaskMetricsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.taskType = reader.string(); + break; + case 2: + message.resourceMeta = reader.bytes(); + break; + case 3: + if (!(message.queries && message.queries.length)) + message.queries = []; + message.queries.push(reader.string()); + break; + case 4: + message.startTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + case 5: + message.endTime = $root.google.protobuf.Timestamp.decode(reader, reader.uint32()); + break; + case 6: + message.step = $root.google.protobuf.Duration.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Verifies a GetTaskMetricsRequest message. 
+ * @function verify + * @memberof flyteidl.admin.GetTaskMetricsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + GetTaskMetricsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.taskType != null && message.hasOwnProperty("taskType")) + if (!$util.isString(message.taskType)) + return "taskType: string expected"; + if (message.resourceMeta != null && message.hasOwnProperty("resourceMeta")) + if (!(message.resourceMeta && typeof message.resourceMeta.length === "number" || $util.isString(message.resourceMeta))) + return "resourceMeta: buffer expected"; + if (message.queries != null && message.hasOwnProperty("queries")) { + if (!Array.isArray(message.queries)) + return "queries: array expected"; + for (var i = 0; i < message.queries.length; ++i) + if (!$util.isString(message.queries[i])) + return "queries: string[] expected"; + } + if (message.startTime != null && message.hasOwnProperty("startTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.startTime); + if (error) + return "startTime." + error; + } + if (message.endTime != null && message.hasOwnProperty("endTime")) { + var error = $root.google.protobuf.Timestamp.verify(message.endTime); + if (error) + return "endTime." + error; + } + if (message.step != null && message.hasOwnProperty("step")) { + var error = $root.google.protobuf.Duration.verify(message.step); + if (error) + return "step." + error; + } + return null; + }; + + return GetTaskMetricsRequest; + })(); + + admin.GetTaskMetricsResponse = (function() { + + /** + * Properties of a GetTaskMetricsResponse. + * @memberof flyteidl.admin + * @interface IGetTaskMetricsResponse + * @property {Array.|null} [results] GetTaskMetricsResponse results + */ + + /** + * Constructs a new GetTaskMetricsResponse. 
+ * @memberof flyteidl.admin + * @classdesc Represents a GetTaskMetricsResponse. + * @implements IGetTaskMetricsResponse + * @constructor + * @param {flyteidl.admin.IGetTaskMetricsResponse=} [properties] Properties to set + */ + function GetTaskMetricsResponse(properties) { + this.results = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * GetTaskMetricsResponse results. + * @member {Array.} results + * @memberof flyteidl.admin.GetTaskMetricsResponse + * @instance + */ + GetTaskMetricsResponse.prototype.results = $util.emptyArray; + + /** + * Creates a new GetTaskMetricsResponse instance using the specified properties. + * @function create + * @memberof flyteidl.admin.GetTaskMetricsResponse + * @static + * @param {flyteidl.admin.IGetTaskMetricsResponse=} [properties] Properties to set + * @returns {flyteidl.admin.GetTaskMetricsResponse} GetTaskMetricsResponse instance + */ + GetTaskMetricsResponse.create = function create(properties) { + return new GetTaskMetricsResponse(properties); + }; + + /** + * Encodes the specified GetTaskMetricsResponse message. Does not implicitly {@link flyteidl.admin.GetTaskMetricsResponse.verify|verify} messages. 
+ * @function encode + * @memberof flyteidl.admin.GetTaskMetricsResponse + * @static + * @param {flyteidl.admin.IGetTaskMetricsResponse} message GetTaskMetricsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GetTaskMetricsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.results != null && message.results.length) + for (var i = 0; i < message.results.length; ++i) + $root.flyteidl.core.ExecutionMetricResult.encode(message.results[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Decodes a GetTaskMetricsResponse message from the specified reader or buffer. + * @function decode + * @memberof flyteidl.admin.GetTaskMetricsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {flyteidl.admin.GetTaskMetricsResponse} GetTaskMetricsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GetTaskMetricsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.admin.GetTaskMetricsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.results && message.results.length)) + message.results = []; + message.results.push($root.flyteidl.core.ExecutionMetricResult.decode(reader, reader.uint32())); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Verifies a GetTaskMetricsResponse message. 
+ * @function verify + * @memberof flyteidl.admin.GetTaskMetricsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + GetTaskMetricsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.results != null && message.hasOwnProperty("results")) { + if (!Array.isArray(message.results)) + return "results: array expected"; + for (var i = 0; i < message.results.length; ++i) { + var error = $root.flyteidl.core.ExecutionMetricResult.verify(message.results[i]); + if (error) + return "results." + error; + } + } + return null; + }; + + return GetTaskMetricsResponse; + })(); + + admin.GetTaskLogsRequest = (function() { + + /** + * Properties of a GetTaskLogsRequest. + * @memberof flyteidl.admin + * @interface IGetTaskLogsRequest + * @property {string|null} [taskType] GetTaskLogsRequest taskType + * @property {Uint8Array|null} [resourceMeta] GetTaskLogsRequest resourceMeta + * @property {Long|null} [lines] GetTaskLogsRequest lines + * @property {string|null} [token] GetTaskLogsRequest token + */ + + /** + * Constructs a new GetTaskLogsRequest. + * @memberof flyteidl.admin + * @classdesc Represents a GetTaskLogsRequest. + * @implements IGetTaskLogsRequest + * @constructor + * @param {flyteidl.admin.IGetTaskLogsRequest=} [properties] Properties to set + */ + function GetTaskLogsRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * GetTaskLogsRequest taskType. + * @member {string} taskType + * @memberof flyteidl.admin.GetTaskLogsRequest + * @instance + */ + GetTaskLogsRequest.prototype.taskType = ""; + + /** + * GetTaskLogsRequest resourceMeta. 
+ * @member {Uint8Array} resourceMeta + * @memberof flyteidl.admin.GetTaskLogsRequest + * @instance + */ + GetTaskLogsRequest.prototype.resourceMeta = $util.newBuffer([]); + + /** + * GetTaskLogsRequest lines. + * @member {Long} lines + * @memberof flyteidl.admin.GetTaskLogsRequest + * @instance + */ + GetTaskLogsRequest.prototype.lines = $util.Long ? $util.Long.fromBits(0,0,true) : 0; + + /** + * GetTaskLogsRequest token. + * @member {string} token + * @memberof flyteidl.admin.GetTaskLogsRequest + * @instance + */ + GetTaskLogsRequest.prototype.token = ""; + + /** + * Creates a new GetTaskLogsRequest instance using the specified properties. + * @function create + * @memberof flyteidl.admin.GetTaskLogsRequest + * @static + * @param {flyteidl.admin.IGetTaskLogsRequest=} [properties] Properties to set + * @returns {flyteidl.admin.GetTaskLogsRequest} GetTaskLogsRequest instance + */ + GetTaskLogsRequest.create = function create(properties) { + return new GetTaskLogsRequest(properties); + }; + + /** + * Encodes the specified GetTaskLogsRequest message. Does not implicitly {@link flyteidl.admin.GetTaskLogsRequest.verify|verify} messages. 
+ * @function encode + * @memberof flyteidl.admin.GetTaskLogsRequest + * @static + * @param {flyteidl.admin.IGetTaskLogsRequest} message GetTaskLogsRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GetTaskLogsRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.taskType != null && message.hasOwnProperty("taskType")) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.taskType); + if (message.resourceMeta != null && message.hasOwnProperty("resourceMeta")) + writer.uint32(/* id 2, wireType 2 =*/18).bytes(message.resourceMeta); + if (message.lines != null && message.hasOwnProperty("lines")) + writer.uint32(/* id 3, wireType 0 =*/24).uint64(message.lines); + if (message.token != null && message.hasOwnProperty("token")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.token); + return writer; + }; + + /** + * Decodes a GetTaskLogsRequest message from the specified reader or buffer. + * @function decode + * @memberof flyteidl.admin.GetTaskLogsRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {flyteidl.admin.GetTaskLogsRequest} GetTaskLogsRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GetTaskLogsRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? 
reader.len : reader.pos + length, message = new $root.flyteidl.admin.GetTaskLogsRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.taskType = reader.string(); + break; + case 2: + message.resourceMeta = reader.bytes(); + break; + case 3: + message.lines = reader.uint64(); + break; + case 4: + message.token = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Verifies a GetTaskLogsRequest message. + * @function verify + * @memberof flyteidl.admin.GetTaskLogsRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + GetTaskLogsRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.taskType != null && message.hasOwnProperty("taskType")) + if (!$util.isString(message.taskType)) + return "taskType: string expected"; + if (message.resourceMeta != null && message.hasOwnProperty("resourceMeta")) + if (!(message.resourceMeta && typeof message.resourceMeta.length === "number" || $util.isString(message.resourceMeta))) + return "resourceMeta: buffer expected"; + if (message.lines != null && message.hasOwnProperty("lines")) + if (!$util.isInteger(message.lines) && !(message.lines && $util.isInteger(message.lines.low) && $util.isInteger(message.lines.high))) + return "lines: integer|Long expected"; + if (message.token != null && message.hasOwnProperty("token")) + if (!$util.isString(message.token)) + return "token: string expected"; + return null; + }; + + return GetTaskLogsRequest; + })(); + + admin.GetTaskLogsResponse = (function() { + + /** + * Properties of a GetTaskLogsResponse. 
+ * @memberof flyteidl.admin + * @interface IGetTaskLogsResponse + * @property {Array.|null} [results] GetTaskLogsResponse results + * @property {string|null} [token] GetTaskLogsResponse token + */ + + /** + * Constructs a new GetTaskLogsResponse. + * @memberof flyteidl.admin + * @classdesc Represents a GetTaskLogsResponse. + * @implements IGetTaskLogsResponse + * @constructor + * @param {flyteidl.admin.IGetTaskLogsResponse=} [properties] Properties to set + */ + function GetTaskLogsResponse(properties) { + this.results = []; + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * GetTaskLogsResponse results. + * @member {Array.} results + * @memberof flyteidl.admin.GetTaskLogsResponse + * @instance + */ + GetTaskLogsResponse.prototype.results = $util.emptyArray; + + /** + * GetTaskLogsResponse token. + * @member {string} token + * @memberof flyteidl.admin.GetTaskLogsResponse + * @instance + */ + GetTaskLogsResponse.prototype.token = ""; + + /** + * Creates a new GetTaskLogsResponse instance using the specified properties. + * @function create + * @memberof flyteidl.admin.GetTaskLogsResponse + * @static + * @param {flyteidl.admin.IGetTaskLogsResponse=} [properties] Properties to set + * @returns {flyteidl.admin.GetTaskLogsResponse} GetTaskLogsResponse instance + */ + GetTaskLogsResponse.create = function create(properties) { + return new GetTaskLogsResponse(properties); + }; + + /** + * Encodes the specified GetTaskLogsResponse message. Does not implicitly {@link flyteidl.admin.GetTaskLogsResponse.verify|verify} messages. 
+ * @function encode + * @memberof flyteidl.admin.GetTaskLogsResponse + * @static + * @param {flyteidl.admin.IGetTaskLogsResponse} message GetTaskLogsResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GetTaskLogsResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.results != null && message.results.length) + for (var i = 0; i < message.results.length; ++i) + writer.uint32(/* id 1, wireType 2 =*/10).string(message.results[i]); + if (message.token != null && message.hasOwnProperty("token")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.token); + return writer; + }; + + /** + * Decodes a GetTaskLogsResponse message from the specified reader or buffer. + * @function decode + * @memberof flyteidl.admin.GetTaskLogsResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {flyteidl.admin.GetTaskLogsResponse} GetTaskLogsResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GetTaskLogsResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.admin.GetTaskLogsResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (!(message.results && message.results.length)) + message.results = []; + message.results.push(reader.string()); + break; + case 2: + message.token = reader.string(); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Verifies a GetTaskLogsResponse message. 
+ * @function verify + * @memberof flyteidl.admin.GetTaskLogsResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + GetTaskLogsResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.results != null && message.hasOwnProperty("results")) { + if (!Array.isArray(message.results)) + return "results: array expected"; + for (var i = 0; i < message.results.length; ++i) + if (!$util.isString(message.results[i])) + return "results: string[] expected"; + } + if (message.token != null && message.hasOwnProperty("token")) + if (!$util.isString(message.token)) + return "token: string expected"; + return null; + }; + + return GetTaskLogsResponse; + })(); + admin.ClusterAssignment = (function() { /** @@ -23445,6 +24225,7 @@ * @property {string|null} [project] NamedEntityIdentifier project * @property {string|null} [domain] NamedEntityIdentifier domain * @property {string|null} [name] NamedEntityIdentifier name + * @property {string|null} [org] NamedEntityIdentifier org */ /** @@ -23486,6 +24267,14 @@ */ NamedEntityIdentifier.prototype.name = ""; + /** + * NamedEntityIdentifier org. + * @member {string} org + * @memberof flyteidl.admin.NamedEntityIdentifier + * @instance + */ + NamedEntityIdentifier.prototype.org = ""; + /** * Creates a new NamedEntityIdentifier instance using the specified properties. 
* @function create @@ -23516,6 +24305,8 @@ writer.uint32(/* id 2, wireType 2 =*/18).string(message.domain); if (message.name != null && message.hasOwnProperty("name")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.name); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.org); return writer; }; @@ -23546,6 +24337,9 @@ case 3: message.name = reader.string(); break; + case 4: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -23574,6 +24368,9 @@ if (message.name != null && message.hasOwnProperty("name")) if (!$util.isString(message.name)) return "name: string expected"; + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -24043,6 +24840,7 @@ * @property {string|null} [token] NamedEntityIdentifierListRequest token * @property {flyteidl.admin.ISort|null} [sortBy] NamedEntityIdentifierListRequest sortBy * @property {string|null} [filters] NamedEntityIdentifierListRequest filters + * @property {string|null} [org] NamedEntityIdentifierListRequest org */ /** @@ -24108,6 +24906,14 @@ */ NamedEntityIdentifierListRequest.prototype.filters = ""; + /** + * NamedEntityIdentifierListRequest org. + * @member {string} org + * @memberof flyteidl.admin.NamedEntityIdentifierListRequest + * @instance + */ + NamedEntityIdentifierListRequest.prototype.org = ""; + /** * Creates a new NamedEntityIdentifierListRequest instance using the specified properties. 
* @function create @@ -24144,6 +24950,8 @@ $root.flyteidl.admin.Sort.encode(message.sortBy, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); if (message.filters != null && message.hasOwnProperty("filters")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.filters); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 7, wireType 2 =*/58).string(message.org); return writer; }; @@ -24183,6 +24991,9 @@ case 6: message.filters = reader.string(); break; + case 7: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -24222,6 +25033,9 @@ if (message.filters != null && message.hasOwnProperty("filters")) if (!$util.isString(message.filters)) return "filters: string expected"; + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -24241,6 +25055,7 @@ * @property {string|null} [token] NamedEntityListRequest token * @property {flyteidl.admin.ISort|null} [sortBy] NamedEntityListRequest sortBy * @property {string|null} [filters] NamedEntityListRequest filters + * @property {string|null} [org] NamedEntityListRequest org */ /** @@ -24314,6 +25129,14 @@ */ NamedEntityListRequest.prototype.filters = ""; + /** + * NamedEntityListRequest org. + * @member {string} org + * @memberof flyteidl.admin.NamedEntityListRequest + * @instance + */ + NamedEntityListRequest.prototype.org = ""; + /** * Creates a new NamedEntityListRequest instance using the specified properties. 
* @function create @@ -24352,6 +25175,8 @@ $root.flyteidl.admin.Sort.encode(message.sortBy, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); if (message.filters != null && message.hasOwnProperty("filters")) writer.uint32(/* id 7, wireType 2 =*/58).string(message.filters); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 8, wireType 2 =*/66).string(message.org); return writer; }; @@ -24394,6 +25219,9 @@ case 7: message.filters = reader.string(); break; + case 8: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -24444,6 +25272,9 @@ if (message.filters != null && message.hasOwnProperty("filters")) if (!$util.isString(message.filters)) return "filters: string expected"; + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -28758,6 +29589,7 @@ * @property {string|null} [name] ExecutionCreateRequest name * @property {flyteidl.admin.IExecutionSpec|null} [spec] ExecutionCreateRequest spec * @property {flyteidl.core.ILiteralMap|null} [inputs] ExecutionCreateRequest inputs + * @property {string|null} [org] ExecutionCreateRequest org */ /** @@ -28815,6 +29647,14 @@ */ ExecutionCreateRequest.prototype.inputs = null; + /** + * ExecutionCreateRequest org. + * @member {string} org + * @memberof flyteidl.admin.ExecutionCreateRequest + * @instance + */ + ExecutionCreateRequest.prototype.org = ""; + /** * Creates a new ExecutionCreateRequest instance using the specified properties. 
* @function create @@ -28849,6 +29689,8 @@ $root.flyteidl.admin.ExecutionSpec.encode(message.spec, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); if (message.inputs != null && message.hasOwnProperty("inputs")) $root.flyteidl.core.LiteralMap.encode(message.inputs, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.org); return writer; }; @@ -28885,6 +29727,9 @@ case 5: message.inputs = $root.flyteidl.core.LiteralMap.decode(reader, reader.uint32()); break; + case 6: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -28923,6 +29768,9 @@ if (error) return "inputs." + error; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -34266,6 +35114,7 @@ * @property {number|null} [limit] ActiveLaunchPlanListRequest limit * @property {string|null} [token] ActiveLaunchPlanListRequest token * @property {flyteidl.admin.ISort|null} [sortBy] ActiveLaunchPlanListRequest sortBy + * @property {string|null} [org] ActiveLaunchPlanListRequest org */ /** @@ -34323,6 +35172,14 @@ */ ActiveLaunchPlanListRequest.prototype.sortBy = null; + /** + * ActiveLaunchPlanListRequest org. + * @member {string} org + * @memberof flyteidl.admin.ActiveLaunchPlanListRequest + * @instance + */ + ActiveLaunchPlanListRequest.prototype.org = ""; + /** * Creates a new ActiveLaunchPlanListRequest instance using the specified properties. 
* @function create @@ -34357,6 +35214,8 @@ writer.uint32(/* id 4, wireType 2 =*/34).string(message.token); if (message.sortBy != null && message.hasOwnProperty("sortBy")) $root.flyteidl.admin.Sort.encode(message.sortBy, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.org); return writer; }; @@ -34393,6 +35252,9 @@ case 5: message.sortBy = $root.flyteidl.admin.Sort.decode(reader, reader.uint32()); break; + case 6: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -34429,6 +35291,9 @@ if (error) return "sortBy." + error; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -36429,6 +37294,7 @@ * @property {string|null} [project] MatchableAttributesConfiguration project * @property {string|null} [workflow] MatchableAttributesConfiguration workflow * @property {string|null} [launchPlan] MatchableAttributesConfiguration launchPlan + * @property {string|null} [org] MatchableAttributesConfiguration org */ /** @@ -36486,6 +37352,14 @@ */ MatchableAttributesConfiguration.prototype.launchPlan = ""; + /** + * MatchableAttributesConfiguration org. + * @member {string} org + * @memberof flyteidl.admin.MatchableAttributesConfiguration + * @instance + */ + MatchableAttributesConfiguration.prototype.org = ""; + /** * Creates a new MatchableAttributesConfiguration instance using the specified properties. 
* @function create @@ -36520,6 +37394,8 @@ writer.uint32(/* id 4, wireType 2 =*/34).string(message.workflow); if (message.launchPlan != null && message.hasOwnProperty("launchPlan")) writer.uint32(/* id 5, wireType 2 =*/42).string(message.launchPlan); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 6, wireType 2 =*/50).string(message.org); return writer; }; @@ -36556,6 +37432,9 @@ case 5: message.launchPlan = reader.string(); break; + case 6: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -36592,6 +37471,9 @@ if (message.launchPlan != null && message.hasOwnProperty("launchPlan")) if (!$util.isString(message.launchPlan)) return "launchPlan: string expected"; + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -36605,6 +37487,7 @@ * @memberof flyteidl.admin * @interface IListMatchableAttributesRequest * @property {flyteidl.admin.MatchableResource|null} [resourceType] ListMatchableAttributesRequest resourceType + * @property {string|null} [org] ListMatchableAttributesRequest org */ /** @@ -36630,6 +37513,14 @@ */ ListMatchableAttributesRequest.prototype.resourceType = 0; + /** + * ListMatchableAttributesRequest org. + * @member {string} org + * @memberof flyteidl.admin.ListMatchableAttributesRequest + * @instance + */ + ListMatchableAttributesRequest.prototype.org = ""; + /** * Creates a new ListMatchableAttributesRequest instance using the specified properties. 
* @function create @@ -36656,6 +37547,8 @@ writer = $Writer.create(); if (message.resourceType != null && message.hasOwnProperty("resourceType")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.resourceType); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 2, wireType 2 =*/18).string(message.org); return writer; }; @@ -36680,6 +37573,9 @@ case 1: message.resourceType = reader.int32(); break; + case 2: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -36713,6 +37609,9 @@ case 7: break; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -38439,6 +39338,7 @@ case 4: case 5: case 6: + case 7: break; } if (message.catalogKey != null && message.hasOwnProperty("catalogKey")) { @@ -38922,6 +39822,230 @@ return NodeExecutionGetDataResponse; })(); + admin.GetDynamicNodeWorkflowRequest = (function() { + + /** + * Properties of a GetDynamicNodeWorkflowRequest. + * @memberof flyteidl.admin + * @interface IGetDynamicNodeWorkflowRequest + * @property {flyteidl.core.INodeExecutionIdentifier|null} [id] GetDynamicNodeWorkflowRequest id + */ + + /** + * Constructs a new GetDynamicNodeWorkflowRequest. + * @memberof flyteidl.admin + * @classdesc Represents a GetDynamicNodeWorkflowRequest. + * @implements IGetDynamicNodeWorkflowRequest + * @constructor + * @param {flyteidl.admin.IGetDynamicNodeWorkflowRequest=} [properties] Properties to set + */ + function GetDynamicNodeWorkflowRequest(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * GetDynamicNodeWorkflowRequest id. 
+ * @member {flyteidl.core.INodeExecutionIdentifier|null|undefined} id + * @memberof flyteidl.admin.GetDynamicNodeWorkflowRequest + * @instance + */ + GetDynamicNodeWorkflowRequest.prototype.id = null; + + /** + * Creates a new GetDynamicNodeWorkflowRequest instance using the specified properties. + * @function create + * @memberof flyteidl.admin.GetDynamicNodeWorkflowRequest + * @static + * @param {flyteidl.admin.IGetDynamicNodeWorkflowRequest=} [properties] Properties to set + * @returns {flyteidl.admin.GetDynamicNodeWorkflowRequest} GetDynamicNodeWorkflowRequest instance + */ + GetDynamicNodeWorkflowRequest.create = function create(properties) { + return new GetDynamicNodeWorkflowRequest(properties); + }; + + /** + * Encodes the specified GetDynamicNodeWorkflowRequest message. Does not implicitly {@link flyteidl.admin.GetDynamicNodeWorkflowRequest.verify|verify} messages. + * @function encode + * @memberof flyteidl.admin.GetDynamicNodeWorkflowRequest + * @static + * @param {flyteidl.admin.IGetDynamicNodeWorkflowRequest} message GetDynamicNodeWorkflowRequest message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + GetDynamicNodeWorkflowRequest.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.id != null && message.hasOwnProperty("id")) + $root.flyteidl.core.NodeExecutionIdentifier.encode(message.id, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Decodes a GetDynamicNodeWorkflowRequest message from the specified reader or buffer. 
+ * @function decode + * @memberof flyteidl.admin.GetDynamicNodeWorkflowRequest + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {flyteidl.admin.GetDynamicNodeWorkflowRequest} GetDynamicNodeWorkflowRequest + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + GetDynamicNodeWorkflowRequest.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.admin.GetDynamicNodeWorkflowRequest(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.id = $root.flyteidl.core.NodeExecutionIdentifier.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Verifies a GetDynamicNodeWorkflowRequest message. + * @function verify + * @memberof flyteidl.admin.GetDynamicNodeWorkflowRequest + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + GetDynamicNodeWorkflowRequest.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.id != null && message.hasOwnProperty("id")) { + var error = $root.flyteidl.core.NodeExecutionIdentifier.verify(message.id); + if (error) + return "id." + error; + } + return null; + }; + + return GetDynamicNodeWorkflowRequest; + })(); + + admin.DynamicNodeWorkflowResponse = (function() { + + /** + * Properties of a DynamicNodeWorkflowResponse. 
+ * @memberof flyteidl.admin + * @interface IDynamicNodeWorkflowResponse + * @property {flyteidl.core.ICompiledWorkflowClosure|null} [compiledWorkflow] DynamicNodeWorkflowResponse compiledWorkflow + */ + + /** + * Constructs a new DynamicNodeWorkflowResponse. + * @memberof flyteidl.admin + * @classdesc Represents a DynamicNodeWorkflowResponse. + * @implements IDynamicNodeWorkflowResponse + * @constructor + * @param {flyteidl.admin.IDynamicNodeWorkflowResponse=} [properties] Properties to set + */ + function DynamicNodeWorkflowResponse(properties) { + if (properties) + for (var keys = Object.keys(properties), i = 0; i < keys.length; ++i) + if (properties[keys[i]] != null) + this[keys[i]] = properties[keys[i]]; + } + + /** + * DynamicNodeWorkflowResponse compiledWorkflow. + * @member {flyteidl.core.ICompiledWorkflowClosure|null|undefined} compiledWorkflow + * @memberof flyteidl.admin.DynamicNodeWorkflowResponse + * @instance + */ + DynamicNodeWorkflowResponse.prototype.compiledWorkflow = null; + + /** + * Creates a new DynamicNodeWorkflowResponse instance using the specified properties. + * @function create + * @memberof flyteidl.admin.DynamicNodeWorkflowResponse + * @static + * @param {flyteidl.admin.IDynamicNodeWorkflowResponse=} [properties] Properties to set + * @returns {flyteidl.admin.DynamicNodeWorkflowResponse} DynamicNodeWorkflowResponse instance + */ + DynamicNodeWorkflowResponse.create = function create(properties) { + return new DynamicNodeWorkflowResponse(properties); + }; + + /** + * Encodes the specified DynamicNodeWorkflowResponse message. Does not implicitly {@link flyteidl.admin.DynamicNodeWorkflowResponse.verify|verify} messages. 
+ * @function encode + * @memberof flyteidl.admin.DynamicNodeWorkflowResponse + * @static + * @param {flyteidl.admin.IDynamicNodeWorkflowResponse} message DynamicNodeWorkflowResponse message or plain object to encode + * @param {$protobuf.Writer} [writer] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + DynamicNodeWorkflowResponse.encode = function encode(message, writer) { + if (!writer) + writer = $Writer.create(); + if (message.compiledWorkflow != null && message.hasOwnProperty("compiledWorkflow")) + $root.flyteidl.core.CompiledWorkflowClosure.encode(message.compiledWorkflow, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); + return writer; + }; + + /** + * Decodes a DynamicNodeWorkflowResponse message from the specified reader or buffer. + * @function decode + * @memberof flyteidl.admin.DynamicNodeWorkflowResponse + * @static + * @param {$protobuf.Reader|Uint8Array} reader Reader or buffer to decode from + * @param {number} [length] Message length if known beforehand + * @returns {flyteidl.admin.DynamicNodeWorkflowResponse} DynamicNodeWorkflowResponse + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + DynamicNodeWorkflowResponse.decode = function decode(reader, length) { + if (!(reader instanceof $Reader)) + reader = $Reader.create(reader); + var end = length === undefined ? reader.len : reader.pos + length, message = new $root.flyteidl.admin.DynamicNodeWorkflowResponse(); + while (reader.pos < end) { + var tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + message.compiledWorkflow = $root.flyteidl.core.CompiledWorkflowClosure.decode(reader, reader.uint32()); + break; + default: + reader.skipType(tag & 7); + break; + } + } + return message; + }; + + /** + * Verifies a DynamicNodeWorkflowResponse message. 
+ * @function verify + * @memberof flyteidl.admin.DynamicNodeWorkflowResponse + * @static + * @param {Object.} message Plain object to verify + * @returns {string|null} `null` if valid, otherwise the reason why it is not + */ + DynamicNodeWorkflowResponse.verify = function verify(message) { + if (typeof message !== "object" || message === null) + return "object expected"; + if (message.compiledWorkflow != null && message.hasOwnProperty("compiledWorkflow")) { + var error = $root.flyteidl.core.CompiledWorkflowClosure.verify(message.compiledWorkflow); + if (error) + return "compiledWorkflow." + error; + } + return null; + }; + + return DynamicNodeWorkflowResponse; + })(); + admin.EmailMessage = (function() { /** @@ -39230,6 +40354,7 @@ * @property {string|null} [description] Project description * @property {flyteidl.admin.ILabels|null} [labels] Project labels * @property {flyteidl.admin.Project.ProjectState|null} [state] Project state + * @property {string|null} [org] Project org */ /** @@ -39296,6 +40421,14 @@ */ Project.prototype.state = 0; + /** + * Project org. + * @member {string} org + * @memberof flyteidl.admin.Project + * @instance + */ + Project.prototype.org = ""; + /** * Creates a new Project instance using the specified properties. 
* @function create @@ -39333,6 +40466,8 @@ $root.flyteidl.admin.Labels.encode(message.labels, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); if (message.state != null && message.hasOwnProperty("state")) writer.uint32(/* id 6, wireType 0 =*/48).int32(message.state); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 7, wireType 2 =*/58).string(message.org); return writer; }; @@ -39374,6 +40509,9 @@ case 6: message.state = reader.int32(); break; + case 7: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -39425,6 +40563,9 @@ case 2: break; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -39594,6 +40735,7 @@ * @property {string|null} [token] ProjectListRequest token * @property {string|null} [filters] ProjectListRequest filters * @property {flyteidl.admin.ISort|null} [sortBy] ProjectListRequest sortBy + * @property {string|null} [org] ProjectListRequest org */ /** @@ -39643,6 +40785,14 @@ */ ProjectListRequest.prototype.sortBy = null; + /** + * ProjectListRequest org. + * @member {string} org + * @memberof flyteidl.admin.ProjectListRequest + * @instance + */ + ProjectListRequest.prototype.org = ""; + /** * Creates a new ProjectListRequest instance using the specified properties. 
* @function create @@ -39675,6 +40825,8 @@ writer.uint32(/* id 3, wireType 2 =*/26).string(message.filters); if (message.sortBy != null && message.hasOwnProperty("sortBy")) $root.flyteidl.admin.Sort.encode(message.sortBy, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.org); return writer; }; @@ -39708,6 +40860,9 @@ case 4: message.sortBy = $root.flyteidl.admin.Sort.decode(reader, reader.uint32()); break; + case 5: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -39741,6 +40896,9 @@ if (error) return "sortBy." + error; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -40053,6 +41211,7 @@ * @interface IProjectAttributes * @property {string|null} [project] ProjectAttributes project * @property {flyteidl.admin.IMatchingAttributes|null} [matchingAttributes] ProjectAttributes matchingAttributes + * @property {string|null} [org] ProjectAttributes org */ /** @@ -40086,6 +41245,14 @@ */ ProjectAttributes.prototype.matchingAttributes = null; + /** + * ProjectAttributes org. + * @member {string} org + * @memberof flyteidl.admin.ProjectAttributes + * @instance + */ + ProjectAttributes.prototype.org = ""; + /** * Creates a new ProjectAttributes instance using the specified properties. 
* @function create @@ -40114,6 +41281,8 @@ writer.uint32(/* id 1, wireType 2 =*/10).string(message.project); if (message.matchingAttributes != null && message.hasOwnProperty("matchingAttributes")) $root.flyteidl.admin.MatchingAttributes.encode(message.matchingAttributes, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.org); return writer; }; @@ -40141,6 +41310,9 @@ case 2: message.matchingAttributes = $root.flyteidl.admin.MatchingAttributes.decode(reader, reader.uint32()); break; + case 3: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -40168,6 +41340,9 @@ if (error) return "matchingAttributes." + error; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -40387,6 +41562,7 @@ * @interface IProjectAttributesGetRequest * @property {string|null} [project] ProjectAttributesGetRequest project * @property {flyteidl.admin.MatchableResource|null} [resourceType] ProjectAttributesGetRequest resourceType + * @property {string|null} [org] ProjectAttributesGetRequest org */ /** @@ -40420,6 +41596,14 @@ */ ProjectAttributesGetRequest.prototype.resourceType = 0; + /** + * ProjectAttributesGetRequest org. + * @member {string} org + * @memberof flyteidl.admin.ProjectAttributesGetRequest + * @instance + */ + ProjectAttributesGetRequest.prototype.org = ""; + /** * Creates a new ProjectAttributesGetRequest instance using the specified properties. 
* @function create @@ -40448,6 +41632,8 @@ writer.uint32(/* id 1, wireType 2 =*/10).string(message.project); if (message.resourceType != null && message.hasOwnProperty("resourceType")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.resourceType); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.org); return writer; }; @@ -40475,6 +41661,9 @@ case 2: message.resourceType = reader.int32(); break; + case 3: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -40511,6 +41700,9 @@ case 7: break; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -40637,6 +41829,7 @@ * @interface IProjectAttributesDeleteRequest * @property {string|null} [project] ProjectAttributesDeleteRequest project * @property {flyteidl.admin.MatchableResource|null} [resourceType] ProjectAttributesDeleteRequest resourceType + * @property {string|null} [org] ProjectAttributesDeleteRequest org */ /** @@ -40670,6 +41863,14 @@ */ ProjectAttributesDeleteRequest.prototype.resourceType = 0; + /** + * ProjectAttributesDeleteRequest org. + * @member {string} org + * @memberof flyteidl.admin.ProjectAttributesDeleteRequest + * @instance + */ + ProjectAttributesDeleteRequest.prototype.org = ""; + /** * Creates a new ProjectAttributesDeleteRequest instance using the specified properties. 
* @function create @@ -40698,6 +41899,8 @@ writer.uint32(/* id 1, wireType 2 =*/10).string(message.project); if (message.resourceType != null && message.hasOwnProperty("resourceType")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.resourceType); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 3, wireType 2 =*/26).string(message.org); return writer; }; @@ -40725,6 +41928,9 @@ case 2: message.resourceType = reader.int32(); break; + case 3: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -40761,6 +41967,9 @@ case 7: break; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -40869,6 +42078,7 @@ * @property {string|null} [project] ProjectDomainAttributes project * @property {string|null} [domain] ProjectDomainAttributes domain * @property {flyteidl.admin.IMatchingAttributes|null} [matchingAttributes] ProjectDomainAttributes matchingAttributes + * @property {string|null} [org] ProjectDomainAttributes org */ /** @@ -40910,6 +42120,14 @@ */ ProjectDomainAttributes.prototype.matchingAttributes = null; + /** + * ProjectDomainAttributes org. + * @member {string} org + * @memberof flyteidl.admin.ProjectDomainAttributes + * @instance + */ + ProjectDomainAttributes.prototype.org = ""; + /** * Creates a new ProjectDomainAttributes instance using the specified properties. 
* @function create @@ -40940,6 +42158,8 @@ writer.uint32(/* id 2, wireType 2 =*/18).string(message.domain); if (message.matchingAttributes != null && message.hasOwnProperty("matchingAttributes")) $root.flyteidl.admin.MatchingAttributes.encode(message.matchingAttributes, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.org); return writer; }; @@ -40970,6 +42190,9 @@ case 3: message.matchingAttributes = $root.flyteidl.admin.MatchingAttributes.decode(reader, reader.uint32()); break; + case 4: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -41000,6 +42223,9 @@ if (error) return "matchingAttributes." + error; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -41220,6 +42446,7 @@ * @property {string|null} [project] ProjectDomainAttributesGetRequest project * @property {string|null} [domain] ProjectDomainAttributesGetRequest domain * @property {flyteidl.admin.MatchableResource|null} [resourceType] ProjectDomainAttributesGetRequest resourceType + * @property {string|null} [org] ProjectDomainAttributesGetRequest org */ /** @@ -41261,6 +42488,14 @@ */ ProjectDomainAttributesGetRequest.prototype.resourceType = 0; + /** + * ProjectDomainAttributesGetRequest org. + * @member {string} org + * @memberof flyteidl.admin.ProjectDomainAttributesGetRequest + * @instance + */ + ProjectDomainAttributesGetRequest.prototype.org = ""; + /** * Creates a new ProjectDomainAttributesGetRequest instance using the specified properties. 
* @function create @@ -41291,6 +42526,8 @@ writer.uint32(/* id 2, wireType 2 =*/18).string(message.domain); if (message.resourceType != null && message.hasOwnProperty("resourceType")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.resourceType); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.org); return writer; }; @@ -41321,6 +42558,9 @@ case 3: message.resourceType = reader.int32(); break; + case 4: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -41360,6 +42600,9 @@ case 7: break; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -41487,6 +42730,7 @@ * @property {string|null} [project] ProjectDomainAttributesDeleteRequest project * @property {string|null} [domain] ProjectDomainAttributesDeleteRequest domain * @property {flyteidl.admin.MatchableResource|null} [resourceType] ProjectDomainAttributesDeleteRequest resourceType + * @property {string|null} [org] ProjectDomainAttributesDeleteRequest org */ /** @@ -41528,6 +42772,14 @@ */ ProjectDomainAttributesDeleteRequest.prototype.resourceType = 0; + /** + * ProjectDomainAttributesDeleteRequest org. + * @member {string} org + * @memberof flyteidl.admin.ProjectDomainAttributesDeleteRequest + * @instance + */ + ProjectDomainAttributesDeleteRequest.prototype.org = ""; + /** * Creates a new ProjectDomainAttributesDeleteRequest instance using the specified properties. 
* @function create @@ -41558,6 +42810,8 @@ writer.uint32(/* id 2, wireType 2 =*/18).string(message.domain); if (message.resourceType != null && message.hasOwnProperty("resourceType")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.resourceType); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 4, wireType 2 =*/34).string(message.org); return writer; }; @@ -41588,6 +42842,9 @@ case 3: message.resourceType = reader.int32(); break; + case 4: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -41627,6 +42884,9 @@ case 7: break; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -46303,6 +47563,7 @@ * @property {string|null} [domain] WorkflowAttributes domain * @property {string|null} [workflow] WorkflowAttributes workflow * @property {flyteidl.admin.IMatchingAttributes|null} [matchingAttributes] WorkflowAttributes matchingAttributes + * @property {string|null} [org] WorkflowAttributes org */ /** @@ -46352,6 +47613,14 @@ */ WorkflowAttributes.prototype.matchingAttributes = null; + /** + * WorkflowAttributes org. + * @member {string} org + * @memberof flyteidl.admin.WorkflowAttributes + * @instance + */ + WorkflowAttributes.prototype.org = ""; + /** * Creates a new WorkflowAttributes instance using the specified properties. 
* @function create @@ -46384,6 +47653,8 @@ writer.uint32(/* id 3, wireType 2 =*/26).string(message.workflow); if (message.matchingAttributes != null && message.hasOwnProperty("matchingAttributes")) $root.flyteidl.admin.MatchingAttributes.encode(message.matchingAttributes, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.org); return writer; }; @@ -46417,6 +47688,9 @@ case 4: message.matchingAttributes = $root.flyteidl.admin.MatchingAttributes.decode(reader, reader.uint32()); break; + case 5: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -46450,6 +47724,9 @@ if (error) return "matchingAttributes." + error; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -46671,6 +47948,7 @@ * @property {string|null} [domain] WorkflowAttributesGetRequest domain * @property {string|null} [workflow] WorkflowAttributesGetRequest workflow * @property {flyteidl.admin.MatchableResource|null} [resourceType] WorkflowAttributesGetRequest resourceType + * @property {string|null} [org] WorkflowAttributesGetRequest org */ /** @@ -46720,6 +47998,14 @@ */ WorkflowAttributesGetRequest.prototype.resourceType = 0; + /** + * WorkflowAttributesGetRequest org. + * @member {string} org + * @memberof flyteidl.admin.WorkflowAttributesGetRequest + * @instance + */ + WorkflowAttributesGetRequest.prototype.org = ""; + /** * Creates a new WorkflowAttributesGetRequest instance using the specified properties. 
* @function create @@ -46752,6 +48038,8 @@ writer.uint32(/* id 3, wireType 2 =*/26).string(message.workflow); if (message.resourceType != null && message.hasOwnProperty("resourceType")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.resourceType); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.org); return writer; }; @@ -46785,6 +48073,9 @@ case 4: message.resourceType = reader.int32(); break; + case 5: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -46827,6 +48118,9 @@ case 7: break; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -46955,6 +48249,7 @@ * @property {string|null} [domain] WorkflowAttributesDeleteRequest domain * @property {string|null} [workflow] WorkflowAttributesDeleteRequest workflow * @property {flyteidl.admin.MatchableResource|null} [resourceType] WorkflowAttributesDeleteRequest resourceType + * @property {string|null} [org] WorkflowAttributesDeleteRequest org */ /** @@ -47004,6 +48299,14 @@ */ WorkflowAttributesDeleteRequest.prototype.resourceType = 0; + /** + * WorkflowAttributesDeleteRequest org. + * @member {string} org + * @memberof flyteidl.admin.WorkflowAttributesDeleteRequest + * @instance + */ + WorkflowAttributesDeleteRequest.prototype.org = ""; + /** * Creates a new WorkflowAttributesDeleteRequest instance using the specified properties. 
* @function create @@ -47036,6 +48339,8 @@ writer.uint32(/* id 3, wireType 2 =*/26).string(message.workflow); if (message.resourceType != null && message.hasOwnProperty("resourceType")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.resourceType); + if (message.org != null && message.hasOwnProperty("org")) + writer.uint32(/* id 5, wireType 2 =*/42).string(message.org); return writer; }; @@ -47069,6 +48374,9 @@ case 4: message.resourceType = reader.int32(); break; + case 5: + message.org = reader.string(); + break; default: reader.skipType(tag & 7); break; @@ -47111,6 +48419,9 @@ case 7: break; } + if (message.org != null && message.hasOwnProperty("org")) + if (!$util.isString(message.org)) + return "org: string expected"; return null; }; @@ -48046,6 +49357,39 @@ * @variation 2 */ + /** + * Callback as used by {@link flyteidl.service.AdminService#getDynamicNodeWorkflow}. + * @memberof flyteidl.service.AdminService + * @typedef GetDynamicNodeWorkflowCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {flyteidl.admin.DynamicNodeWorkflowResponse} [response] DynamicNodeWorkflowResponse + */ + + /** + * Calls GetDynamicNodeWorkflow. 
+ * @function getDynamicNodeWorkflow + * @memberof flyteidl.service.AdminService + * @instance + * @param {flyteidl.admin.IGetDynamicNodeWorkflowRequest} request GetDynamicNodeWorkflowRequest message or plain object + * @param {flyteidl.service.AdminService.GetDynamicNodeWorkflowCallback} callback Node-style callback called with the error, if any, and DynamicNodeWorkflowResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(AdminService.prototype.getDynamicNodeWorkflow = function getDynamicNodeWorkflow(request, callback) { + return this.rpcCall(getDynamicNodeWorkflow, $root.flyteidl.admin.GetDynamicNodeWorkflowRequest, $root.flyteidl.admin.DynamicNodeWorkflowResponse, request, callback); + }, "name", { value: "GetDynamicNodeWorkflow" }); + + /** + * Calls GetDynamicNodeWorkflow. + * @function getDynamicNodeWorkflow + * @memberof flyteidl.service.AdminService + * @instance + * @param {flyteidl.admin.IGetDynamicNodeWorkflowRequest} request GetDynamicNodeWorkflowRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + /** * Callback as used by {@link flyteidl.service.AdminService#listNodeExecutions}. * @memberof flyteidl.service.AdminService @@ -49137,6 +50481,72 @@ * @variation 2 */ + /** + * Callback as used by {@link flyteidl.service.AsyncAgentService#getTaskMetrics}. + * @memberof flyteidl.service.AsyncAgentService + * @typedef GetTaskMetricsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {flyteidl.admin.GetTaskMetricsResponse} [response] GetTaskMetricsResponse + */ + + /** + * Calls GetTaskMetrics. 
+ * @function getTaskMetrics + * @memberof flyteidl.service.AsyncAgentService + * @instance + * @param {flyteidl.admin.IGetTaskMetricsRequest} request GetTaskMetricsRequest message or plain object + * @param {flyteidl.service.AsyncAgentService.GetTaskMetricsCallback} callback Node-style callback called with the error, if any, and GetTaskMetricsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(AsyncAgentService.prototype.getTaskMetrics = function getTaskMetrics(request, callback) { + return this.rpcCall(getTaskMetrics, $root.flyteidl.admin.GetTaskMetricsRequest, $root.flyteidl.admin.GetTaskMetricsResponse, request, callback); + }, "name", { value: "GetTaskMetrics" }); + + /** + * Calls GetTaskMetrics. + * @function getTaskMetrics + * @memberof flyteidl.service.AsyncAgentService + * @instance + * @param {flyteidl.admin.IGetTaskMetricsRequest} request GetTaskMetricsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + + /** + * Callback as used by {@link flyteidl.service.AsyncAgentService#getTaskLogs}. + * @memberof flyteidl.service.AsyncAgentService + * @typedef GetTaskLogsCallback + * @type {function} + * @param {Error|null} error Error, if any + * @param {flyteidl.admin.GetTaskLogsResponse} [response] GetTaskLogsResponse + */ + + /** + * Calls GetTaskLogs. 
+ * @function getTaskLogs + * @memberof flyteidl.service.AsyncAgentService + * @instance + * @param {flyteidl.admin.IGetTaskLogsRequest} request GetTaskLogsRequest message or plain object + * @param {flyteidl.service.AsyncAgentService.GetTaskLogsCallback} callback Node-style callback called with the error, if any, and GetTaskLogsResponse + * @returns {undefined} + * @variation 1 + */ + Object.defineProperty(AsyncAgentService.prototype.getTaskLogs = function getTaskLogs(request, callback) { + return this.rpcCall(getTaskLogs, $root.flyteidl.admin.GetTaskLogsRequest, $root.flyteidl.admin.GetTaskLogsResponse, request, callback); + }, "name", { value: "GetTaskLogs" }); + + /** + * Calls GetTaskLogs. + * @function getTaskLogs + * @memberof flyteidl.service.AsyncAgentService + * @instance + * @param {flyteidl.admin.IGetTaskLogsRequest} request GetTaskLogsRequest message or plain object + * @returns {Promise} Promise + * @variation 2 + */ + return AsyncAgentService; })(); diff --git a/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.py index 5d515cb0c4..f23ad50819 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.py @@ -13,12 +13,14 @@ from flyteidl.core import literals_pb2 as flyteidl_dot_core_dot_literals__pb2 from flyteidl.core import tasks_pb2 as flyteidl_dot_core_dot_tasks__pb2 -from flyteidl.core import interface_pb2 as flyteidl_dot_core_dot_interface__pb2 from flyteidl.core import identifier_pb2 as flyteidl_dot_core_dot_identifier__pb2 from flyteidl.core import execution_pb2 as flyteidl_dot_core_dot_execution__pb2 +from flyteidl.core import metrics_pb2 as flyteidl_dot_core_dot_metrics__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1a\x66lyteidl/admin/agent.proto\x12\x0e\x66lyteidl.admin\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x19\x66lyteidl/core/tasks.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1d\x66lyteidl/core/execution.proto\"\x98\x05\n\x15TaskExecutionMetadata\x12R\n\x11task_execution_id\x18\x01 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\x0ftaskExecutionId\x12\x1c\n\tnamespace\x18\x02 \x01(\tR\tnamespace\x12I\n\x06labels\x18\x03 \x03(\x0b\x32\x31.flyteidl.admin.TaskExecutionMetadata.LabelsEntryR\x06labels\x12X\n\x0b\x61nnotations\x18\x04 \x03(\x0b\x32\x36.flyteidl.admin.TaskExecutionMetadata.AnnotationsEntryR\x0b\x61nnotations\x12.\n\x13k8s_service_account\x18\x05 \x01(\tR\x11k8sServiceAccount\x12t\n\x15\x65nvironment_variables\x18\x06 \x03(\x0b\x32?.flyteidl.admin.TaskExecutionMetadata.EnvironmentVariablesEntryR\x14\x65nvironmentVariables\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a>\n\x10\x41nnotationsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1aG\n\x19\x45nvironmentVariablesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\x83\x02\n\x11\x43reateTaskRequest\x12\x31\n\x06inputs\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x06inputs\x12\x37\n\x08template\x18\x02 \x01(\x0b\x32\x1b.flyteidl.core.TaskTemplateR\x08template\x12#\n\routput_prefix\x18\x03 \x01(\tR\x0coutputPrefix\x12]\n\x17task_execution_metadata\x18\x04 \x01(\x0b\x32%.flyteidl.admin.TaskExecutionMetadataR\x15taskExecutionMetadata\"z\n\x12\x43reateTaskResponse\x12%\n\rresource_meta\x18\x01 \x01(\x0cH\x00R\x0cresourceMeta\x12\x36\n\x08resource\x18\x02 \x01(\x0b\x32\x18.flyteidl.admin.ResourceH\x00R\x08resourceB\x05\n\x03res\"R\n\x0eGetTaskRequest\x12\x1b\n\ttask_type\x18\x01 
\x01(\tR\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\"|\n\x0fGetTaskResponse\x12\x34\n\x08resource\x18\x01 \x01(\x0b\x32\x18.flyteidl.admin.ResourceR\x08resource\x12\x33\n\tlog_links\x18\x02 \x03(\x0b\x32\x16.flyteidl.core.TaskLogR\x08logLinks\"\xbb\x01\n\x08Resource\x12+\n\x05state\x18\x01 \x01(\x0e\x32\x15.flyteidl.admin.StateR\x05state\x12\x33\n\x07outputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x07outputs\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message\x12\x33\n\tlog_links\x18\x04 \x03(\x0b\x32\x16.flyteidl.core.TaskLogR\x08logLinks\"U\n\x11\x44\x65leteTaskRequest\x12\x1b\n\ttask_type\x18\x01 \x01(\tR\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\"\x14\n\x12\x44\x65leteTaskResponse\"M\n\x05\x41gent\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x30\n\x14supported_task_types\x18\x02 \x03(\tR\x12supportedTaskTypes\"%\n\x0fGetAgentRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"?\n\x10GetAgentResponse\x12+\n\x05\x61gent\x18\x01 \x01(\x0b\x32\x15.flyteidl.admin.AgentR\x05\x61gent\"\x13\n\x11ListAgentsRequest\"C\n\x12ListAgentsResponse\x12-\n\x06\x61gents\x18\x01 \x03(\x0b\x32\x15.flyteidl.admin.AgentR\x06\x61gents*^\n\x05State\x12\x15\n\x11RETRYABLE_FAILURE\x10\x00\x12\x15\n\x11PERMANENT_FAILURE\x10\x01\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x42\xb6\x01\n\x12\x63om.flyteidl.adminB\nAgentProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1a\x66lyteidl/admin/agent.proto\x12\x0e\x66lyteidl.admin\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x19\x66lyteidl/core/tasks.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1b\x66lyteidl/core/metrics.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x98\x05\n\x15TaskExecutionMetadata\x12R\n\x11task_execution_id\x18\x01 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\x0ftaskExecutionId\x12\x1c\n\tnamespace\x18\x02 \x01(\tR\tnamespace\x12I\n\x06labels\x18\x03 \x03(\x0b\x32\x31.flyteidl.admin.TaskExecutionMetadata.LabelsEntryR\x06labels\x12X\n\x0b\x61nnotations\x18\x04 \x03(\x0b\x32\x36.flyteidl.admin.TaskExecutionMetadata.AnnotationsEntryR\x0b\x61nnotations\x12.\n\x13k8s_service_account\x18\x05 \x01(\tR\x11k8sServiceAccount\x12t\n\x15\x65nvironment_variables\x18\x06 \x03(\x0b\x32?.flyteidl.admin.TaskExecutionMetadata.EnvironmentVariablesEntryR\x14\x65nvironmentVariables\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a>\n\x10\x41nnotationsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1aG\n\x19\x45nvironmentVariablesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\x83\x02\n\x11\x43reateTaskRequest\x12\x31\n\x06inputs\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x06inputs\x12\x37\n\x08template\x18\x02 \x01(\x0b\x32\x1b.flyteidl.core.TaskTemplateR\x08template\x12#\n\routput_prefix\x18\x03 \x01(\tR\x0coutputPrefix\x12]\n\x17task_execution_metadata\x18\x04 \x01(\x0b\x32%.flyteidl.admin.TaskExecutionMetadataR\x15taskExecutionMetadata\"z\n\x12\x43reateTaskResponse\x12%\n\rresource_meta\x18\x01 \x01(\x0cH\x00R\x0cresourceMeta\x12\x36\n\x08resource\x18\x02 
\x01(\x0b\x32\x18.flyteidl.admin.ResourceH\x00R\x08resourceB\x05\n\x03res\"R\n\x0eGetTaskRequest\x12\x1b\n\ttask_type\x18\x01 \x01(\tR\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\"|\n\x0fGetTaskResponse\x12\x34\n\x08resource\x18\x01 \x01(\x0b\x32\x18.flyteidl.admin.ResourceR\x08resource\x12\x33\n\tlog_links\x18\x02 \x03(\x0b\x32\x16.flyteidl.core.TaskLogR\x08logLinks\"\xf9\x01\n\x08Resource\x12/\n\x05state\x18\x01 \x01(\x0e\x32\x15.flyteidl.admin.StateB\x02\x18\x01R\x05state\x12\x33\n\x07outputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x07outputs\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message\x12\x33\n\tlog_links\x18\x04 \x03(\x0b\x32\x16.flyteidl.core.TaskLogR\x08logLinks\x12\x38\n\x05phase\x18\x05 \x01(\x0e\x32\".flyteidl.core.TaskExecution.PhaseR\x05phase\"U\n\x11\x44\x65leteTaskRequest\x12\x1b\n\ttask_type\x18\x01 \x01(\tR\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\"\x14\n\x12\x44\x65leteTaskResponse\"M\n\x05\x41gent\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x30\n\x14supported_task_types\x18\x02 \x03(\tR\x12supportedTaskTypes\"%\n\x0fGetAgentRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"?\n\x10GetAgentResponse\x12+\n\x05\x61gent\x18\x01 \x01(\x0b\x32\x15.flyteidl.admin.AgentR\x05\x61gent\"\x13\n\x11ListAgentsRequest\"C\n\x12ListAgentsResponse\x12-\n\x06\x61gents\x18\x01 \x03(\x0b\x32\x15.flyteidl.admin.AgentR\x06\x61gents\"\x94\x02\n\x15GetTaskMetricsRequest\x12\x1b\n\ttask_type\x18\x01 \x01(\tR\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\x12\x18\n\x07queries\x18\x03 \x03(\tR\x07queries\x12\x39\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\x12-\n\x04step\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationR\x04step\"X\n\x16GetTaskMetricsResponse\x12>\n\x07results\x18\x01 
\x03(\x0b\x32$.flyteidl.core.ExecutionMetricResultR\x07results\"\x82\x01\n\x12GetTaskLogsRequest\x12\x1b\n\ttask_type\x18\x01 \x01(\tR\x08taskType\x12#\n\rresource_meta\x18\x02 \x01(\x0cR\x0cresourceMeta\x12\x14\n\x05lines\x18\x03 \x01(\x04R\x05lines\x12\x14\n\x05token\x18\x04 \x01(\tR\x05token\"E\n\x13GetTaskLogsResponse\x12\x18\n\x07results\x18\x01 \x03(\tR\x07results\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token*^\n\x05State\x12\x15\n\x11RETRYABLE_FAILURE\x10\x00\x12\x15\n\x11PERMANENT_FAILURE\x10\x01\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x42\xb6\x01\n\x12\x63om.flyteidl.adminB\nAgentProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -33,38 +35,48 @@ _TASKEXECUTIONMETADATA_ANNOTATIONSENTRY._serialized_options = b'8\001' _TASKEXECUTIONMETADATA_ENVIRONMENTVARIABLESENTRY._options = None _TASKEXECUTIONMETADATA_ENVIRONMENTVARIABLESENTRY._serialized_options = b'8\001' - _globals['_STATE']._serialized_start=2032 - _globals['_STATE']._serialized_end=2126 - _globals['_TASKEXECUTIONMETADATA']._serialized_start=198 - _globals['_TASKEXECUTIONMETADATA']._serialized_end=862 - _globals['_TASKEXECUTIONMETADATA_LABELSENTRY']._serialized_start=668 - _globals['_TASKEXECUTIONMETADATA_LABELSENTRY']._serialized_end=725 - _globals['_TASKEXECUTIONMETADATA_ANNOTATIONSENTRY']._serialized_start=727 - _globals['_TASKEXECUTIONMETADATA_ANNOTATIONSENTRY']._serialized_end=789 - _globals['_TASKEXECUTIONMETADATA_ENVIRONMENTVARIABLESENTRY']._serialized_start=791 - _globals['_TASKEXECUTIONMETADATA_ENVIRONMENTVARIABLESENTRY']._serialized_end=862 - _globals['_CREATETASKREQUEST']._serialized_start=865 - _globals['_CREATETASKREQUEST']._serialized_end=1124 - 
_globals['_CREATETASKRESPONSE']._serialized_start=1126 - _globals['_CREATETASKRESPONSE']._serialized_end=1248 - _globals['_GETTASKREQUEST']._serialized_start=1250 - _globals['_GETTASKREQUEST']._serialized_end=1332 - _globals['_GETTASKRESPONSE']._serialized_start=1334 - _globals['_GETTASKRESPONSE']._serialized_end=1458 - _globals['_RESOURCE']._serialized_start=1461 - _globals['_RESOURCE']._serialized_end=1648 - _globals['_DELETETASKREQUEST']._serialized_start=1650 - _globals['_DELETETASKREQUEST']._serialized_end=1735 - _globals['_DELETETASKRESPONSE']._serialized_start=1737 - _globals['_DELETETASKRESPONSE']._serialized_end=1757 - _globals['_AGENT']._serialized_start=1759 - _globals['_AGENT']._serialized_end=1836 - _globals['_GETAGENTREQUEST']._serialized_start=1838 - _globals['_GETAGENTREQUEST']._serialized_end=1875 - _globals['_GETAGENTRESPONSE']._serialized_start=1877 - _globals['_GETAGENTRESPONSE']._serialized_end=1940 - _globals['_LISTAGENTSREQUEST']._serialized_start=1942 - _globals['_LISTAGENTSREQUEST']._serialized_end=1961 - _globals['_LISTAGENTSRESPONSE']._serialized_start=1963 - _globals['_LISTAGENTSRESPONSE']._serialized_end=2030 + _RESOURCE.fields_by_name['state']._options = None + _RESOURCE.fields_by_name['state']._serialized_options = b'\030\001' + _globals['_STATE']._serialized_start=2730 + _globals['_STATE']._serialized_end=2824 + _globals['_TASKEXECUTIONMETADATA']._serialized_start=261 + _globals['_TASKEXECUTIONMETADATA']._serialized_end=925 + _globals['_TASKEXECUTIONMETADATA_LABELSENTRY']._serialized_start=731 + _globals['_TASKEXECUTIONMETADATA_LABELSENTRY']._serialized_end=788 + _globals['_TASKEXECUTIONMETADATA_ANNOTATIONSENTRY']._serialized_start=790 + _globals['_TASKEXECUTIONMETADATA_ANNOTATIONSENTRY']._serialized_end=852 + _globals['_TASKEXECUTIONMETADATA_ENVIRONMENTVARIABLESENTRY']._serialized_start=854 + _globals['_TASKEXECUTIONMETADATA_ENVIRONMENTVARIABLESENTRY']._serialized_end=925 + _globals['_CREATETASKREQUEST']._serialized_start=928 + 
_globals['_CREATETASKREQUEST']._serialized_end=1187 + _globals['_CREATETASKRESPONSE']._serialized_start=1189 + _globals['_CREATETASKRESPONSE']._serialized_end=1311 + _globals['_GETTASKREQUEST']._serialized_start=1313 + _globals['_GETTASKREQUEST']._serialized_end=1395 + _globals['_GETTASKRESPONSE']._serialized_start=1397 + _globals['_GETTASKRESPONSE']._serialized_end=1521 + _globals['_RESOURCE']._serialized_start=1524 + _globals['_RESOURCE']._serialized_end=1773 + _globals['_DELETETASKREQUEST']._serialized_start=1775 + _globals['_DELETETASKREQUEST']._serialized_end=1860 + _globals['_DELETETASKRESPONSE']._serialized_start=1862 + _globals['_DELETETASKRESPONSE']._serialized_end=1882 + _globals['_AGENT']._serialized_start=1884 + _globals['_AGENT']._serialized_end=1961 + _globals['_GETAGENTREQUEST']._serialized_start=1963 + _globals['_GETAGENTREQUEST']._serialized_end=2000 + _globals['_GETAGENTRESPONSE']._serialized_start=2002 + _globals['_GETAGENTRESPONSE']._serialized_end=2065 + _globals['_LISTAGENTSREQUEST']._serialized_start=2067 + _globals['_LISTAGENTSREQUEST']._serialized_end=2086 + _globals['_LISTAGENTSRESPONSE']._serialized_start=2088 + _globals['_LISTAGENTSRESPONSE']._serialized_end=2155 + _globals['_GETTASKMETRICSREQUEST']._serialized_start=2158 + _globals['_GETTASKMETRICSREQUEST']._serialized_end=2434 + _globals['_GETTASKMETRICSRESPONSE']._serialized_start=2436 + _globals['_GETTASKMETRICSRESPONSE']._serialized_end=2524 + _globals['_GETTASKLOGSREQUEST']._serialized_start=2527 + _globals['_GETTASKLOGSREQUEST']._serialized_end=2657 + _globals['_GETTASKLOGSRESPONSE']._serialized_start=2659 + _globals['_GETTASKLOGSRESPONSE']._serialized_end=2728 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.pyi index 3b80e41b58..7b7c6beb07 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/admin/agent_pb2.pyi @@ -1,8 +1,10 
@@ from flyteidl.core import literals_pb2 as _literals_pb2 from flyteidl.core import tasks_pb2 as _tasks_pb2 -from flyteidl.core import interface_pb2 as _interface_pb2 from flyteidl.core import identifier_pb2 as _identifier_pb2 from flyteidl.core import execution_pb2 as _execution_pb2 +from flyteidl.core import metrics_pb2 as _metrics_pb2 +from google.protobuf import duration_pb2 as _duration_pb2 +from google.protobuf import timestamp_pb2 as _timestamp_pb2 from google.protobuf.internal import containers as _containers from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor @@ -98,16 +100,18 @@ class GetTaskResponse(_message.Message): def __init__(self, resource: _Optional[_Union[Resource, _Mapping]] = ..., log_links: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ...) -> None: ... class Resource(_message.Message): - __slots__ = ["state", "outputs", "message", "log_links"] + __slots__ = ["state", "outputs", "message", "log_links", "phase"] STATE_FIELD_NUMBER: _ClassVar[int] OUTPUTS_FIELD_NUMBER: _ClassVar[int] MESSAGE_FIELD_NUMBER: _ClassVar[int] LOG_LINKS_FIELD_NUMBER: _ClassVar[int] + PHASE_FIELD_NUMBER: _ClassVar[int] state: State outputs: _literals_pb2.LiteralMap message: str log_links: _containers.RepeatedCompositeFieldContainer[_execution_pb2.TaskLog] - def __init__(self, state: _Optional[_Union[State, str]] = ..., outputs: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., message: _Optional[str] = ..., log_links: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ...) -> None: ... + phase: _execution_pb2.TaskExecution.Phase + def __init__(self, state: _Optional[_Union[State, str]] = ..., outputs: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., message: _Optional[str] = ..., log_links: _Optional[_Iterable[_Union[_execution_pb2.TaskLog, _Mapping]]] = ..., phase: _Optional[_Union[_execution_pb2.TaskExecution.Phase, str]] = ...) 
-> None: ... class DeleteTaskRequest(_message.Message): __slots__ = ["task_type", "resource_meta"] @@ -150,3 +154,45 @@ class ListAgentsResponse(_message.Message): AGENTS_FIELD_NUMBER: _ClassVar[int] agents: _containers.RepeatedCompositeFieldContainer[Agent] def __init__(self, agents: _Optional[_Iterable[_Union[Agent, _Mapping]]] = ...) -> None: ... + +class GetTaskMetricsRequest(_message.Message): + __slots__ = ["task_type", "resource_meta", "queries", "start_time", "end_time", "step"] + TASK_TYPE_FIELD_NUMBER: _ClassVar[int] + RESOURCE_META_FIELD_NUMBER: _ClassVar[int] + QUERIES_FIELD_NUMBER: _ClassVar[int] + START_TIME_FIELD_NUMBER: _ClassVar[int] + END_TIME_FIELD_NUMBER: _ClassVar[int] + STEP_FIELD_NUMBER: _ClassVar[int] + task_type: str + resource_meta: bytes + queries: _containers.RepeatedScalarFieldContainer[str] + start_time: _timestamp_pb2.Timestamp + end_time: _timestamp_pb2.Timestamp + step: _duration_pb2.Duration + def __init__(self, task_type: _Optional[str] = ..., resource_meta: _Optional[bytes] = ..., queries: _Optional[_Iterable[str]] = ..., start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., step: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ...) -> None: ... + +class GetTaskMetricsResponse(_message.Message): + __slots__ = ["results"] + RESULTS_FIELD_NUMBER: _ClassVar[int] + results: _containers.RepeatedCompositeFieldContainer[_metrics_pb2.ExecutionMetricResult] + def __init__(self, results: _Optional[_Iterable[_Union[_metrics_pb2.ExecutionMetricResult, _Mapping]]] = ...) -> None: ... 
+ +class GetTaskLogsRequest(_message.Message): + __slots__ = ["task_type", "resource_meta", "lines", "token"] + TASK_TYPE_FIELD_NUMBER: _ClassVar[int] + RESOURCE_META_FIELD_NUMBER: _ClassVar[int] + LINES_FIELD_NUMBER: _ClassVar[int] + TOKEN_FIELD_NUMBER: _ClassVar[int] + task_type: str + resource_meta: bytes + lines: int + token: str + def __init__(self, task_type: _Optional[str] = ..., resource_meta: _Optional[bytes] = ..., lines: _Optional[int] = ..., token: _Optional[str] = ...) -> None: ... + +class GetTaskLogsResponse(_message.Message): + __slots__ = ["results", "token"] + RESULTS_FIELD_NUMBER: _ClassVar[int] + TOKEN_FIELD_NUMBER: _ClassVar[int] + results: _containers.RepeatedScalarFieldContainer[str] + token: str + def __init__(self, results: _Optional[_Iterable[str]] = ..., token: _Optional[str] = ...) -> None: ... diff --git a/flyteidl/gen/pb_python/flyteidl/admin/common_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/common_pb2.py index 7c99413e09..c12fa17b0f 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/common_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/common_pb2.py @@ -17,7 +17,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66lyteidl/admin/common.proto\x12\x0e\x66lyteidl.admin\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"]\n\x15NamedEntityIdentifier\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\"o\n\x13NamedEntityMetadata\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x36\n\x05state\x18\x02 \x01(\x0e\x32 .flyteidl.admin.NamedEntityStateR\x05state\"\xc7\x01\n\x0bNamedEntity\x12@\n\rresource_type\x18\x01 \x01(\x0e\x32\x1b.flyteidl.core.ResourceTypeR\x0cresourceType\x12\x35\n\x02id\x18\x02 
\x01(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x02id\x12?\n\x08metadata\x18\x03 \x01(\x0b\x32#.flyteidl.admin.NamedEntityMetadataR\x08metadata\"\x82\x01\n\x04Sort\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12<\n\tdirection\x18\x02 \x01(\x0e\x32\x1e.flyteidl.admin.Sort.DirectionR\tdirection\"*\n\tDirection\x12\x0e\n\nDESCENDING\x10\x00\x12\r\n\tASCENDING\x10\x01\"\xc9\x01\n NamedEntityIdentifierListRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x14\n\x05limit\x18\x03 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x04 \x01(\tR\x05token\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\x12\x18\n\x07\x66ilters\x18\x06 \x01(\tR\x07\x66ilters\"\x81\x02\n\x16NamedEntityListRequest\x12@\n\rresource_type\x18\x01 \x01(\x0e\x32\x1b.flyteidl.core.ResourceTypeR\x0cresourceType\x12\x18\n\x07project\x18\x02 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x03 \x01(\tR\x06\x64omain\x12\x14\n\x05limit\x18\x04 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x05 \x01(\tR\x05token\x12-\n\x07sort_by\x18\x06 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\x12\x18\n\x07\x66ilters\x18\x07 \x01(\tR\x07\x66ilters\"t\n\x19NamedEntityIdentifierList\x12\x41\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x08\x65ntities\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"`\n\x0fNamedEntityList\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\x1b.flyteidl.admin.NamedEntityR\x08\x65ntities\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"\x90\x01\n\x15NamedEntityGetRequest\x12@\n\rresource_type\x18\x01 \x01(\x0e\x32\x1b.flyteidl.core.ResourceTypeR\x0cresourceType\x12\x35\n\x02id\x18\x02 \x01(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x02id\"\xd4\x01\n\x18NamedEntityUpdateRequest\x12@\n\rresource_type\x18\x01 \x01(\x0e\x32\x1b.flyteidl.core.ResourceTypeR\x0cresourceType\x12\x35\n\x02id\x18\x02 \x01(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x02id\x12?\n\x08metadata\x18\x03 
\x01(\x0b\x32#.flyteidl.admin.NamedEntityMetadataR\x08metadata\"\x1b\n\x19NamedEntityUpdateResponse\"=\n\x10ObjectGetRequest\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\"\xc1\x01\n\x13ResourceListRequest\x12\x35\n\x02id\x18\x01 \x01(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x02id\x12\x14\n\x05limit\x18\x02 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x04 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\">\n\x11\x45mailNotification\x12)\n\x10recipients_email\x18\x01 \x03(\tR\x0frecipientsEmail\"B\n\x15PagerDutyNotification\x12)\n\x10recipients_email\x18\x01 \x03(\tR\x0frecipientsEmail\">\n\x11SlackNotification\x12)\n\x10recipients_email\x18\x01 \x03(\tR\x0frecipientsEmail\"\x94\x02\n\x0cNotification\x12>\n\x06phases\x18\x01 \x03(\x0e\x32&.flyteidl.core.WorkflowExecution.PhaseR\x06phases\x12\x39\n\x05\x65mail\x18\x02 \x01(\x0b\x32!.flyteidl.admin.EmailNotificationH\x00R\x05\x65mail\x12\x46\n\npager_duty\x18\x03 \x01(\x0b\x32%.flyteidl.admin.PagerDutyNotificationH\x00R\tpagerDuty\x12\x39\n\x05slack\x18\x04 \x01(\x0b\x32!.flyteidl.admin.SlackNotificationH\x00R\x05slackB\x06\n\x04type\"5\n\x07UrlBlob\x12\x10\n\x03url\x18\x01 \x01(\tR\x03url\x12\x14\n\x05\x62ytes\x18\x02 \x01(\x03R\x05\x62ytes:\x02\x18\x01\"\x7f\n\x06Labels\x12:\n\x06values\x18\x01 \x03(\x0b\x32\".flyteidl.admin.Labels.ValuesEntryR\x06values\x1a\x39\n\x0bValuesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\x89\x01\n\x0b\x41nnotations\x12?\n\x06values\x18\x01 \x03(\x0b\x32\'.flyteidl.admin.Annotations.ValuesEntryR\x06values\x1a\x39\n\x0bValuesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\";\n\x04\x45nvs\x12\x33\n\x06values\x18\x01 \x03(\x0b\x32\x1b.flyteidl.core.KeyValuePairR\x06values\"z\n\x08\x41uthRole\x12,\n\x12\x61ssumable_iam_role\x18\x01 
\x01(\tR\x10\x61ssumableIamRole\x12<\n\x1akubernetes_service_account\x18\x02 \x01(\tR\x18kubernetesServiceAccount:\x02\x18\x01\"K\n\x13RawOutputDataConfig\x12\x34\n\x16output_location_prefix\x18\x01 \x01(\tR\x14outputLocationPrefix\"Q\n\tFlyteURLs\x12\x16\n\x06inputs\x18\x01 \x01(\tR\x06inputs\x12\x18\n\x07outputs\x18\x02 \x01(\tR\x07outputs\x12\x12\n\x04\x64\x65\x63k\x18\x03 \x01(\tR\x04\x64\x65\x63k*\\\n\x10NamedEntityState\x12\x17\n\x13NAMED_ENTITY_ACTIVE\x10\x00\x12\x19\n\x15NAMED_ENTITY_ARCHIVED\x10\x01\x12\x14\n\x10SYSTEM_GENERATED\x10\x02\x42\xb7\x01\n\x12\x63om.flyteidl.adminB\x0b\x43ommonProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66lyteidl/admin/common.proto\x12\x0e\x66lyteidl.admin\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"o\n\x15NamedEntityIdentifier\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12\x10\n\x03org\x18\x04 \x01(\tR\x03org\"o\n\x13NamedEntityMetadata\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x36\n\x05state\x18\x02 \x01(\x0e\x32 .flyteidl.admin.NamedEntityStateR\x05state\"\xc7\x01\n\x0bNamedEntity\x12@\n\rresource_type\x18\x01 \x01(\x0e\x32\x1b.flyteidl.core.ResourceTypeR\x0cresourceType\x12\x35\n\x02id\x18\x02 \x01(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x02id\x12?\n\x08metadata\x18\x03 \x01(\x0b\x32#.flyteidl.admin.NamedEntityMetadataR\x08metadata\"\x82\x01\n\x04Sort\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12<\n\tdirection\x18\x02 
\x01(\x0e\x32\x1e.flyteidl.admin.Sort.DirectionR\tdirection\"*\n\tDirection\x12\x0e\n\nDESCENDING\x10\x00\x12\r\n\tASCENDING\x10\x01\"\xdb\x01\n NamedEntityIdentifierListRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x14\n\x05limit\x18\x03 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x04 \x01(\tR\x05token\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\x12\x18\n\x07\x66ilters\x18\x06 \x01(\tR\x07\x66ilters\x12\x10\n\x03org\x18\x07 \x01(\tR\x03org\"\x93\x02\n\x16NamedEntityListRequest\x12@\n\rresource_type\x18\x01 \x01(\x0e\x32\x1b.flyteidl.core.ResourceTypeR\x0cresourceType\x12\x18\n\x07project\x18\x02 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x03 \x01(\tR\x06\x64omain\x12\x14\n\x05limit\x18\x04 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x05 \x01(\tR\x05token\x12-\n\x07sort_by\x18\x06 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\x12\x18\n\x07\x66ilters\x18\x07 \x01(\tR\x07\x66ilters\x12\x10\n\x03org\x18\x08 \x01(\tR\x03org\"t\n\x19NamedEntityIdentifierList\x12\x41\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x08\x65ntities\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"`\n\x0fNamedEntityList\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32\x1b.flyteidl.admin.NamedEntityR\x08\x65ntities\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"\x90\x01\n\x15NamedEntityGetRequest\x12@\n\rresource_type\x18\x01 \x01(\x0e\x32\x1b.flyteidl.core.ResourceTypeR\x0cresourceType\x12\x35\n\x02id\x18\x02 \x01(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x02id\"\xd4\x01\n\x18NamedEntityUpdateRequest\x12@\n\rresource_type\x18\x01 \x01(\x0e\x32\x1b.flyteidl.core.ResourceTypeR\x0cresourceType\x12\x35\n\x02id\x18\x02 \x01(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x02id\x12?\n\x08metadata\x18\x03 \x01(\x0b\x32#.flyteidl.admin.NamedEntityMetadataR\x08metadata\"\x1b\n\x19NamedEntityUpdateResponse\"=\n\x10ObjectGetRequest\x12)\n\x02id\x18\x01 
\x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\"\xc1\x01\n\x13ResourceListRequest\x12\x35\n\x02id\x18\x01 \x01(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x02id\x12\x14\n\x05limit\x18\x02 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x04 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\">\n\x11\x45mailNotification\x12)\n\x10recipients_email\x18\x01 \x03(\tR\x0frecipientsEmail\"B\n\x15PagerDutyNotification\x12)\n\x10recipients_email\x18\x01 \x03(\tR\x0frecipientsEmail\">\n\x11SlackNotification\x12)\n\x10recipients_email\x18\x01 \x03(\tR\x0frecipientsEmail\"\x94\x02\n\x0cNotification\x12>\n\x06phases\x18\x01 \x03(\x0e\x32&.flyteidl.core.WorkflowExecution.PhaseR\x06phases\x12\x39\n\x05\x65mail\x18\x02 \x01(\x0b\x32!.flyteidl.admin.EmailNotificationH\x00R\x05\x65mail\x12\x46\n\npager_duty\x18\x03 \x01(\x0b\x32%.flyteidl.admin.PagerDutyNotificationH\x00R\tpagerDuty\x12\x39\n\x05slack\x18\x04 \x01(\x0b\x32!.flyteidl.admin.SlackNotificationH\x00R\x05slackB\x06\n\x04type\"5\n\x07UrlBlob\x12\x10\n\x03url\x18\x01 \x01(\tR\x03url\x12\x14\n\x05\x62ytes\x18\x02 \x01(\x03R\x05\x62ytes:\x02\x18\x01\"\x7f\n\x06Labels\x12:\n\x06values\x18\x01 \x03(\x0b\x32\".flyteidl.admin.Labels.ValuesEntryR\x06values\x1a\x39\n\x0bValuesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\x89\x01\n\x0b\x41nnotations\x12?\n\x06values\x18\x01 \x03(\x0b\x32\'.flyteidl.admin.Annotations.ValuesEntryR\x06values\x1a\x39\n\x0bValuesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\";\n\x04\x45nvs\x12\x33\n\x06values\x18\x01 \x03(\x0b\x32\x1b.flyteidl.core.KeyValuePairR\x06values\"z\n\x08\x41uthRole\x12,\n\x12\x61ssumable_iam_role\x18\x01 \x01(\tR\x10\x61ssumableIamRole\x12<\n\x1akubernetes_service_account\x18\x02 
\x01(\tR\x18kubernetesServiceAccount:\x02\x18\x01\"K\n\x13RawOutputDataConfig\x12\x34\n\x16output_location_prefix\x18\x01 \x01(\tR\x14outputLocationPrefix\"Q\n\tFlyteURLs\x12\x16\n\x06inputs\x18\x01 \x01(\tR\x06inputs\x12\x18\n\x07outputs\x18\x02 \x01(\tR\x07outputs\x12\x12\n\x04\x64\x65\x63k\x18\x03 \x01(\tR\x04\x64\x65\x63k*\\\n\x10NamedEntityState\x12\x17\n\x13NAMED_ENTITY_ACTIVE\x10\x00\x12\x19\n\x15NAMED_ENTITY_ARCHIVED\x10\x01\x12\x14\n\x10SYSTEM_GENERATED\x10\x02\x42\xb7\x01\n\x12\x63om.flyteidl.adminB\x0b\x43ommonProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -34,60 +34,60 @@ _ANNOTATIONS_VALUESENTRY._serialized_options = b'8\001' _AUTHROLE._options = None _AUTHROLE._serialized_options = b'\030\001' - _globals['_NAMEDENTITYSTATE']._serialized_start=3190 - _globals['_NAMEDENTITYSTATE']._serialized_end=3282 + _globals['_NAMEDENTITYSTATE']._serialized_start=3244 + _globals['_NAMEDENTITYSTATE']._serialized_end=3336 _globals['_NAMEDENTITYIDENTIFIER']._serialized_start=173 - _globals['_NAMEDENTITYIDENTIFIER']._serialized_end=266 - _globals['_NAMEDENTITYMETADATA']._serialized_start=268 - _globals['_NAMEDENTITYMETADATA']._serialized_end=379 - _globals['_NAMEDENTITY']._serialized_start=382 - _globals['_NAMEDENTITY']._serialized_end=581 - _globals['_SORT']._serialized_start=584 - _globals['_SORT']._serialized_end=714 - _globals['_SORT_DIRECTION']._serialized_start=672 - _globals['_SORT_DIRECTION']._serialized_end=714 - _globals['_NAMEDENTITYIDENTIFIERLISTREQUEST']._serialized_start=717 - _globals['_NAMEDENTITYIDENTIFIERLISTREQUEST']._serialized_end=918 - _globals['_NAMEDENTITYLISTREQUEST']._serialized_start=921 - _globals['_NAMEDENTITYLISTREQUEST']._serialized_end=1178 - 
_globals['_NAMEDENTITYIDENTIFIERLIST']._serialized_start=1180 - _globals['_NAMEDENTITYIDENTIFIERLIST']._serialized_end=1296 - _globals['_NAMEDENTITYLIST']._serialized_start=1298 - _globals['_NAMEDENTITYLIST']._serialized_end=1394 - _globals['_NAMEDENTITYGETREQUEST']._serialized_start=1397 - _globals['_NAMEDENTITYGETREQUEST']._serialized_end=1541 - _globals['_NAMEDENTITYUPDATEREQUEST']._serialized_start=1544 - _globals['_NAMEDENTITYUPDATEREQUEST']._serialized_end=1756 - _globals['_NAMEDENTITYUPDATERESPONSE']._serialized_start=1758 - _globals['_NAMEDENTITYUPDATERESPONSE']._serialized_end=1785 - _globals['_OBJECTGETREQUEST']._serialized_start=1787 - _globals['_OBJECTGETREQUEST']._serialized_end=1848 - _globals['_RESOURCELISTREQUEST']._serialized_start=1851 - _globals['_RESOURCELISTREQUEST']._serialized_end=2044 - _globals['_EMAILNOTIFICATION']._serialized_start=2046 - _globals['_EMAILNOTIFICATION']._serialized_end=2108 - _globals['_PAGERDUTYNOTIFICATION']._serialized_start=2110 - _globals['_PAGERDUTYNOTIFICATION']._serialized_end=2176 - _globals['_SLACKNOTIFICATION']._serialized_start=2178 - _globals['_SLACKNOTIFICATION']._serialized_end=2240 - _globals['_NOTIFICATION']._serialized_start=2243 - _globals['_NOTIFICATION']._serialized_end=2519 - _globals['_URLBLOB']._serialized_start=2521 - _globals['_URLBLOB']._serialized_end=2574 - _globals['_LABELS']._serialized_start=2576 - _globals['_LABELS']._serialized_end=2703 - _globals['_LABELS_VALUESENTRY']._serialized_start=2646 - _globals['_LABELS_VALUESENTRY']._serialized_end=2703 - _globals['_ANNOTATIONS']._serialized_start=2706 - _globals['_ANNOTATIONS']._serialized_end=2843 - _globals['_ANNOTATIONS_VALUESENTRY']._serialized_start=2646 - _globals['_ANNOTATIONS_VALUESENTRY']._serialized_end=2703 - _globals['_ENVS']._serialized_start=2845 - _globals['_ENVS']._serialized_end=2904 - _globals['_AUTHROLE']._serialized_start=2906 - _globals['_AUTHROLE']._serialized_end=3028 - 
_globals['_RAWOUTPUTDATACONFIG']._serialized_start=3030 - _globals['_RAWOUTPUTDATACONFIG']._serialized_end=3105 - _globals['_FLYTEURLS']._serialized_start=3107 - _globals['_FLYTEURLS']._serialized_end=3188 + _globals['_NAMEDENTITYIDENTIFIER']._serialized_end=284 + _globals['_NAMEDENTITYMETADATA']._serialized_start=286 + _globals['_NAMEDENTITYMETADATA']._serialized_end=397 + _globals['_NAMEDENTITY']._serialized_start=400 + _globals['_NAMEDENTITY']._serialized_end=599 + _globals['_SORT']._serialized_start=602 + _globals['_SORT']._serialized_end=732 + _globals['_SORT_DIRECTION']._serialized_start=690 + _globals['_SORT_DIRECTION']._serialized_end=732 + _globals['_NAMEDENTITYIDENTIFIERLISTREQUEST']._serialized_start=735 + _globals['_NAMEDENTITYIDENTIFIERLISTREQUEST']._serialized_end=954 + _globals['_NAMEDENTITYLISTREQUEST']._serialized_start=957 + _globals['_NAMEDENTITYLISTREQUEST']._serialized_end=1232 + _globals['_NAMEDENTITYIDENTIFIERLIST']._serialized_start=1234 + _globals['_NAMEDENTITYIDENTIFIERLIST']._serialized_end=1350 + _globals['_NAMEDENTITYLIST']._serialized_start=1352 + _globals['_NAMEDENTITYLIST']._serialized_end=1448 + _globals['_NAMEDENTITYGETREQUEST']._serialized_start=1451 + _globals['_NAMEDENTITYGETREQUEST']._serialized_end=1595 + _globals['_NAMEDENTITYUPDATEREQUEST']._serialized_start=1598 + _globals['_NAMEDENTITYUPDATEREQUEST']._serialized_end=1810 + _globals['_NAMEDENTITYUPDATERESPONSE']._serialized_start=1812 + _globals['_NAMEDENTITYUPDATERESPONSE']._serialized_end=1839 + _globals['_OBJECTGETREQUEST']._serialized_start=1841 + _globals['_OBJECTGETREQUEST']._serialized_end=1902 + _globals['_RESOURCELISTREQUEST']._serialized_start=1905 + _globals['_RESOURCELISTREQUEST']._serialized_end=2098 + _globals['_EMAILNOTIFICATION']._serialized_start=2100 + _globals['_EMAILNOTIFICATION']._serialized_end=2162 + _globals['_PAGERDUTYNOTIFICATION']._serialized_start=2164 + _globals['_PAGERDUTYNOTIFICATION']._serialized_end=2230 + 
_globals['_SLACKNOTIFICATION']._serialized_start=2232 + _globals['_SLACKNOTIFICATION']._serialized_end=2294 + _globals['_NOTIFICATION']._serialized_start=2297 + _globals['_NOTIFICATION']._serialized_end=2573 + _globals['_URLBLOB']._serialized_start=2575 + _globals['_URLBLOB']._serialized_end=2628 + _globals['_LABELS']._serialized_start=2630 + _globals['_LABELS']._serialized_end=2757 + _globals['_LABELS_VALUESENTRY']._serialized_start=2700 + _globals['_LABELS_VALUESENTRY']._serialized_end=2757 + _globals['_ANNOTATIONS']._serialized_start=2760 + _globals['_ANNOTATIONS']._serialized_end=2897 + _globals['_ANNOTATIONS_VALUESENTRY']._serialized_start=2700 + _globals['_ANNOTATIONS_VALUESENTRY']._serialized_end=2757 + _globals['_ENVS']._serialized_start=2899 + _globals['_ENVS']._serialized_end=2958 + _globals['_AUTHROLE']._serialized_start=2960 + _globals['_AUTHROLE']._serialized_end=3082 + _globals['_RAWOUTPUTDATACONFIG']._serialized_start=3084 + _globals['_RAWOUTPUTDATACONFIG']._serialized_end=3159 + _globals['_FLYTEURLS']._serialized_start=3161 + _globals['_FLYTEURLS']._serialized_end=3242 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/common_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/common_pb2.pyi index 9c81d7cf45..420818fb95 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/common_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/admin/common_pb2.pyi @@ -20,14 +20,16 @@ NAMED_ENTITY_ARCHIVED: NamedEntityState SYSTEM_GENERATED: NamedEntityState class NamedEntityIdentifier(_message.Message): - __slots__ = ["project", "domain", "name"] + __slots__ = ["project", "domain", "name", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str domain: str name: str - def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... 
+ org: str + def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., name: _Optional[str] = ..., org: _Optional[str] = ...) -> None: ... class NamedEntityMetadata(_message.Message): __slots__ = ["description", "state"] @@ -62,23 +64,25 @@ class Sort(_message.Message): def __init__(self, key: _Optional[str] = ..., direction: _Optional[_Union[Sort.Direction, str]] = ...) -> None: ... class NamedEntityIdentifierListRequest(_message.Message): - __slots__ = ["project", "domain", "limit", "token", "sort_by", "filters"] + __slots__ = ["project", "domain", "limit", "token", "sort_by", "filters", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] LIMIT_FIELD_NUMBER: _ClassVar[int] TOKEN_FIELD_NUMBER: _ClassVar[int] SORT_BY_FIELD_NUMBER: _ClassVar[int] FILTERS_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str domain: str limit: int token: str sort_by: Sort filters: str - def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., limit: _Optional[int] = ..., token: _Optional[str] = ..., sort_by: _Optional[_Union[Sort, _Mapping]] = ..., filters: _Optional[str] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., limit: _Optional[int] = ..., token: _Optional[str] = ..., sort_by: _Optional[_Union[Sort, _Mapping]] = ..., filters: _Optional[str] = ..., org: _Optional[str] = ...) -> None: ... 
class NamedEntityListRequest(_message.Message): - __slots__ = ["resource_type", "project", "domain", "limit", "token", "sort_by", "filters"] + __slots__ = ["resource_type", "project", "domain", "limit", "token", "sort_by", "filters", "org"] RESOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] @@ -86,6 +90,7 @@ class NamedEntityListRequest(_message.Message): TOKEN_FIELD_NUMBER: _ClassVar[int] SORT_BY_FIELD_NUMBER: _ClassVar[int] FILTERS_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] resource_type: _identifier_pb2.ResourceType project: str domain: str @@ -93,7 +98,8 @@ class NamedEntityListRequest(_message.Message): token: str sort_by: Sort filters: str - def __init__(self, resource_type: _Optional[_Union[_identifier_pb2.ResourceType, str]] = ..., project: _Optional[str] = ..., domain: _Optional[str] = ..., limit: _Optional[int] = ..., token: _Optional[str] = ..., sort_by: _Optional[_Union[Sort, _Mapping]] = ..., filters: _Optional[str] = ...) -> None: ... + org: str + def __init__(self, resource_type: _Optional[_Union[_identifier_pb2.ResourceType, str]] = ..., project: _Optional[str] = ..., domain: _Optional[str] = ..., limit: _Optional[int] = ..., token: _Optional[str] = ..., sort_by: _Optional[_Union[Sort, _Mapping]] = ..., filters: _Optional[str] = ..., org: _Optional[str] = ...) -> None: ... 
class NamedEntityIdentifierList(_message.Message): __slots__ = ["entities", "token"] diff --git a/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.py index f71cdab8a2..918db78e56 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.py @@ -24,7 +24,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x66lyteidl/admin/execution.proto\x12\x0e\x66lyteidl.admin\x1a\'flyteidl/admin/cluster_assignment.proto\x1a\x1b\x66lyteidl/admin/common.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1f\x66lyteidl/core/artifact_id.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1b\x66lyteidl/core/metrics.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xc4\x01\n\x16\x45xecutionCreateRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12\x31\n\x04spec\x18\x04 \x01(\x0b\x32\x1d.flyteidl.admin.ExecutionSpecR\x04spec\x12\x31\n\x06inputs\x18\x05 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x06inputs\"\x99\x01\n\x18\x45xecutionRelaunchRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12\'\n\x0foverwrite_cache\x18\x04 \x01(\x08R\x0eoverwriteCacheJ\x04\x08\x02\x10\x03\"\xa8\x01\n\x17\x45xecutionRecoverRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12=\n\x08metadata\x18\x03 \x01(\x0b\x32!.flyteidl.admin.ExecutionMetadataR\x08metadata\"U\n\x17\x45xecutionCreateResponse\x12:\n\x02id\x18\x01 
\x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"Y\n\x1bWorkflowExecutionGetRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"\xb6\x01\n\tExecution\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x31\n\x04spec\x18\x02 \x01(\x0b\x32\x1d.flyteidl.admin.ExecutionSpecR\x04spec\x12:\n\x07\x63losure\x18\x03 \x01(\x0b\x32 .flyteidl.admin.ExecutionClosureR\x07\x63losure\"`\n\rExecutionList\x12\x39\n\nexecutions\x18\x01 \x03(\x0b\x32\x19.flyteidl.admin.ExecutionR\nexecutions\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"e\n\x0eLiteralMapBlob\x12\x37\n\x06values\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\x06values\x12\x12\n\x03uri\x18\x02 \x01(\tH\x00R\x03uriB\x06\n\x04\x64\x61ta\"C\n\rAbortMetadata\x12\x14\n\x05\x63\x61use\x18\x01 \x01(\tR\x05\x63\x61use\x12\x1c\n\tprincipal\x18\x02 \x01(\tR\tprincipal\"\x98\x07\n\x10\x45xecutionClosure\x12>\n\x07outputs\x18\x01 \x01(\x0b\x32\x1e.flyteidl.admin.LiteralMapBlobB\x02\x18\x01H\x00R\x07outputs\x12\x35\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x00R\x05\x65rror\x12%\n\x0b\x61\x62ort_cause\x18\n \x01(\tB\x02\x18\x01H\x00R\nabortCause\x12\x46\n\x0e\x61\x62ort_metadata\x18\x0c \x01(\x0b\x32\x1d.flyteidl.admin.AbortMetadataH\x00R\rabortMetadata\x12@\n\x0boutput_data\x18\r \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\noutputData\x12\x46\n\x0f\x63omputed_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01R\x0e\x63omputedInputs\x12<\n\x05phase\x18\x04 \x01(\x0e\x32&.flyteidl.core.WorkflowExecution.PhaseR\x05phase\x12\x39\n\nstarted_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12\x35\n\x08\x64uration\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\ncreated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x08 
\x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x42\n\rnotifications\x18\t \x03(\x0b\x32\x1c.flyteidl.admin.NotificationR\rnotifications\x12:\n\x0bworkflow_id\x18\x0b \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nworkflowId\x12]\n\x14state_change_details\x18\x0e \x01(\x0b\x32+.flyteidl.admin.ExecutionStateChangeDetailsR\x12stateChangeDetailsB\x0f\n\routput_result\"[\n\x0eSystemMetadata\x12+\n\x11\x65xecution_cluster\x18\x01 \x01(\tR\x10\x65xecutionCluster\x12\x1c\n\tnamespace\x18\x02 \x01(\tR\tnamespace\"\xf8\x04\n\x11\x45xecutionMetadata\x12\x43\n\x04mode\x18\x01 \x01(\x0e\x32/.flyteidl.admin.ExecutionMetadata.ExecutionModeR\x04mode\x12\x1c\n\tprincipal\x18\x02 \x01(\tR\tprincipal\x12\x18\n\x07nesting\x18\x03 \x01(\rR\x07nesting\x12=\n\x0cscheduled_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x0bscheduledAt\x12Z\n\x15parent_node_execution\x18\x05 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x13parentNodeExecution\x12[\n\x13reference_execution\x18\x10 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x12referenceExecution\x12G\n\x0fsystem_metadata\x18\x11 \x01(\x0b\x32\x1e.flyteidl.admin.SystemMetadataR\x0esystemMetadata\x12<\n\x0c\x61rtifact_ids\x18\x12 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\"g\n\rExecutionMode\x12\n\n\x06MANUAL\x10\x00\x12\r\n\tSCHEDULED\x10\x01\x12\n\n\x06SYSTEM\x10\x02\x12\x0c\n\x08RELAUNCH\x10\x03\x12\x12\n\x0e\x43HILD_WORKFLOW\x10\x04\x12\r\n\tRECOVERED\x10\x05\"V\n\x10NotificationList\x12\x42\n\rnotifications\x18\x01 \x03(\x0b\x32\x1c.flyteidl.admin.NotificationR\rnotifications\"\x90\x08\n\rExecutionSpec\x12:\n\x0blaunch_plan\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nlaunchPlan\x12\x35\n\x06inputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01R\x06inputs\x12=\n\x08metadata\x18\x03 \x01(\x0b\x32!.flyteidl.admin.ExecutionMetadataR\x08metadata\x12H\n\rnotifications\x18\x05 \x01(\x0b\x32 
.flyteidl.admin.NotificationListH\x00R\rnotifications\x12!\n\x0b\x64isable_all\x18\x06 \x01(\x08H\x00R\ndisableAll\x12.\n\x06labels\x18\x07 \x01(\x0b\x32\x16.flyteidl.admin.LabelsR\x06labels\x12=\n\x0b\x61nnotations\x18\x08 \x01(\x0b\x32\x1b.flyteidl.admin.AnnotationsR\x0b\x61nnotations\x12I\n\x10security_context\x18\n \x01(\x0b\x32\x1e.flyteidl.core.SecurityContextR\x0fsecurityContext\x12\x39\n\tauth_role\x18\x10 \x01(\x0b\x32\x18.flyteidl.admin.AuthRoleB\x02\x18\x01R\x08\x61uthRole\x12M\n\x12quality_of_service\x18\x11 \x01(\x0b\x32\x1f.flyteidl.core.QualityOfServiceR\x10qualityOfService\x12\'\n\x0fmax_parallelism\x18\x12 \x01(\x05R\x0emaxParallelism\x12X\n\x16raw_output_data_config\x18\x13 \x01(\x0b\x32#.flyteidl.admin.RawOutputDataConfigR\x13rawOutputDataConfig\x12P\n\x12\x63luster_assignment\x18\x14 \x01(\x0b\x32!.flyteidl.admin.ClusterAssignmentR\x11\x63lusterAssignment\x12@\n\rinterruptible\x18\x15 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\rinterruptible\x12\'\n\x0foverwrite_cache\x18\x16 \x01(\x08R\x0eoverwriteCache\x12(\n\x04\x65nvs\x18\x17 \x01(\x0b\x32\x14.flyteidl.admin.EnvsR\x04\x65nvs\x12\x12\n\x04tags\x18\x18 \x03(\tR\x04tagsB\x18\n\x16notification_overridesJ\x04\x08\x04\x10\x05\"m\n\x19\x45xecutionTerminateRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x14\n\x05\x63\x61use\x18\x02 \x01(\tR\x05\x63\x61use\"\x1c\n\x1a\x45xecutionTerminateResponse\"]\n\x1fWorkflowExecutionGetDataRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"\x88\x02\n WorkflowExecutionGetDataResponse\x12\x35\n\x07outputs\x18\x01 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x07outputs\x12\x33\n\x06inputs\x18\x02 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x06inputs\x12:\n\x0b\x66ull_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\nfullInputs\x12<\n\x0c\x66ull_outputs\x18\x04 
\x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ullOutputs\"\x8a\x01\n\x16\x45xecutionUpdateRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x34\n\x05state\x18\x02 \x01(\x0e\x32\x1e.flyteidl.admin.ExecutionStateR\x05state\"\xae\x01\n\x1b\x45xecutionStateChangeDetails\x12\x34\n\x05state\x18\x01 \x01(\x0e\x32\x1e.flyteidl.admin.ExecutionStateR\x05state\x12;\n\x0boccurred_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\x12\x1c\n\tprincipal\x18\x03 \x01(\tR\tprincipal\"\x19\n\x17\x45xecutionUpdateResponse\"v\n\"WorkflowExecutionGetMetricsRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x14\n\x05\x64\x65pth\x18\x02 \x01(\x05R\x05\x64\x65pth\"N\n#WorkflowExecutionGetMetricsResponse\x12\'\n\x04span\x18\x01 \x01(\x0b\x32\x13.flyteidl.core.SpanR\x04span*>\n\x0e\x45xecutionState\x12\x14\n\x10\x45XECUTION_ACTIVE\x10\x00\x12\x16\n\x12\x45XECUTION_ARCHIVED\x10\x01\x42\xba\x01\n\x12\x63om.flyteidl.adminB\x0e\x45xecutionProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x66lyteidl/admin/execution.proto\x12\x0e\x66lyteidl.admin\x1a\'flyteidl/admin/cluster_assignment.proto\x1a\x1b\x66lyteidl/admin/common.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1f\x66lyteidl/core/artifact_id.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1b\x66lyteidl/core/metrics.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xd6\x01\n\x16\x45xecutionCreateRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 
\x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12\x31\n\x04spec\x18\x04 \x01(\x0b\x32\x1d.flyteidl.admin.ExecutionSpecR\x04spec\x12\x31\n\x06inputs\x18\x05 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x06inputs\x12\x10\n\x03org\x18\x06 \x01(\tR\x03org\"\x99\x01\n\x18\x45xecutionRelaunchRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12\'\n\x0foverwrite_cache\x18\x04 \x01(\x08R\x0eoverwriteCacheJ\x04\x08\x02\x10\x03\"\xa8\x01\n\x17\x45xecutionRecoverRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12=\n\x08metadata\x18\x03 \x01(\x0b\x32!.flyteidl.admin.ExecutionMetadataR\x08metadata\"U\n\x17\x45xecutionCreateResponse\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"Y\n\x1bWorkflowExecutionGetRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"\xb6\x01\n\tExecution\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x31\n\x04spec\x18\x02 \x01(\x0b\x32\x1d.flyteidl.admin.ExecutionSpecR\x04spec\x12:\n\x07\x63losure\x18\x03 \x01(\x0b\x32 .flyteidl.admin.ExecutionClosureR\x07\x63losure\"`\n\rExecutionList\x12\x39\n\nexecutions\x18\x01 \x03(\x0b\x32\x19.flyteidl.admin.ExecutionR\nexecutions\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"e\n\x0eLiteralMapBlob\x12\x37\n\x06values\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\x06values\x12\x12\n\x03uri\x18\x02 \x01(\tH\x00R\x03uriB\x06\n\x04\x64\x61ta\"C\n\rAbortMetadata\x12\x14\n\x05\x63\x61use\x18\x01 \x01(\tR\x05\x63\x61use\x12\x1c\n\tprincipal\x18\x02 \x01(\tR\tprincipal\"\x98\x07\n\x10\x45xecutionClosure\x12>\n\x07outputs\x18\x01 \x01(\x0b\x32\x1e.flyteidl.admin.LiteralMapBlobB\x02\x18\x01H\x00R\x07outputs\x12\x35\n\x05\x65rror\x18\x02 
\x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x00R\x05\x65rror\x12%\n\x0b\x61\x62ort_cause\x18\n \x01(\tB\x02\x18\x01H\x00R\nabortCause\x12\x46\n\x0e\x61\x62ort_metadata\x18\x0c \x01(\x0b\x32\x1d.flyteidl.admin.AbortMetadataH\x00R\rabortMetadata\x12@\n\x0boutput_data\x18\r \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\noutputData\x12\x46\n\x0f\x63omputed_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01R\x0e\x63omputedInputs\x12<\n\x05phase\x18\x04 \x01(\x0e\x32&.flyteidl.core.WorkflowExecution.PhaseR\x05phase\x12\x39\n\nstarted_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12\x35\n\x08\x64uration\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\ncreated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\x42\n\rnotifications\x18\t \x03(\x0b\x32\x1c.flyteidl.admin.NotificationR\rnotifications\x12:\n\x0bworkflow_id\x18\x0b \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nworkflowId\x12]\n\x14state_change_details\x18\x0e \x01(\x0b\x32+.flyteidl.admin.ExecutionStateChangeDetailsR\x12stateChangeDetailsB\x0f\n\routput_result\"[\n\x0eSystemMetadata\x12+\n\x11\x65xecution_cluster\x18\x01 \x01(\tR\x10\x65xecutionCluster\x12\x1c\n\tnamespace\x18\x02 \x01(\tR\tnamespace\"\xf8\x04\n\x11\x45xecutionMetadata\x12\x43\n\x04mode\x18\x01 \x01(\x0e\x32/.flyteidl.admin.ExecutionMetadata.ExecutionModeR\x04mode\x12\x1c\n\tprincipal\x18\x02 \x01(\tR\tprincipal\x12\x18\n\x07nesting\x18\x03 \x01(\rR\x07nesting\x12=\n\x0cscheduled_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x0bscheduledAt\x12Z\n\x15parent_node_execution\x18\x05 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x13parentNodeExecution\x12[\n\x13reference_execution\x18\x10 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x12referenceExecution\x12G\n\x0fsystem_metadata\x18\x11 
\x01(\x0b\x32\x1e.flyteidl.admin.SystemMetadataR\x0esystemMetadata\x12<\n\x0c\x61rtifact_ids\x18\x12 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\"g\n\rExecutionMode\x12\n\n\x06MANUAL\x10\x00\x12\r\n\tSCHEDULED\x10\x01\x12\n\n\x06SYSTEM\x10\x02\x12\x0c\n\x08RELAUNCH\x10\x03\x12\x12\n\x0e\x43HILD_WORKFLOW\x10\x04\x12\r\n\tRECOVERED\x10\x05\"V\n\x10NotificationList\x12\x42\n\rnotifications\x18\x01 \x03(\x0b\x32\x1c.flyteidl.admin.NotificationR\rnotifications\"\x90\x08\n\rExecutionSpec\x12:\n\x0blaunch_plan\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nlaunchPlan\x12\x35\n\x06inputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01R\x06inputs\x12=\n\x08metadata\x18\x03 \x01(\x0b\x32!.flyteidl.admin.ExecutionMetadataR\x08metadata\x12H\n\rnotifications\x18\x05 \x01(\x0b\x32 .flyteidl.admin.NotificationListH\x00R\rnotifications\x12!\n\x0b\x64isable_all\x18\x06 \x01(\x08H\x00R\ndisableAll\x12.\n\x06labels\x18\x07 \x01(\x0b\x32\x16.flyteidl.admin.LabelsR\x06labels\x12=\n\x0b\x61nnotations\x18\x08 \x01(\x0b\x32\x1b.flyteidl.admin.AnnotationsR\x0b\x61nnotations\x12I\n\x10security_context\x18\n \x01(\x0b\x32\x1e.flyteidl.core.SecurityContextR\x0fsecurityContext\x12\x39\n\tauth_role\x18\x10 \x01(\x0b\x32\x18.flyteidl.admin.AuthRoleB\x02\x18\x01R\x08\x61uthRole\x12M\n\x12quality_of_service\x18\x11 \x01(\x0b\x32\x1f.flyteidl.core.QualityOfServiceR\x10qualityOfService\x12\'\n\x0fmax_parallelism\x18\x12 \x01(\x05R\x0emaxParallelism\x12X\n\x16raw_output_data_config\x18\x13 \x01(\x0b\x32#.flyteidl.admin.RawOutputDataConfigR\x13rawOutputDataConfig\x12P\n\x12\x63luster_assignment\x18\x14 \x01(\x0b\x32!.flyteidl.admin.ClusterAssignmentR\x11\x63lusterAssignment\x12@\n\rinterruptible\x18\x15 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\rinterruptible\x12\'\n\x0foverwrite_cache\x18\x16 \x01(\x08R\x0eoverwriteCache\x12(\n\x04\x65nvs\x18\x17 \x01(\x0b\x32\x14.flyteidl.admin.EnvsR\x04\x65nvs\x12\x12\n\x04tags\x18\x18 
\x03(\tR\x04tagsB\x18\n\x16notification_overridesJ\x04\x08\x04\x10\x05\"m\n\x19\x45xecutionTerminateRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x14\n\x05\x63\x61use\x18\x02 \x01(\tR\x05\x63\x61use\"\x1c\n\x1a\x45xecutionTerminateResponse\"]\n\x1fWorkflowExecutionGetDataRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\"\x88\x02\n WorkflowExecutionGetDataResponse\x12\x35\n\x07outputs\x18\x01 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x07outputs\x12\x33\n\x06inputs\x18\x02 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x06inputs\x12:\n\x0b\x66ull_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\nfullInputs\x12<\n\x0c\x66ull_outputs\x18\x04 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ullOutputs\"\x8a\x01\n\x16\x45xecutionUpdateRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x34\n\x05state\x18\x02 \x01(\x0e\x32\x1e.flyteidl.admin.ExecutionStateR\x05state\"\xae\x01\n\x1b\x45xecutionStateChangeDetails\x12\x34\n\x05state\x18\x01 \x01(\x0e\x32\x1e.flyteidl.admin.ExecutionStateR\x05state\x12;\n\x0boccurred_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\noccurredAt\x12\x1c\n\tprincipal\x18\x03 \x01(\tR\tprincipal\"\x19\n\x17\x45xecutionUpdateResponse\"v\n\"WorkflowExecutionGetMetricsRequest\x12:\n\x02id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x02id\x12\x14\n\x05\x64\x65pth\x18\x02 \x01(\x05R\x05\x64\x65pth\"N\n#WorkflowExecutionGetMetricsResponse\x12\'\n\x04span\x18\x01 
\x01(\x0b\x32\x13.flyteidl.core.SpanR\x04span*>\n\x0e\x45xecutionState\x12\x14\n\x10\x45XECUTION_ACTIVE\x10\x00\x12\x16\n\x12\x45XECUTION_ARCHIVED\x10\x01\x42\xba\x01\n\x12\x63om.flyteidl.adminB\x0e\x45xecutionProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -51,54 +51,54 @@ _WORKFLOWEXECUTIONGETDATARESPONSE.fields_by_name['outputs']._serialized_options = b'\030\001' _WORKFLOWEXECUTIONGETDATARESPONSE.fields_by_name['inputs']._options = None _WORKFLOWEXECUTIONGETDATARESPONSE.fields_by_name['inputs']._serialized_options = b'\030\001' - _globals['_EXECUTIONSTATE']._serialized_start=5391 - _globals['_EXECUTIONSTATE']._serialized_end=5453 + _globals['_EXECUTIONSTATE']._serialized_start=5409 + _globals['_EXECUTIONSTATE']._serialized_end=5471 _globals['_EXECUTIONCREATEREQUEST']._serialized_start=403 - _globals['_EXECUTIONCREATEREQUEST']._serialized_end=599 - _globals['_EXECUTIONRELAUNCHREQUEST']._serialized_start=602 - _globals['_EXECUTIONRELAUNCHREQUEST']._serialized_end=755 - _globals['_EXECUTIONRECOVERREQUEST']._serialized_start=758 - _globals['_EXECUTIONRECOVERREQUEST']._serialized_end=926 - _globals['_EXECUTIONCREATERESPONSE']._serialized_start=928 - _globals['_EXECUTIONCREATERESPONSE']._serialized_end=1013 - _globals['_WORKFLOWEXECUTIONGETREQUEST']._serialized_start=1015 - _globals['_WORKFLOWEXECUTIONGETREQUEST']._serialized_end=1104 - _globals['_EXECUTION']._serialized_start=1107 - _globals['_EXECUTION']._serialized_end=1289 - _globals['_EXECUTIONLIST']._serialized_start=1291 - _globals['_EXECUTIONLIST']._serialized_end=1387 - _globals['_LITERALMAPBLOB']._serialized_start=1389 - _globals['_LITERALMAPBLOB']._serialized_end=1490 - _globals['_ABORTMETADATA']._serialized_start=1492 - 
_globals['_ABORTMETADATA']._serialized_end=1559 - _globals['_EXECUTIONCLOSURE']._serialized_start=1562 - _globals['_EXECUTIONCLOSURE']._serialized_end=2482 - _globals['_SYSTEMMETADATA']._serialized_start=2484 - _globals['_SYSTEMMETADATA']._serialized_end=2575 - _globals['_EXECUTIONMETADATA']._serialized_start=2578 - _globals['_EXECUTIONMETADATA']._serialized_end=3210 - _globals['_EXECUTIONMETADATA_EXECUTIONMODE']._serialized_start=3107 - _globals['_EXECUTIONMETADATA_EXECUTIONMODE']._serialized_end=3210 - _globals['_NOTIFICATIONLIST']._serialized_start=3212 - _globals['_NOTIFICATIONLIST']._serialized_end=3298 - _globals['_EXECUTIONSPEC']._serialized_start=3301 - _globals['_EXECUTIONSPEC']._serialized_end=4341 - _globals['_EXECUTIONTERMINATEREQUEST']._serialized_start=4343 - _globals['_EXECUTIONTERMINATEREQUEST']._serialized_end=4452 - _globals['_EXECUTIONTERMINATERESPONSE']._serialized_start=4454 - _globals['_EXECUTIONTERMINATERESPONSE']._serialized_end=4482 - _globals['_WORKFLOWEXECUTIONGETDATAREQUEST']._serialized_start=4484 - _globals['_WORKFLOWEXECUTIONGETDATAREQUEST']._serialized_end=4577 - _globals['_WORKFLOWEXECUTIONGETDATARESPONSE']._serialized_start=4580 - _globals['_WORKFLOWEXECUTIONGETDATARESPONSE']._serialized_end=4844 - _globals['_EXECUTIONUPDATEREQUEST']._serialized_start=4847 - _globals['_EXECUTIONUPDATEREQUEST']._serialized_end=4985 - _globals['_EXECUTIONSTATECHANGEDETAILS']._serialized_start=4988 - _globals['_EXECUTIONSTATECHANGEDETAILS']._serialized_end=5162 - _globals['_EXECUTIONUPDATERESPONSE']._serialized_start=5164 - _globals['_EXECUTIONUPDATERESPONSE']._serialized_end=5189 - _globals['_WORKFLOWEXECUTIONGETMETRICSREQUEST']._serialized_start=5191 - _globals['_WORKFLOWEXECUTIONGETMETRICSREQUEST']._serialized_end=5309 - _globals['_WORKFLOWEXECUTIONGETMETRICSRESPONSE']._serialized_start=5311 - _globals['_WORKFLOWEXECUTIONGETMETRICSRESPONSE']._serialized_end=5389 + _globals['_EXECUTIONCREATEREQUEST']._serialized_end=617 + 
_globals['_EXECUTIONRELAUNCHREQUEST']._serialized_start=620 + _globals['_EXECUTIONRELAUNCHREQUEST']._serialized_end=773 + _globals['_EXECUTIONRECOVERREQUEST']._serialized_start=776 + _globals['_EXECUTIONRECOVERREQUEST']._serialized_end=944 + _globals['_EXECUTIONCREATERESPONSE']._serialized_start=946 + _globals['_EXECUTIONCREATERESPONSE']._serialized_end=1031 + _globals['_WORKFLOWEXECUTIONGETREQUEST']._serialized_start=1033 + _globals['_WORKFLOWEXECUTIONGETREQUEST']._serialized_end=1122 + _globals['_EXECUTION']._serialized_start=1125 + _globals['_EXECUTION']._serialized_end=1307 + _globals['_EXECUTIONLIST']._serialized_start=1309 + _globals['_EXECUTIONLIST']._serialized_end=1405 + _globals['_LITERALMAPBLOB']._serialized_start=1407 + _globals['_LITERALMAPBLOB']._serialized_end=1508 + _globals['_ABORTMETADATA']._serialized_start=1510 + _globals['_ABORTMETADATA']._serialized_end=1577 + _globals['_EXECUTIONCLOSURE']._serialized_start=1580 + _globals['_EXECUTIONCLOSURE']._serialized_end=2500 + _globals['_SYSTEMMETADATA']._serialized_start=2502 + _globals['_SYSTEMMETADATA']._serialized_end=2593 + _globals['_EXECUTIONMETADATA']._serialized_start=2596 + _globals['_EXECUTIONMETADATA']._serialized_end=3228 + _globals['_EXECUTIONMETADATA_EXECUTIONMODE']._serialized_start=3125 + _globals['_EXECUTIONMETADATA_EXECUTIONMODE']._serialized_end=3228 + _globals['_NOTIFICATIONLIST']._serialized_start=3230 + _globals['_NOTIFICATIONLIST']._serialized_end=3316 + _globals['_EXECUTIONSPEC']._serialized_start=3319 + _globals['_EXECUTIONSPEC']._serialized_end=4359 + _globals['_EXECUTIONTERMINATEREQUEST']._serialized_start=4361 + _globals['_EXECUTIONTERMINATEREQUEST']._serialized_end=4470 + _globals['_EXECUTIONTERMINATERESPONSE']._serialized_start=4472 + _globals['_EXECUTIONTERMINATERESPONSE']._serialized_end=4500 + _globals['_WORKFLOWEXECUTIONGETDATAREQUEST']._serialized_start=4502 + _globals['_WORKFLOWEXECUTIONGETDATAREQUEST']._serialized_end=4595 + 
_globals['_WORKFLOWEXECUTIONGETDATARESPONSE']._serialized_start=4598 + _globals['_WORKFLOWEXECUTIONGETDATARESPONSE']._serialized_end=4862 + _globals['_EXECUTIONUPDATEREQUEST']._serialized_start=4865 + _globals['_EXECUTIONUPDATEREQUEST']._serialized_end=5003 + _globals['_EXECUTIONSTATECHANGEDETAILS']._serialized_start=5006 + _globals['_EXECUTIONSTATECHANGEDETAILS']._serialized_end=5180 + _globals['_EXECUTIONUPDATERESPONSE']._serialized_start=5182 + _globals['_EXECUTIONUPDATERESPONSE']._serialized_end=5207 + _globals['_WORKFLOWEXECUTIONGETMETRICSREQUEST']._serialized_start=5209 + _globals['_WORKFLOWEXECUTIONGETMETRICSREQUEST']._serialized_end=5327 + _globals['_WORKFLOWEXECUTIONGETMETRICSRESPONSE']._serialized_start=5329 + _globals['_WORKFLOWEXECUTIONGETMETRICSRESPONSE']._serialized_end=5407 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.pyi index 1e5e21f19f..bee241d74d 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/admin/execution_pb2.pyi @@ -25,18 +25,20 @@ EXECUTION_ACTIVE: ExecutionState EXECUTION_ARCHIVED: ExecutionState class ExecutionCreateRequest(_message.Message): - __slots__ = ["project", "domain", "name", "spec", "inputs"] + __slots__ = ["project", "domain", "name", "spec", "inputs", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] SPEC_FIELD_NUMBER: _ClassVar[int] INPUTS_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str domain: str name: str spec: ExecutionSpec inputs: _literals_pb2.LiteralMap - def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., name: _Optional[str] = ..., spec: _Optional[_Union[ExecutionSpec, _Mapping]] = ..., inputs: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ...) -> None: ... 
+ org: str + def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., name: _Optional[str] = ..., spec: _Optional[_Union[ExecutionSpec, _Mapping]] = ..., inputs: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., org: _Optional[str] = ...) -> None: ... class ExecutionRelaunchRequest(_message.Message): __slots__ = ["id", "name", "overwrite_cache"] diff --git a/flyteidl/gen/pb_python/flyteidl/admin/launch_plan_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/launch_plan_pb2.py index dea0972f4d..68d9cdbd65 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/launch_plan_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/launch_plan_pb2.py @@ -23,7 +23,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n flyteidl/admin/launch_plan.proto\x12\x0e\x66lyteidl.admin\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1d\x66lyteidl/admin/schedule.proto\x1a\x1b\x66lyteidl/admin/common.proto\x1a\x19google/protobuf/any.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"x\n\x17LaunchPlanCreateRequest\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12\x32\n\x04spec\x18\x02 \x01(\x0b\x32\x1e.flyteidl.admin.LaunchPlanSpecR\x04spec\"\x1a\n\x18LaunchPlanCreateResponse\"\xa8\x01\n\nLaunchPlan\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12\x32\n\x04spec\x18\x02 \x01(\x0b\x32\x1e.flyteidl.admin.LaunchPlanSpecR\x04spec\x12;\n\x07\x63losure\x18\x03 \x01(\x0b\x32!.flyteidl.admin.LaunchPlanClosureR\x07\x63losure\"e\n\x0eLaunchPlanList\x12=\n\x0claunch_plans\x18\x01 \x03(\x0b\x32\x1a.flyteidl.admin.LaunchPlanR\x0blaunchPlans\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"v\n\x04\x41uth\x12,\n\x12\x61ssumable_iam_role\x18\x01 
\x01(\tR\x10\x61ssumableIamRole\x12<\n\x1akubernetes_service_account\x18\x02 \x01(\tR\x18kubernetesServiceAccount:\x02\x18\x01\"\xbd\x07\n\x0eLaunchPlanSpec\x12:\n\x0bworkflow_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nworkflowId\x12K\n\x0f\x65ntity_metadata\x18\x02 \x01(\x0b\x32\".flyteidl.admin.LaunchPlanMetadataR\x0e\x65ntityMetadata\x12\x42\n\x0e\x64\x65\x66\x61ult_inputs\x18\x03 \x01(\x0b\x32\x1b.flyteidl.core.ParameterMapR\rdefaultInputs\x12<\n\x0c\x66ixed_inputs\x18\x04 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ixedInputs\x12\x16\n\x04role\x18\x05 \x01(\tB\x02\x18\x01R\x04role\x12.\n\x06labels\x18\x06 \x01(\x0b\x32\x16.flyteidl.admin.LabelsR\x06labels\x12=\n\x0b\x61nnotations\x18\x07 \x01(\x0b\x32\x1b.flyteidl.admin.AnnotationsR\x0b\x61nnotations\x12,\n\x04\x61uth\x18\x08 \x01(\x0b\x32\x14.flyteidl.admin.AuthB\x02\x18\x01R\x04\x61uth\x12\x39\n\tauth_role\x18\t \x01(\x0b\x32\x18.flyteidl.admin.AuthRoleB\x02\x18\x01R\x08\x61uthRole\x12I\n\x10security_context\x18\n \x01(\x0b\x32\x1e.flyteidl.core.SecurityContextR\x0fsecurityContext\x12M\n\x12quality_of_service\x18\x10 \x01(\x0b\x32\x1f.flyteidl.core.QualityOfServiceR\x10qualityOfService\x12X\n\x16raw_output_data_config\x18\x11 \x01(\x0b\x32#.flyteidl.admin.RawOutputDataConfigR\x13rawOutputDataConfig\x12\'\n\x0fmax_parallelism\x18\x12 \x01(\x05R\x0emaxParallelism\x12@\n\rinterruptible\x18\x13 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\rinterruptible\x12\'\n\x0foverwrite_cache\x18\x14 \x01(\x08R\x0eoverwriteCache\x12(\n\x04\x65nvs\x18\x15 \x01(\x0b\x32\x14.flyteidl.admin.EnvsR\x04\x65nvs\"\xcd\x02\n\x11LaunchPlanClosure\x12\x35\n\x05state\x18\x01 \x01(\x0e\x32\x1f.flyteidl.admin.LaunchPlanStateR\x05state\x12\x44\n\x0f\x65xpected_inputs\x18\x02 \x01(\x0b\x32\x1b.flyteidl.core.ParameterMapR\x0e\x65xpectedInputs\x12\x45\n\x10\x65xpected_outputs\x18\x03 \x01(\x0b\x32\x1a.flyteidl.core.VariableMapR\x0f\x65xpectedOutputs\x12\x39\n\ncreated_at\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\"\xd1\x01\n\x12LaunchPlanMetadata\x12\x34\n\x08schedule\x18\x01 \x01(\x0b\x32\x18.flyteidl.admin.ScheduleR\x08schedule\x12\x42\n\rnotifications\x18\x02 \x03(\x0b\x32\x1c.flyteidl.admin.NotificationR\rnotifications\x12\x41\n\x11launch_conditions\x18\x03 \x01(\x0b\x32\x14.google.protobuf.AnyR\x10launchConditions\"{\n\x17LaunchPlanUpdateRequest\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12\x35\n\x05state\x18\x02 \x01(\x0e\x32\x1f.flyteidl.admin.LaunchPlanStateR\x05state\"\x1a\n\x18LaunchPlanUpdateResponse\"P\n\x17\x41\x63tiveLaunchPlanRequest\x12\x35\n\x02id\x18\x01 \x01(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x02id\"\xaa\x01\n\x1b\x41\x63tiveLaunchPlanListRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x14\n\x05limit\x18\x03 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x04 \x01(\tR\x05token\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy*+\n\x0fLaunchPlanState\x12\x0c\n\x08INACTIVE\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x42\xbb\x01\n\x12\x63om.flyteidl.adminB\x0fLaunchPlanProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n 
flyteidl/admin/launch_plan.proto\x12\x0e\x66lyteidl.admin\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1d\x66lyteidl/admin/schedule.proto\x1a\x1b\x66lyteidl/admin/common.proto\x1a\x19google/protobuf/any.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\"x\n\x17LaunchPlanCreateRequest\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12\x32\n\x04spec\x18\x02 \x01(\x0b\x32\x1e.flyteidl.admin.LaunchPlanSpecR\x04spec\"\x1a\n\x18LaunchPlanCreateResponse\"\xa8\x01\n\nLaunchPlan\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12\x32\n\x04spec\x18\x02 \x01(\x0b\x32\x1e.flyteidl.admin.LaunchPlanSpecR\x04spec\x12;\n\x07\x63losure\x18\x03 \x01(\x0b\x32!.flyteidl.admin.LaunchPlanClosureR\x07\x63losure\"e\n\x0eLaunchPlanList\x12=\n\x0claunch_plans\x18\x01 \x03(\x0b\x32\x1a.flyteidl.admin.LaunchPlanR\x0blaunchPlans\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"v\n\x04\x41uth\x12,\n\x12\x61ssumable_iam_role\x18\x01 \x01(\tR\x10\x61ssumableIamRole\x12<\n\x1akubernetes_service_account\x18\x02 \x01(\tR\x18kubernetesServiceAccount:\x02\x18\x01\"\xbd\x07\n\x0eLaunchPlanSpec\x12:\n\x0bworkflow_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nworkflowId\x12K\n\x0f\x65ntity_metadata\x18\x02 \x01(\x0b\x32\".flyteidl.admin.LaunchPlanMetadataR\x0e\x65ntityMetadata\x12\x42\n\x0e\x64\x65\x66\x61ult_inputs\x18\x03 \x01(\x0b\x32\x1b.flyteidl.core.ParameterMapR\rdefaultInputs\x12<\n\x0c\x66ixed_inputs\x18\x04 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ixedInputs\x12\x16\n\x04role\x18\x05 \x01(\tB\x02\x18\x01R\x04role\x12.\n\x06labels\x18\x06 \x01(\x0b\x32\x16.flyteidl.admin.LabelsR\x06labels\x12=\n\x0b\x61nnotations\x18\x07 \x01(\x0b\x32\x1b.flyteidl.admin.AnnotationsR\x0b\x61nnotations\x12,\n\x04\x61uth\x18\x08 
\x01(\x0b\x32\x14.flyteidl.admin.AuthB\x02\x18\x01R\x04\x61uth\x12\x39\n\tauth_role\x18\t \x01(\x0b\x32\x18.flyteidl.admin.AuthRoleB\x02\x18\x01R\x08\x61uthRole\x12I\n\x10security_context\x18\n \x01(\x0b\x32\x1e.flyteidl.core.SecurityContextR\x0fsecurityContext\x12M\n\x12quality_of_service\x18\x10 \x01(\x0b\x32\x1f.flyteidl.core.QualityOfServiceR\x10qualityOfService\x12X\n\x16raw_output_data_config\x18\x11 \x01(\x0b\x32#.flyteidl.admin.RawOutputDataConfigR\x13rawOutputDataConfig\x12\'\n\x0fmax_parallelism\x18\x12 \x01(\x05R\x0emaxParallelism\x12@\n\rinterruptible\x18\x13 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\rinterruptible\x12\'\n\x0foverwrite_cache\x18\x14 \x01(\x08R\x0eoverwriteCache\x12(\n\x04\x65nvs\x18\x15 \x01(\x0b\x32\x14.flyteidl.admin.EnvsR\x04\x65nvs\"\xcd\x02\n\x11LaunchPlanClosure\x12\x35\n\x05state\x18\x01 \x01(\x0e\x32\x1f.flyteidl.admin.LaunchPlanStateR\x05state\x12\x44\n\x0f\x65xpected_inputs\x18\x02 \x01(\x0b\x32\x1b.flyteidl.core.ParameterMapR\x0e\x65xpectedInputs\x12\x45\n\x10\x65xpected_outputs\x18\x03 \x01(\x0b\x32\x1a.flyteidl.core.VariableMapR\x0f\x65xpectedOutputs\x12\x39\n\ncreated_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\"\xd1\x01\n\x12LaunchPlanMetadata\x12\x34\n\x08schedule\x18\x01 \x01(\x0b\x32\x18.flyteidl.admin.ScheduleR\x08schedule\x12\x42\n\rnotifications\x18\x02 \x03(\x0b\x32\x1c.flyteidl.admin.NotificationR\rnotifications\x12\x41\n\x11launch_conditions\x18\x03 \x01(\x0b\x32\x14.google.protobuf.AnyR\x10launchConditions\"{\n\x17LaunchPlanUpdateRequest\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12\x35\n\x05state\x18\x02 \x01(\x0e\x32\x1f.flyteidl.admin.LaunchPlanStateR\x05state\"\x1a\n\x18LaunchPlanUpdateResponse\"P\n\x17\x41\x63tiveLaunchPlanRequest\x12\x35\n\x02id\x18\x01 
\x01(\x0b\x32%.flyteidl.admin.NamedEntityIdentifierR\x02id\"\xbc\x01\n\x1b\x41\x63tiveLaunchPlanListRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x14\n\x05limit\x18\x03 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x04 \x01(\tR\x05token\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\x12\x10\n\x03org\x18\x06 \x01(\tR\x03org*+\n\x0fLaunchPlanState\x12\x0c\n\x08INACTIVE\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x42\xbb\x01\n\x12\x63om.flyteidl.adminB\x0fLaunchPlanProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -40,8 +40,8 @@ _LAUNCHPLANSPEC.fields_by_name['auth']._serialized_options = b'\030\001' _LAUNCHPLANSPEC.fields_by_name['auth_role']._options = None _LAUNCHPLANSPEC.fields_by_name['auth_role']._serialized_options = b'\030\001' - _globals['_LAUNCHPLANSTATE']._serialized_start=2818 - _globals['_LAUNCHPLANSTATE']._serialized_end=2861 + _globals['_LAUNCHPLANSTATE']._serialized_start=2836 + _globals['_LAUNCHPLANSTATE']._serialized_end=2879 _globals['_LAUNCHPLANCREATEREQUEST']._serialized_start=358 _globals['_LAUNCHPLANCREATEREQUEST']._serialized_end=478 _globals['_LAUNCHPLANCREATERESPONSE']._serialized_start=480 @@ -65,5 +65,5 @@ _globals['_ACTIVELAUNCHPLANREQUEST']._serialized_start=2563 _globals['_ACTIVELAUNCHPLANREQUEST']._serialized_end=2643 _globals['_ACTIVELAUNCHPLANLISTREQUEST']._serialized_start=2646 - _globals['_ACTIVELAUNCHPLANLISTREQUEST']._serialized_end=2816 + _globals['_ACTIVELAUNCHPLANLISTREQUEST']._serialized_end=2834 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/launch_plan_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/launch_plan_pb2.pyi index 
cb6a41e2b0..a047c8d473 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/launch_plan_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/admin/launch_plan_pb2.pyi @@ -140,15 +140,17 @@ class ActiveLaunchPlanRequest(_message.Message): def __init__(self, id: _Optional[_Union[_common_pb2.NamedEntityIdentifier, _Mapping]] = ...) -> None: ... class ActiveLaunchPlanListRequest(_message.Message): - __slots__ = ["project", "domain", "limit", "token", "sort_by"] + __slots__ = ["project", "domain", "limit", "token", "sort_by", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] LIMIT_FIELD_NUMBER: _ClassVar[int] TOKEN_FIELD_NUMBER: _ClassVar[int] SORT_BY_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str domain: str limit: int token: str sort_by: _common_pb2.Sort - def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., limit: _Optional[int] = ..., token: _Optional[str] = ..., sort_by: _Optional[_Union[_common_pb2.Sort, _Mapping]] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., limit: _Optional[int] = ..., token: _Optional[str] = ..., sort_by: _Optional[_Union[_common_pb2.Sort, _Mapping]] = ..., org: _Optional[str] = ...) -> None: ... 
diff --git a/flyteidl/gen/pb_python/flyteidl/admin/matchable_resource_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/matchable_resource_pb2.py index 931defd6e8..94748a4932 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/matchable_resource_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/matchable_resource_pb2.py @@ -18,7 +18,7 @@ from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\'flyteidl/admin/matchable_resource.proto\x12\x0e\x66lyteidl.admin\x1a\x1b\x66lyteidl/admin/common.proto\x1a\'flyteidl/admin/cluster_assignment.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\x95\x01\n\x10TaskResourceSpec\x12\x10\n\x03\x63pu\x18\x01 \x01(\tR\x03\x63pu\x12\x10\n\x03gpu\x18\x02 \x01(\tR\x03gpu\x12\x16\n\x06memory\x18\x03 \x01(\tR\x06memory\x12\x18\n\x07storage\x18\x04 \x01(\tR\x07storage\x12+\n\x11\x65phemeral_storage\x18\x05 \x01(\tR\x10\x65phemeralStorage\"\x90\x01\n\x16TaskResourceAttributes\x12<\n\x08\x64\x65\x66\x61ults\x18\x01 \x01(\x0b\x32 .flyteidl.admin.TaskResourceSpecR\x08\x64\x65\x66\x61ults\x12\x38\n\x06limits\x18\x02 \x01(\x0b\x32 .flyteidl.admin.TaskResourceSpecR\x06limits\"\xb5\x01\n\x19\x43lusterResourceAttributes\x12Y\n\nattributes\x18\x01 \x03(\x0b\x32\x39.flyteidl.admin.ClusterResourceAttributes.AttributesEntryR\nattributes\x1a=\n\x0f\x41ttributesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\".\n\x18\x45xecutionQueueAttributes\x12\x12\n\x04tags\x18\x01 \x03(\tR\x04tags\"-\n\x15\x45xecutionClusterLabel\x12\x14\n\x05value\x18\x01 \x01(\tR\x05value\"\xec\x01\n\x0ePluginOverride\x12\x1b\n\ttask_type\x18\x01 \x01(\tR\x08taskType\x12\x1b\n\tplugin_id\x18\x02 \x03(\tR\x08pluginId\x12l\n\x17missing_plugin_behavior\x18\x04 
\x01(\x0e\x32\x34.flyteidl.admin.PluginOverride.MissingPluginBehaviorR\x15missingPluginBehavior\"2\n\x15MissingPluginBehavior\x12\x08\n\x04\x46\x41IL\x10\x00\x12\x0f\n\x0bUSE_DEFAULT\x10\x01\"O\n\x0fPluginOverrides\x12<\n\toverrides\x18\x01 \x03(\x0b\x32\x1e.flyteidl.admin.PluginOverrideR\toverrides\"\xeb\x03\n\x17WorkflowExecutionConfig\x12\'\n\x0fmax_parallelism\x18\x01 \x01(\x05R\x0emaxParallelism\x12I\n\x10security_context\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.SecurityContextR\x0fsecurityContext\x12X\n\x16raw_output_data_config\x18\x03 \x01(\x0b\x32#.flyteidl.admin.RawOutputDataConfigR\x13rawOutputDataConfig\x12.\n\x06labels\x18\x04 \x01(\x0b\x32\x16.flyteidl.admin.LabelsR\x06labels\x12=\n\x0b\x61nnotations\x18\x05 \x01(\x0b\x32\x1b.flyteidl.admin.AnnotationsR\x0b\x61nnotations\x12@\n\rinterruptible\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\rinterruptible\x12\'\n\x0foverwrite_cache\x18\x07 \x01(\x08R\x0eoverwriteCache\x12(\n\x04\x65nvs\x18\x08 \x01(\x0b\x32\x14.flyteidl.admin.EnvsR\x04\x65nvs\"\x94\x06\n\x12MatchingAttributes\x12\x62\n\x18task_resource_attributes\x18\x01 \x01(\x0b\x32&.flyteidl.admin.TaskResourceAttributesH\x00R\x16taskResourceAttributes\x12k\n\x1b\x63luster_resource_attributes\x18\x02 \x01(\x0b\x32).flyteidl.admin.ClusterResourceAttributesH\x00R\x19\x63lusterResourceAttributes\x12h\n\x1a\x65xecution_queue_attributes\x18\x03 \x01(\x0b\x32(.flyteidl.admin.ExecutionQueueAttributesH\x00R\x18\x65xecutionQueueAttributes\x12_\n\x17\x65xecution_cluster_label\x18\x04 \x01(\x0b\x32%.flyteidl.admin.ExecutionClusterLabelH\x00R\x15\x65xecutionClusterLabel\x12O\n\x12quality_of_service\x18\x05 \x01(\x0b\x32\x1f.flyteidl.core.QualityOfServiceH\x00R\x10qualityOfService\x12L\n\x10plugin_overrides\x18\x06 \x01(\x0b\x32\x1f.flyteidl.admin.PluginOverridesH\x00R\x0fpluginOverrides\x12\x65\n\x19workflow_execution_config\x18\x07 \x01(\x0b\x32\'.flyteidl.admin.WorkflowExecutionConfigH\x00R\x17workflowExecutionConfig\x12R\n\x12\x63luster_assignment\x18\x08 
\x01(\x0b\x32!.flyteidl.admin.ClusterAssignmentH\x00R\x11\x63lusterAssignmentB\x08\n\x06target\"\xd5\x01\n MatchableAttributesConfiguration\x12\x42\n\nattributes\x18\x01 \x01(\x0b\x32\".flyteidl.admin.MatchingAttributesR\nattributes\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x18\n\x07project\x18\x03 \x01(\tR\x07project\x12\x1a\n\x08workflow\x18\x04 \x01(\tR\x08workflow\x12\x1f\n\x0blaunch_plan\x18\x05 \x01(\tR\nlaunchPlan\"h\n\x1eListMatchableAttributesRequest\x12\x46\n\rresource_type\x18\x01 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\"{\n\x1fListMatchableAttributesResponse\x12X\n\x0e\x63onfigurations\x18\x01 \x03(\x0b\x32\x30.flyteidl.admin.MatchableAttributesConfigurationR\x0e\x63onfigurations*\xe0\x01\n\x11MatchableResource\x12\x11\n\rTASK_RESOURCE\x10\x00\x12\x14\n\x10\x43LUSTER_RESOURCE\x10\x01\x12\x13\n\x0f\x45XECUTION_QUEUE\x10\x02\x12\x1b\n\x17\x45XECUTION_CLUSTER_LABEL\x10\x03\x12$\n QUALITY_OF_SERVICE_SPECIFICATION\x10\x04\x12\x13\n\x0fPLUGIN_OVERRIDE\x10\x05\x12\x1d\n\x19WORKFLOW_EXECUTION_CONFIG\x10\x06\x12\x16\n\x12\x43LUSTER_ASSIGNMENT\x10\x07\x42\xc2\x01\n\x12\x63om.flyteidl.adminB\x16MatchableResourceProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\'flyteidl/admin/matchable_resource.proto\x12\x0e\x66lyteidl.admin\x1a\x1b\x66lyteidl/admin/common.proto\x1a\'flyteidl/admin/cluster_assignment.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1c\x66lyteidl/core/security.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\x95\x01\n\x10TaskResourceSpec\x12\x10\n\x03\x63pu\x18\x01 \x01(\tR\x03\x63pu\x12\x10\n\x03gpu\x18\x02 \x01(\tR\x03gpu\x12\x16\n\x06memory\x18\x03 \x01(\tR\x06memory\x12\x18\n\x07storage\x18\x04 \x01(\tR\x07storage\x12+\n\x11\x65phemeral_storage\x18\x05 
\x01(\tR\x10\x65phemeralStorage\"\x90\x01\n\x16TaskResourceAttributes\x12<\n\x08\x64\x65\x66\x61ults\x18\x01 \x01(\x0b\x32 .flyteidl.admin.TaskResourceSpecR\x08\x64\x65\x66\x61ults\x12\x38\n\x06limits\x18\x02 \x01(\x0b\x32 .flyteidl.admin.TaskResourceSpecR\x06limits\"\xb5\x01\n\x19\x43lusterResourceAttributes\x12Y\n\nattributes\x18\x01 \x03(\x0b\x32\x39.flyteidl.admin.ClusterResourceAttributes.AttributesEntryR\nattributes\x1a=\n\x0f\x41ttributesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\".\n\x18\x45xecutionQueueAttributes\x12\x12\n\x04tags\x18\x01 \x03(\tR\x04tags\"-\n\x15\x45xecutionClusterLabel\x12\x14\n\x05value\x18\x01 \x01(\tR\x05value\"\xec\x01\n\x0ePluginOverride\x12\x1b\n\ttask_type\x18\x01 \x01(\tR\x08taskType\x12\x1b\n\tplugin_id\x18\x02 \x03(\tR\x08pluginId\x12l\n\x17missing_plugin_behavior\x18\x04 \x01(\x0e\x32\x34.flyteidl.admin.PluginOverride.MissingPluginBehaviorR\x15missingPluginBehavior\"2\n\x15MissingPluginBehavior\x12\x08\n\x04\x46\x41IL\x10\x00\x12\x0f\n\x0bUSE_DEFAULT\x10\x01\"O\n\x0fPluginOverrides\x12<\n\toverrides\x18\x01 \x03(\x0b\x32\x1e.flyteidl.admin.PluginOverrideR\toverrides\"\xeb\x03\n\x17WorkflowExecutionConfig\x12\'\n\x0fmax_parallelism\x18\x01 \x01(\x05R\x0emaxParallelism\x12I\n\x10security_context\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.SecurityContextR\x0fsecurityContext\x12X\n\x16raw_output_data_config\x18\x03 \x01(\x0b\x32#.flyteidl.admin.RawOutputDataConfigR\x13rawOutputDataConfig\x12.\n\x06labels\x18\x04 \x01(\x0b\x32\x16.flyteidl.admin.LabelsR\x06labels\x12=\n\x0b\x61nnotations\x18\x05 \x01(\x0b\x32\x1b.flyteidl.admin.AnnotationsR\x0b\x61nnotations\x12@\n\rinterruptible\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.BoolValueR\rinterruptible\x12\'\n\x0foverwrite_cache\x18\x07 \x01(\x08R\x0eoverwriteCache\x12(\n\x04\x65nvs\x18\x08 \x01(\x0b\x32\x14.flyteidl.admin.EnvsR\x04\x65nvs\"\x94\x06\n\x12MatchingAttributes\x12\x62\n\x18task_resource_attributes\x18\x01 
\x01(\x0b\x32&.flyteidl.admin.TaskResourceAttributesH\x00R\x16taskResourceAttributes\x12k\n\x1b\x63luster_resource_attributes\x18\x02 \x01(\x0b\x32).flyteidl.admin.ClusterResourceAttributesH\x00R\x19\x63lusterResourceAttributes\x12h\n\x1a\x65xecution_queue_attributes\x18\x03 \x01(\x0b\x32(.flyteidl.admin.ExecutionQueueAttributesH\x00R\x18\x65xecutionQueueAttributes\x12_\n\x17\x65xecution_cluster_label\x18\x04 \x01(\x0b\x32%.flyteidl.admin.ExecutionClusterLabelH\x00R\x15\x65xecutionClusterLabel\x12O\n\x12quality_of_service\x18\x05 \x01(\x0b\x32\x1f.flyteidl.core.QualityOfServiceH\x00R\x10qualityOfService\x12L\n\x10plugin_overrides\x18\x06 \x01(\x0b\x32\x1f.flyteidl.admin.PluginOverridesH\x00R\x0fpluginOverrides\x12\x65\n\x19workflow_execution_config\x18\x07 \x01(\x0b\x32\'.flyteidl.admin.WorkflowExecutionConfigH\x00R\x17workflowExecutionConfig\x12R\n\x12\x63luster_assignment\x18\x08 \x01(\x0b\x32!.flyteidl.admin.ClusterAssignmentH\x00R\x11\x63lusterAssignmentB\x08\n\x06target\"\xe7\x01\n MatchableAttributesConfiguration\x12\x42\n\nattributes\x18\x01 \x01(\x0b\x32\".flyteidl.admin.MatchingAttributesR\nattributes\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x18\n\x07project\x18\x03 \x01(\tR\x07project\x12\x1a\n\x08workflow\x18\x04 \x01(\tR\x08workflow\x12\x1f\n\x0blaunch_plan\x18\x05 \x01(\tR\nlaunchPlan\x12\x10\n\x03org\x18\x06 \x01(\tR\x03org\"z\n\x1eListMatchableAttributesRequest\x12\x46\n\rresource_type\x18\x01 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\x12\x10\n\x03org\x18\x02 \x01(\tR\x03org\"{\n\x1fListMatchableAttributesResponse\x12X\n\x0e\x63onfigurations\x18\x01 \x03(\x0b\x32\x30.flyteidl.admin.MatchableAttributesConfigurationR\x0e\x63onfigurations*\xe0\x01\n\x11MatchableResource\x12\x11\n\rTASK_RESOURCE\x10\x00\x12\x14\n\x10\x43LUSTER_RESOURCE\x10\x01\x12\x13\n\x0f\x45XECUTION_QUEUE\x10\x02\x12\x1b\n\x17\x45XECUTION_CLUSTER_LABEL\x10\x03\x12$\n 
QUALITY_OF_SERVICE_SPECIFICATION\x10\x04\x12\x13\n\x0fPLUGIN_OVERRIDE\x10\x05\x12\x1d\n\x19WORKFLOW_EXECUTION_CONFIG\x10\x06\x12\x16\n\x12\x43LUSTER_ASSIGNMENT\x10\x07\x42\xc2\x01\n\x12\x63om.flyteidl.adminB\x16MatchableResourceProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -29,8 +29,8 @@ DESCRIPTOR._serialized_options = b'\n\022com.flyteidl.adminB\026MatchableResourceProtoP\001Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\242\002\003FAX\252\002\016Flyteidl.Admin\312\002\016Flyteidl\\Admin\342\002\032Flyteidl\\Admin\\GPBMetadata\352\002\017Flyteidl::Admin' _CLUSTERRESOURCEATTRIBUTES_ATTRIBUTESENTRY._options = None _CLUSTERRESOURCEATTRIBUTES_ATTRIBUTESENTRY._serialized_options = b'8\001' - _globals['_MATCHABLERESOURCE']._serialized_start=2853 - _globals['_MATCHABLERESOURCE']._serialized_end=3077 + _globals['_MATCHABLERESOURCE']._serialized_start=2889 + _globals['_MATCHABLERESOURCE']._serialized_end=3113 _globals['_TASKRESOURCESPEC']._serialized_start=223 _globals['_TASKRESOURCESPEC']._serialized_end=372 _globals['_TASKRESOURCEATTRIBUTES']._serialized_start=375 @@ -54,9 +54,9 @@ _globals['_MATCHINGATTRIBUTES']._serialized_start=1615 _globals['_MATCHINGATTRIBUTES']._serialized_end=2403 _globals['_MATCHABLEATTRIBUTESCONFIGURATION']._serialized_start=2406 - _globals['_MATCHABLEATTRIBUTESCONFIGURATION']._serialized_end=2619 - _globals['_LISTMATCHABLEATTRIBUTESREQUEST']._serialized_start=2621 - _globals['_LISTMATCHABLEATTRIBUTESREQUEST']._serialized_end=2725 - _globals['_LISTMATCHABLEATTRIBUTESRESPONSE']._serialized_start=2727 - _globals['_LISTMATCHABLEATTRIBUTESRESPONSE']._serialized_end=2850 + _globals['_MATCHABLEATTRIBUTESCONFIGURATION']._serialized_end=2637 + 
_globals['_LISTMATCHABLEATTRIBUTESREQUEST']._serialized_start=2639 + _globals['_LISTMATCHABLEATTRIBUTESREQUEST']._serialized_end=2761 + _globals['_LISTMATCHABLEATTRIBUTESRESPONSE']._serialized_start=2763 + _globals['_LISTMATCHABLEATTRIBUTESRESPONSE']._serialized_end=2886 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/matchable_resource_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/matchable_resource_pb2.pyi index 56a7b03165..cc19f2d146 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/matchable_resource_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/admin/matchable_resource_pb2.pyi @@ -140,24 +140,28 @@ class MatchingAttributes(_message.Message): def __init__(self, task_resource_attributes: _Optional[_Union[TaskResourceAttributes, _Mapping]] = ..., cluster_resource_attributes: _Optional[_Union[ClusterResourceAttributes, _Mapping]] = ..., execution_queue_attributes: _Optional[_Union[ExecutionQueueAttributes, _Mapping]] = ..., execution_cluster_label: _Optional[_Union[ExecutionClusterLabel, _Mapping]] = ..., quality_of_service: _Optional[_Union[_execution_pb2.QualityOfService, _Mapping]] = ..., plugin_overrides: _Optional[_Union[PluginOverrides, _Mapping]] = ..., workflow_execution_config: _Optional[_Union[WorkflowExecutionConfig, _Mapping]] = ..., cluster_assignment: _Optional[_Union[_cluster_assignment_pb2.ClusterAssignment, _Mapping]] = ...) -> None: ... 
class MatchableAttributesConfiguration(_message.Message): - __slots__ = ["attributes", "domain", "project", "workflow", "launch_plan"] + __slots__ = ["attributes", "domain", "project", "workflow", "launch_plan", "org"] ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] PROJECT_FIELD_NUMBER: _ClassVar[int] WORKFLOW_FIELD_NUMBER: _ClassVar[int] LAUNCH_PLAN_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] attributes: MatchingAttributes domain: str project: str workflow: str launch_plan: str - def __init__(self, attributes: _Optional[_Union[MatchingAttributes, _Mapping]] = ..., domain: _Optional[str] = ..., project: _Optional[str] = ..., workflow: _Optional[str] = ..., launch_plan: _Optional[str] = ...) -> None: ... + org: str + def __init__(self, attributes: _Optional[_Union[MatchingAttributes, _Mapping]] = ..., domain: _Optional[str] = ..., project: _Optional[str] = ..., workflow: _Optional[str] = ..., launch_plan: _Optional[str] = ..., org: _Optional[str] = ...) -> None: ... class ListMatchableAttributesRequest(_message.Message): - __slots__ = ["resource_type"] + __slots__ = ["resource_type", "org"] RESOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] resource_type: MatchableResource - def __init__(self, resource_type: _Optional[_Union[MatchableResource, str]] = ...) -> None: ... + org: str + def __init__(self, resource_type: _Optional[_Union[MatchableResource, str]] = ..., org: _Optional[str] = ...) -> None: ... 
class ListMatchableAttributesResponse(_message.Message): __slots__ = ["configurations"] diff --git a/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.py index 17e5cbd652..93a29df4d6 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.py @@ -21,7 +21,7 @@ from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#flyteidl/admin/node_execution.proto\x12\x0e\x66lyteidl.admin\x1a\x1b\x66lyteidl/admin/common.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1b\x66lyteidl/core/catalog.proto\x1a\x1c\x66lyteidl/core/compiler.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\"Q\n\x17NodeExecutionGetRequest\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\"\x99\x02\n\x18NodeExecutionListRequest\x12^\n\x15workflow_execution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x13workflowExecutionId\x12\x14\n\x05limit\x18\x02 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x04 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\x12(\n\x10unique_parent_id\x18\x06 \x01(\tR\x0euniqueParentId\"\xea\x01\n\x1fNodeExecutionForTaskListRequest\x12R\n\x11task_execution_id\x18\x01 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\x0ftaskExecutionId\x12\x14\n\x05limit\x18\x02 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x04 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\"\xe7\x01\n\rNodeExecution\x12\x36\n\x02id\x18\x01 
\x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\x12\x1b\n\tinput_uri\x18\x02 \x01(\tR\x08inputUri\x12>\n\x07\x63losure\x18\x03 \x01(\x0b\x32$.flyteidl.admin.NodeExecutionClosureR\x07\x63losure\x12\x41\n\x08metadata\x18\x04 \x01(\x0b\x32%.flyteidl.admin.NodeExecutionMetaDataR\x08metadata\"\xba\x01\n\x15NodeExecutionMetaData\x12\x1f\n\x0bretry_group\x18\x01 \x01(\tR\nretryGroup\x12$\n\x0eis_parent_node\x18\x02 \x01(\x08R\x0cisParentNode\x12 \n\x0cspec_node_id\x18\x03 \x01(\tR\nspecNodeId\x12\x1d\n\nis_dynamic\x18\x04 \x01(\x08R\tisDynamic\x12\x19\n\x08is_array\x18\x05 \x01(\x08R\x07isArray\"q\n\x11NodeExecutionList\x12\x46\n\x0fnode_executions\x18\x01 \x03(\x0b\x32\x1d.flyteidl.admin.NodeExecutionR\x0enodeExecutions\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"\xf6\x05\n\x14NodeExecutionClosure\x12#\n\noutput_uri\x18\x01 \x01(\tB\x02\x18\x01H\x00R\toutputUri\x12\x35\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x00R\x05\x65rror\x12@\n\x0boutput_data\x18\n \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\noutputData\x12\x38\n\x05phase\x18\x03 \x01(\x0e\x32\".flyteidl.core.NodeExecution.PhaseR\x05phase\x12\x39\n\nstarted_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12\x35\n\x08\x64uration\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\ncreated_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\\\n\x16workflow_node_metadata\x18\x08 \x01(\x0b\x32$.flyteidl.admin.WorkflowNodeMetadataH\x01R\x14workflowNodeMetadata\x12P\n\x12task_node_metadata\x18\t \x01(\x0b\x32 .flyteidl.admin.TaskNodeMetadataH\x01R\x10taskNodeMetadata\x12\x19\n\x08\x64\x65\x63k_uri\x18\x0b \x01(\tR\x07\x64\x65\x63kUri\x12/\n\x14\x64ynamic_job_spec_uri\x18\x0c \x01(\tR\x11\x64ynamicJobSpecUriB\x0f\n\routput_resultB\x11\n\x0ftarget_metadata\"d\n\x14WorkflowNodeMetadata\x12L\n\x0b\x65xecutionId\x18\x01 
\x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\"\xc0\x01\n\x10TaskNodeMetadata\x12\x44\n\x0c\x63\x61\x63he_status\x18\x01 \x01(\x0e\x32!.flyteidl.core.CatalogCacheStatusR\x0b\x63\x61\x63heStatus\x12?\n\x0b\x63\x61talog_key\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.CatalogMetadataR\ncatalogKey\x12%\n\x0e\x63heckpoint_uri\x18\x04 \x01(\tR\rcheckpointUri\"\xce\x01\n\x1b\x44ynamicWorkflowNodeMetadata\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12S\n\x11\x63ompiled_workflow\x18\x02 \x01(\x0b\x32&.flyteidl.core.CompiledWorkflowClosureR\x10\x63ompiledWorkflow\x12/\n\x14\x64ynamic_job_spec_uri\x18\x03 \x01(\tR\x11\x64ynamicJobSpecUri\"U\n\x1bNodeExecutionGetDataRequest\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\"\x96\x03\n\x1cNodeExecutionGetDataResponse\x12\x33\n\x06inputs\x18\x01 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x06inputs\x12\x35\n\x07outputs\x18\x02 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x07outputs\x12:\n\x0b\x66ull_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\nfullInputs\x12<\n\x0c\x66ull_outputs\x18\x04 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ullOutputs\x12V\n\x10\x64ynamic_workflow\x18\x10 \x01(\x0b\x32+.flyteidl.admin.DynamicWorkflowNodeMetadataR\x0f\x64ynamicWorkflow\x12\x38\n\nflyte_urls\x18\x11 \x01(\x0b\x32\x19.flyteidl.admin.FlyteURLsR\tflyteUrlsB\xbe\x01\n\x12\x63om.flyteidl.adminB\x12NodeExecutionProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n#flyteidl/admin/node_execution.proto\x12\x0e\x66lyteidl.admin\x1a\x1b\x66lyteidl/admin/common.proto\x1a\x1d\x66lyteidl/core/execution.proto\x1a\x1b\x66lyteidl/core/catalog.proto\x1a\x1c\x66lyteidl/core/compiler.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\"Q\n\x17NodeExecutionGetRequest\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\"\x99\x02\n\x18NodeExecutionListRequest\x12^\n\x15workflow_execution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x13workflowExecutionId\x12\x14\n\x05limit\x18\x02 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x04 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\x12(\n\x10unique_parent_id\x18\x06 \x01(\tR\x0euniqueParentId\"\xea\x01\n\x1fNodeExecutionForTaskListRequest\x12R\n\x11task_execution_id\x18\x01 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\x0ftaskExecutionId\x12\x14\n\x05limit\x18\x02 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x04 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x05 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\"\xe7\x01\n\rNodeExecution\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\x12\x1b\n\tinput_uri\x18\x02 \x01(\tR\x08inputUri\x12>\n\x07\x63losure\x18\x03 \x01(\x0b\x32$.flyteidl.admin.NodeExecutionClosureR\x07\x63losure\x12\x41\n\x08metadata\x18\x04 \x01(\x0b\x32%.flyteidl.admin.NodeExecutionMetaDataR\x08metadata\"\xba\x01\n\x15NodeExecutionMetaData\x12\x1f\n\x0bretry_group\x18\x01 \x01(\tR\nretryGroup\x12$\n\x0eis_parent_node\x18\x02 \x01(\x08R\x0cisParentNode\x12 \n\x0cspec_node_id\x18\x03 \x01(\tR\nspecNodeId\x12\x1d\n\nis_dynamic\x18\x04 \x01(\x08R\tisDynamic\x12\x19\n\x08is_array\x18\x05 
\x01(\x08R\x07isArray\"q\n\x11NodeExecutionList\x12\x46\n\x0fnode_executions\x18\x01 \x03(\x0b\x32\x1d.flyteidl.admin.NodeExecutionR\x0enodeExecutions\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"\xf6\x05\n\x14NodeExecutionClosure\x12#\n\noutput_uri\x18\x01 \x01(\tB\x02\x18\x01H\x00R\toutputUri\x12\x35\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.ExecutionErrorH\x00R\x05\x65rror\x12@\n\x0boutput_data\x18\n \x01(\x0b\x32\x19.flyteidl.core.LiteralMapB\x02\x18\x01H\x00R\noutputData\x12\x38\n\x05phase\x18\x03 \x01(\x0e\x32\".flyteidl.core.NodeExecution.PhaseR\x05phase\x12\x39\n\nstarted_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartedAt\x12\x35\n\x08\x64uration\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\ncreated_at\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\x12\x39\n\nupdated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tupdatedAt\x12\\\n\x16workflow_node_metadata\x18\x08 \x01(\x0b\x32$.flyteidl.admin.WorkflowNodeMetadataH\x01R\x14workflowNodeMetadata\x12P\n\x12task_node_metadata\x18\t \x01(\x0b\x32 .flyteidl.admin.TaskNodeMetadataH\x01R\x10taskNodeMetadata\x12\x19\n\x08\x64\x65\x63k_uri\x18\x0b \x01(\tR\x07\x64\x65\x63kUri\x12/\n\x14\x64ynamic_job_spec_uri\x18\x0c \x01(\tR\x11\x64ynamicJobSpecUriB\x0f\n\routput_resultB\x11\n\x0ftarget_metadata\"d\n\x14WorkflowNodeMetadata\x12L\n\x0b\x65xecutionId\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\"\xc0\x01\n\x10TaskNodeMetadata\x12\x44\n\x0c\x63\x61\x63he_status\x18\x01 \x01(\x0e\x32!.flyteidl.core.CatalogCacheStatusR\x0b\x63\x61\x63heStatus\x12?\n\x0b\x63\x61talog_key\x18\x02 \x01(\x0b\x32\x1e.flyteidl.core.CatalogMetadataR\ncatalogKey\x12%\n\x0e\x63heckpoint_uri\x18\x04 \x01(\tR\rcheckpointUri\"\xce\x01\n\x1b\x44ynamicWorkflowNodeMetadata\x12)\n\x02id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x02id\x12S\n\x11\x63ompiled_workflow\x18\x02 
\x01(\x0b\x32&.flyteidl.core.CompiledWorkflowClosureR\x10\x63ompiledWorkflow\x12/\n\x14\x64ynamic_job_spec_uri\x18\x03 \x01(\tR\x11\x64ynamicJobSpecUri\"U\n\x1bNodeExecutionGetDataRequest\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\"\x96\x03\n\x1cNodeExecutionGetDataResponse\x12\x33\n\x06inputs\x18\x01 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x06inputs\x12\x35\n\x07outputs\x18\x02 \x01(\x0b\x32\x17.flyteidl.admin.UrlBlobB\x02\x18\x01R\x07outputs\x12:\n\x0b\x66ull_inputs\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\nfullInputs\x12<\n\x0c\x66ull_outputs\x18\x04 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x0b\x66ullOutputs\x12V\n\x10\x64ynamic_workflow\x18\x10 \x01(\x0b\x32+.flyteidl.admin.DynamicWorkflowNodeMetadataR\x0f\x64ynamicWorkflow\x12\x38\n\nflyte_urls\x18\x11 \x01(\x0b\x32\x19.flyteidl.admin.FlyteURLsR\tflyteUrls\"W\n\x1dGetDynamicNodeWorkflowRequest\x12\x36\n\x02id\x18\x01 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x02id\"r\n\x1b\x44ynamicNodeWorkflowResponse\x12S\n\x11\x63ompiled_workflow\x18\x01 \x01(\x0b\x32&.flyteidl.core.CompiledWorkflowClosureR\x10\x63ompiledWorkflowB\xbe\x01\n\x12\x63om.flyteidl.adminB\x12NodeExecutionProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -62,4 +62,8 @@ _globals['_NODEEXECUTIONGETDATAREQUEST']._serialized_end=2795 _globals['_NODEEXECUTIONGETDATARESPONSE']._serialized_start=2798 _globals['_NODEEXECUTIONGETDATARESPONSE']._serialized_end=3204 + _globals['_GETDYNAMICNODEWORKFLOWREQUEST']._serialized_start=3206 + _globals['_GETDYNAMICNODEWORKFLOWREQUEST']._serialized_end=3293 + _globals['_DYNAMICNODEWORKFLOWRESPONSE']._serialized_start=3295 + 
_globals['_DYNAMICNODEWORKFLOWRESPONSE']._serialized_end=3409 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.pyi index 8cbb708f01..9bf601847d 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/admin/node_execution_pb2.pyi @@ -158,3 +158,15 @@ class NodeExecutionGetDataResponse(_message.Message): dynamic_workflow: DynamicWorkflowNodeMetadata flyte_urls: _common_pb2.FlyteURLs def __init__(self, inputs: _Optional[_Union[_common_pb2.UrlBlob, _Mapping]] = ..., outputs: _Optional[_Union[_common_pb2.UrlBlob, _Mapping]] = ..., full_inputs: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., full_outputs: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., dynamic_workflow: _Optional[_Union[DynamicWorkflowNodeMetadata, _Mapping]] = ..., flyte_urls: _Optional[_Union[_common_pb2.FlyteURLs, _Mapping]] = ...) -> None: ... + +class GetDynamicNodeWorkflowRequest(_message.Message): + __slots__ = ["id"] + ID_FIELD_NUMBER: _ClassVar[int] + id: _identifier_pb2.NodeExecutionIdentifier + def __init__(self, id: _Optional[_Union[_identifier_pb2.NodeExecutionIdentifier, _Mapping]] = ...) -> None: ... + +class DynamicNodeWorkflowResponse(_message.Message): + __slots__ = ["compiled_workflow"] + COMPILED_WORKFLOW_FIELD_NUMBER: _ClassVar[int] + compiled_workflow: _compiler_pb2.CompiledWorkflowClosure + def __init__(self, compiled_workflow: _Optional[_Union[_compiler_pb2.CompiledWorkflowClosure, _Mapping]] = ...) -> None: ... 
diff --git a/flyteidl/gen/pb_python/flyteidl/admin/project_attributes_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/project_attributes_pb2.py index b6f7d05f5e..46808ec536 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/project_attributes_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/project_attributes_pb2.py @@ -14,7 +14,7 @@ from flyteidl.admin import matchable_resource_pb2 as flyteidl_dot_admin_dot_matchable__resource__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\'flyteidl/admin/project_attributes.proto\x12\x0e\x66lyteidl.admin\x1a\'flyteidl/admin/matchable_resource.proto\"\x82\x01\n\x11ProjectAttributes\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12S\n\x13matching_attributes\x18\x02 \x01(\x0b\x32\".flyteidl.admin.MatchingAttributesR\x12matchingAttributes\"c\n\x1eProjectAttributesUpdateRequest\x12\x41\n\nattributes\x18\x01 \x01(\x0b\x32!.flyteidl.admin.ProjectAttributesR\nattributes\"!\n\x1fProjectAttributesUpdateResponse\"\x7f\n\x1bProjectAttributesGetRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x46\n\rresource_type\x18\x02 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\"a\n\x1cProjectAttributesGetResponse\x12\x41\n\nattributes\x18\x01 \x01(\x0b\x32!.flyteidl.admin.ProjectAttributesR\nattributes\"\x82\x01\n\x1eProjectAttributesDeleteRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x46\n\rresource_type\x18\x02 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\"!\n\x1fProjectAttributesDeleteResponseB\xc2\x01\n\x12\x63om.flyteidl.adminB\x16ProjectAttributesProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\'flyteidl/admin/project_attributes.proto\x12\x0e\x66lyteidl.admin\x1a\'flyteidl/admin/matchable_resource.proto\"\x94\x01\n\x11ProjectAttributes\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12S\n\x13matching_attributes\x18\x02 \x01(\x0b\x32\".flyteidl.admin.MatchingAttributesR\x12matchingAttributes\x12\x10\n\x03org\x18\x03 \x01(\tR\x03org\"c\n\x1eProjectAttributesUpdateRequest\x12\x41\n\nattributes\x18\x01 \x01(\x0b\x32!.flyteidl.admin.ProjectAttributesR\nattributes\"!\n\x1fProjectAttributesUpdateResponse\"\x91\x01\n\x1bProjectAttributesGetRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x46\n\rresource_type\x18\x02 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\x12\x10\n\x03org\x18\x03 \x01(\tR\x03org\"a\n\x1cProjectAttributesGetResponse\x12\x41\n\nattributes\x18\x01 \x01(\x0b\x32!.flyteidl.admin.ProjectAttributesR\nattributes\"\x94\x01\n\x1eProjectAttributesDeleteRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x46\n\rresource_type\x18\x02 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\x12\x10\n\x03org\x18\x03 \x01(\tR\x03org\"!\n\x1fProjectAttributesDeleteResponseB\xc2\x01\n\x12\x63om.flyteidl.adminB\x16ProjectAttributesProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -24,17 +24,17 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'\n\022com.flyteidl.adminB\026ProjectAttributesProtoP\001Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\242\002\003FAX\252\002\016Flyteidl.Admin\312\002\016Flyteidl\\Admin\342\002\032Flyteidl\\Admin\\GPBMetadata\352\002\017Flyteidl::Admin' _globals['_PROJECTATTRIBUTES']._serialized_start=101 - 
_globals['_PROJECTATTRIBUTES']._serialized_end=231 - _globals['_PROJECTATTRIBUTESUPDATEREQUEST']._serialized_start=233 - _globals['_PROJECTATTRIBUTESUPDATEREQUEST']._serialized_end=332 - _globals['_PROJECTATTRIBUTESUPDATERESPONSE']._serialized_start=334 - _globals['_PROJECTATTRIBUTESUPDATERESPONSE']._serialized_end=367 - _globals['_PROJECTATTRIBUTESGETREQUEST']._serialized_start=369 - _globals['_PROJECTATTRIBUTESGETREQUEST']._serialized_end=496 - _globals['_PROJECTATTRIBUTESGETRESPONSE']._serialized_start=498 - _globals['_PROJECTATTRIBUTESGETRESPONSE']._serialized_end=595 - _globals['_PROJECTATTRIBUTESDELETEREQUEST']._serialized_start=598 - _globals['_PROJECTATTRIBUTESDELETEREQUEST']._serialized_end=728 - _globals['_PROJECTATTRIBUTESDELETERESPONSE']._serialized_start=730 - _globals['_PROJECTATTRIBUTESDELETERESPONSE']._serialized_end=763 + _globals['_PROJECTATTRIBUTES']._serialized_end=249 + _globals['_PROJECTATTRIBUTESUPDATEREQUEST']._serialized_start=251 + _globals['_PROJECTATTRIBUTESUPDATEREQUEST']._serialized_end=350 + _globals['_PROJECTATTRIBUTESUPDATERESPONSE']._serialized_start=352 + _globals['_PROJECTATTRIBUTESUPDATERESPONSE']._serialized_end=385 + _globals['_PROJECTATTRIBUTESGETREQUEST']._serialized_start=388 + _globals['_PROJECTATTRIBUTESGETREQUEST']._serialized_end=533 + _globals['_PROJECTATTRIBUTESGETRESPONSE']._serialized_start=535 + _globals['_PROJECTATTRIBUTESGETRESPONSE']._serialized_end=632 + _globals['_PROJECTATTRIBUTESDELETEREQUEST']._serialized_start=635 + _globals['_PROJECTATTRIBUTESDELETEREQUEST']._serialized_end=783 + _globals['_PROJECTATTRIBUTESDELETERESPONSE']._serialized_start=785 + _globals['_PROJECTATTRIBUTESDELETERESPONSE']._serialized_end=818 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/project_attributes_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/project_attributes_pb2.pyi index 8ad0997ac2..6581a30a15 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/project_attributes_pb2.pyi +++ 
b/flyteidl/gen/pb_python/flyteidl/admin/project_attributes_pb2.pyi @@ -6,12 +6,14 @@ from typing import ClassVar as _ClassVar, Mapping as _Mapping, Optional as _Opti DESCRIPTOR: _descriptor.FileDescriptor class ProjectAttributes(_message.Message): - __slots__ = ["project", "matching_attributes"] + __slots__ = ["project", "matching_attributes", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] MATCHING_ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str matching_attributes: _matchable_resource_pb2.MatchingAttributes - def __init__(self, project: _Optional[str] = ..., matching_attributes: _Optional[_Union[_matchable_resource_pb2.MatchingAttributes, _Mapping]] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., matching_attributes: _Optional[_Union[_matchable_resource_pb2.MatchingAttributes, _Mapping]] = ..., org: _Optional[str] = ...) -> None: ... class ProjectAttributesUpdateRequest(_message.Message): __slots__ = ["attributes"] @@ -24,12 +26,14 @@ class ProjectAttributesUpdateResponse(_message.Message): def __init__(self) -> None: ... class ProjectAttributesGetRequest(_message.Message): - __slots__ = ["project", "resource_type"] + __slots__ = ["project", "resource_type", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] RESOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str resource_type: _matchable_resource_pb2.MatchableResource - def __init__(self, project: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ..., org: _Optional[str] = ...) -> None: ... 
class ProjectAttributesGetResponse(_message.Message): __slots__ = ["attributes"] @@ -38,12 +42,14 @@ class ProjectAttributesGetResponse(_message.Message): def __init__(self, attributes: _Optional[_Union[ProjectAttributes, _Mapping]] = ...) -> None: ... class ProjectAttributesDeleteRequest(_message.Message): - __slots__ = ["project", "resource_type"] + __slots__ = ["project", "resource_type", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] RESOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str resource_type: _matchable_resource_pb2.MatchableResource - def __init__(self, project: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ..., org: _Optional[str] = ...) -> None: ... class ProjectAttributesDeleteResponse(_message.Message): __slots__ = [] diff --git a/flyteidl/gen/pb_python/flyteidl/admin/project_domain_attributes_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/project_domain_attributes_pb2.py index ff5c077221..3d80159af4 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/project_domain_attributes_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/project_domain_attributes_pb2.py @@ -14,7 +14,7 @@ from flyteidl.admin import matchable_resource_pb2 as flyteidl_dot_admin_dot_matchable__resource__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.flyteidl/admin/project_domain_attributes.proto\x12\x0e\x66lyteidl.admin\x1a\'flyteidl/admin/matchable_resource.proto\"\xa0\x01\n\x17ProjectDomainAttributes\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12S\n\x13matching_attributes\x18\x03 \x01(\x0b\x32\".flyteidl.admin.MatchingAttributesR\x12matchingAttributes\"o\n$ProjectDomainAttributesUpdateRequest\x12G\n\nattributes\x18\x01 
\x01(\x0b\x32\'.flyteidl.admin.ProjectDomainAttributesR\nattributes\"\'\n%ProjectDomainAttributesUpdateResponse\"\x9d\x01\n!ProjectDomainAttributesGetRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x46\n\rresource_type\x18\x03 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\"m\n\"ProjectDomainAttributesGetResponse\x12G\n\nattributes\x18\x01 \x01(\x0b\x32\'.flyteidl.admin.ProjectDomainAttributesR\nattributes\"\xa0\x01\n$ProjectDomainAttributesDeleteRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x46\n\rresource_type\x18\x03 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\"\'\n%ProjectDomainAttributesDeleteResponseB\xc8\x01\n\x12\x63om.flyteidl.adminB\x1cProjectDomainAttributesProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.flyteidl/admin/project_domain_attributes.proto\x12\x0e\x66lyteidl.admin\x1a\'flyteidl/admin/matchable_resource.proto\"\xb2\x01\n\x17ProjectDomainAttributes\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12S\n\x13matching_attributes\x18\x03 \x01(\x0b\x32\".flyteidl.admin.MatchingAttributesR\x12matchingAttributes\x12\x10\n\x03org\x18\x04 \x01(\tR\x03org\"o\n$ProjectDomainAttributesUpdateRequest\x12G\n\nattributes\x18\x01 \x01(\x0b\x32\'.flyteidl.admin.ProjectDomainAttributesR\nattributes\"\'\n%ProjectDomainAttributesUpdateResponse\"\xaf\x01\n!ProjectDomainAttributesGetRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x46\n\rresource_type\x18\x03 
\x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\x12\x10\n\x03org\x18\x04 \x01(\tR\x03org\"m\n\"ProjectDomainAttributesGetResponse\x12G\n\nattributes\x18\x01 \x01(\x0b\x32\'.flyteidl.admin.ProjectDomainAttributesR\nattributes\"\xb2\x01\n$ProjectDomainAttributesDeleteRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x46\n\rresource_type\x18\x03 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\x12\x10\n\x03org\x18\x04 \x01(\tR\x03org\"\'\n%ProjectDomainAttributesDeleteResponseB\xc8\x01\n\x12\x63om.flyteidl.adminB\x1cProjectDomainAttributesProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -24,17 +24,17 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'\n\022com.flyteidl.adminB\034ProjectDomainAttributesProtoP\001Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\242\002\003FAX\252\002\016Flyteidl.Admin\312\002\016Flyteidl\\Admin\342\002\032Flyteidl\\Admin\\GPBMetadata\352\002\017Flyteidl::Admin' _globals['_PROJECTDOMAINATTRIBUTES']._serialized_start=108 - _globals['_PROJECTDOMAINATTRIBUTES']._serialized_end=268 - _globals['_PROJECTDOMAINATTRIBUTESUPDATEREQUEST']._serialized_start=270 - _globals['_PROJECTDOMAINATTRIBUTESUPDATEREQUEST']._serialized_end=381 - _globals['_PROJECTDOMAINATTRIBUTESUPDATERESPONSE']._serialized_start=383 - _globals['_PROJECTDOMAINATTRIBUTESUPDATERESPONSE']._serialized_end=422 - _globals['_PROJECTDOMAINATTRIBUTESGETREQUEST']._serialized_start=425 - _globals['_PROJECTDOMAINATTRIBUTESGETREQUEST']._serialized_end=582 - _globals['_PROJECTDOMAINATTRIBUTESGETRESPONSE']._serialized_start=584 - _globals['_PROJECTDOMAINATTRIBUTESGETRESPONSE']._serialized_end=693 - 
_globals['_PROJECTDOMAINATTRIBUTESDELETEREQUEST']._serialized_start=696 - _globals['_PROJECTDOMAINATTRIBUTESDELETEREQUEST']._serialized_end=856 - _globals['_PROJECTDOMAINATTRIBUTESDELETERESPONSE']._serialized_start=858 - _globals['_PROJECTDOMAINATTRIBUTESDELETERESPONSE']._serialized_end=897 + _globals['_PROJECTDOMAINATTRIBUTES']._serialized_end=286 + _globals['_PROJECTDOMAINATTRIBUTESUPDATEREQUEST']._serialized_start=288 + _globals['_PROJECTDOMAINATTRIBUTESUPDATEREQUEST']._serialized_end=399 + _globals['_PROJECTDOMAINATTRIBUTESUPDATERESPONSE']._serialized_start=401 + _globals['_PROJECTDOMAINATTRIBUTESUPDATERESPONSE']._serialized_end=440 + _globals['_PROJECTDOMAINATTRIBUTESGETREQUEST']._serialized_start=443 + _globals['_PROJECTDOMAINATTRIBUTESGETREQUEST']._serialized_end=618 + _globals['_PROJECTDOMAINATTRIBUTESGETRESPONSE']._serialized_start=620 + _globals['_PROJECTDOMAINATTRIBUTESGETRESPONSE']._serialized_end=729 + _globals['_PROJECTDOMAINATTRIBUTESDELETEREQUEST']._serialized_start=732 + _globals['_PROJECTDOMAINATTRIBUTESDELETEREQUEST']._serialized_end=910 + _globals['_PROJECTDOMAINATTRIBUTESDELETERESPONSE']._serialized_start=912 + _globals['_PROJECTDOMAINATTRIBUTESDELETERESPONSE']._serialized_end=951 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/project_domain_attributes_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/project_domain_attributes_pb2.pyi index af8624ca21..40a7a38bc7 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/project_domain_attributes_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/admin/project_domain_attributes_pb2.pyi @@ -6,14 +6,16 @@ from typing import ClassVar as _ClassVar, Mapping as _Mapping, Optional as _Opti DESCRIPTOR: _descriptor.FileDescriptor class ProjectDomainAttributes(_message.Message): - __slots__ = ["project", "domain", "matching_attributes"] + __slots__ = ["project", "domain", "matching_attributes", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] 
MATCHING_ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str domain: str matching_attributes: _matchable_resource_pb2.MatchingAttributes - def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., matching_attributes: _Optional[_Union[_matchable_resource_pb2.MatchingAttributes, _Mapping]] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., matching_attributes: _Optional[_Union[_matchable_resource_pb2.MatchingAttributes, _Mapping]] = ..., org: _Optional[str] = ...) -> None: ... class ProjectDomainAttributesUpdateRequest(_message.Message): __slots__ = ["attributes"] @@ -26,14 +28,16 @@ class ProjectDomainAttributesUpdateResponse(_message.Message): def __init__(self) -> None: ... class ProjectDomainAttributesGetRequest(_message.Message): - __slots__ = ["project", "domain", "resource_type"] + __slots__ = ["project", "domain", "resource_type", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] RESOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str domain: str resource_type: _matchable_resource_pb2.MatchableResource - def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ..., org: _Optional[str] = ...) -> None: ... class ProjectDomainAttributesGetResponse(_message.Message): __slots__ = ["attributes"] @@ -42,14 +46,16 @@ class ProjectDomainAttributesGetResponse(_message.Message): def __init__(self, attributes: _Optional[_Union[ProjectDomainAttributes, _Mapping]] = ...) -> None: ... 
class ProjectDomainAttributesDeleteRequest(_message.Message): - __slots__ = ["project", "domain", "resource_type"] + __slots__ = ["project", "domain", "resource_type", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] RESOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str domain: str resource_type: _matchable_resource_pb2.MatchableResource - def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ..., org: _Optional[str] = ...) -> None: ... class ProjectDomainAttributesDeleteResponse(_message.Message): __slots__ = [] diff --git a/flyteidl/gen/pb_python/flyteidl/admin/project_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/project_pb2.py index ae321b092c..d0b442e3a5 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/project_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/project_pb2.py @@ -14,7 +14,7 @@ from flyteidl.admin import common_pb2 as flyteidl_dot_admin_dot_common__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/admin/project.proto\x12\x0e\x66lyteidl.admin\x1a\x1b\x66lyteidl/admin/common.proto\",\n\x06\x44omain\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\"\xad\x02\n\x07Project\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x30\n\x07\x64omains\x18\x03 \x03(\x0b\x32\x16.flyteidl.admin.DomainR\x07\x64omains\x12 \n\x0b\x64\x65scription\x18\x04 \x01(\tR\x0b\x64\x65scription\x12.\n\x06labels\x18\x05 \x01(\x0b\x32\x16.flyteidl.admin.LabelsR\x06labels\x12:\n\x05state\x18\x06 
\x01(\x0e\x32$.flyteidl.admin.Project.ProjectStateR\x05state\">\n\x0cProjectState\x12\n\n\x06\x41\x43TIVE\x10\x00\x12\x0c\n\x08\x41RCHIVED\x10\x01\x12\x14\n\x10SYSTEM_GENERATED\x10\x02\"U\n\x08Projects\x12\x33\n\x08projects\x18\x01 \x03(\x0b\x32\x17.flyteidl.admin.ProjectR\x08projects\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"\x89\x01\n\x12ProjectListRequest\x12\x14\n\x05limit\x18\x01 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x03 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x04 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\"K\n\x16ProjectRegisterRequest\x12\x31\n\x07project\x18\x01 \x01(\x0b\x32\x17.flyteidl.admin.ProjectR\x07project\"\x19\n\x17ProjectRegisterResponse\"\x17\n\x15ProjectUpdateResponseB\xb8\x01\n\x12\x63om.flyteidl.adminB\x0cProjectProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/admin/project.proto\x12\x0e\x66lyteidl.admin\x1a\x1b\x66lyteidl/admin/common.proto\",\n\x06\x44omain\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\"\xbf\x02\n\x07Project\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x30\n\x07\x64omains\x18\x03 \x03(\x0b\x32\x16.flyteidl.admin.DomainR\x07\x64omains\x12 \n\x0b\x64\x65scription\x18\x04 \x01(\tR\x0b\x64\x65scription\x12.\n\x06labels\x18\x05 \x01(\x0b\x32\x16.flyteidl.admin.LabelsR\x06labels\x12:\n\x05state\x18\x06 \x01(\x0e\x32$.flyteidl.admin.Project.ProjectStateR\x05state\x12\x10\n\x03org\x18\x07 \x01(\tR\x03org\">\n\x0cProjectState\x12\n\n\x06\x41\x43TIVE\x10\x00\x12\x0c\n\x08\x41RCHIVED\x10\x01\x12\x14\n\x10SYSTEM_GENERATED\x10\x02\"U\n\x08Projects\x12\x33\n\x08projects\x18\x01 
\x03(\x0b\x32\x17.flyteidl.admin.ProjectR\x08projects\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"\x9b\x01\n\x12ProjectListRequest\x12\x14\n\x05limit\x18\x01 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\x12\x18\n\x07\x66ilters\x18\x03 \x01(\tR\x07\x66ilters\x12-\n\x07sort_by\x18\x04 \x01(\x0b\x32\x14.flyteidl.admin.SortR\x06sortBy\x12\x10\n\x03org\x18\x05 \x01(\tR\x03org\"K\n\x16ProjectRegisterRequest\x12\x31\n\x07project\x18\x01 \x01(\x0b\x32\x17.flyteidl.admin.ProjectR\x07project\"\x19\n\x17ProjectRegisterResponse\"\x17\n\x15ProjectUpdateResponseB\xb8\x01\n\x12\x63om.flyteidl.adminB\x0cProjectProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -26,17 +26,17 @@ _globals['_DOMAIN']._serialized_start=77 _globals['_DOMAIN']._serialized_end=121 _globals['_PROJECT']._serialized_start=124 - _globals['_PROJECT']._serialized_end=425 - _globals['_PROJECT_PROJECTSTATE']._serialized_start=363 - _globals['_PROJECT_PROJECTSTATE']._serialized_end=425 - _globals['_PROJECTS']._serialized_start=427 - _globals['_PROJECTS']._serialized_end=512 - _globals['_PROJECTLISTREQUEST']._serialized_start=515 - _globals['_PROJECTLISTREQUEST']._serialized_end=652 - _globals['_PROJECTREGISTERREQUEST']._serialized_start=654 - _globals['_PROJECTREGISTERREQUEST']._serialized_end=729 - _globals['_PROJECTREGISTERRESPONSE']._serialized_start=731 - _globals['_PROJECTREGISTERRESPONSE']._serialized_end=756 - _globals['_PROJECTUPDATERESPONSE']._serialized_start=758 - _globals['_PROJECTUPDATERESPONSE']._serialized_end=781 + _globals['_PROJECT']._serialized_end=443 + _globals['_PROJECT_PROJECTSTATE']._serialized_start=381 + _globals['_PROJECT_PROJECTSTATE']._serialized_end=443 + 
_globals['_PROJECTS']._serialized_start=445 + _globals['_PROJECTS']._serialized_end=530 + _globals['_PROJECTLISTREQUEST']._serialized_start=533 + _globals['_PROJECTLISTREQUEST']._serialized_end=688 + _globals['_PROJECTREGISTERREQUEST']._serialized_start=690 + _globals['_PROJECTREGISTERREQUEST']._serialized_end=765 + _globals['_PROJECTREGISTERRESPONSE']._serialized_start=767 + _globals['_PROJECTREGISTERRESPONSE']._serialized_end=792 + _globals['_PROJECTUPDATERESPONSE']._serialized_start=794 + _globals['_PROJECTUPDATERESPONSE']._serialized_end=817 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/project_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/project_pb2.pyi index 33b5106c81..1379fa3c0c 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/project_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/admin/project_pb2.pyi @@ -16,7 +16,7 @@ class Domain(_message.Message): def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... class Project(_message.Message): - __slots__ = ["id", "name", "domains", "description", "labels", "state"] + __slots__ = ["id", "name", "domains", "description", "labels", "state", "org"] class ProjectState(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): __slots__ = [] ACTIVE: _ClassVar[Project.ProjectState] @@ -31,13 +31,15 @@ class Project(_message.Message): DESCRIPTION_FIELD_NUMBER: _ClassVar[int] LABELS_FIELD_NUMBER: _ClassVar[int] STATE_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] id: str name: str domains: _containers.RepeatedCompositeFieldContainer[Domain] description: str labels: _common_pb2.Labels state: Project.ProjectState - def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., domains: _Optional[_Iterable[_Union[Domain, _Mapping]]] = ..., description: _Optional[str] = ..., labels: _Optional[_Union[_common_pb2.Labels, _Mapping]] = ..., state: _Optional[_Union[Project.ProjectState, str]] = ...) -> None: ... 
+ org: str + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., domains: _Optional[_Iterable[_Union[Domain, _Mapping]]] = ..., description: _Optional[str] = ..., labels: _Optional[_Union[_common_pb2.Labels, _Mapping]] = ..., state: _Optional[_Union[Project.ProjectState, str]] = ..., org: _Optional[str] = ...) -> None: ... class Projects(_message.Message): __slots__ = ["projects", "token"] @@ -48,16 +50,18 @@ class Projects(_message.Message): def __init__(self, projects: _Optional[_Iterable[_Union[Project, _Mapping]]] = ..., token: _Optional[str] = ...) -> None: ... class ProjectListRequest(_message.Message): - __slots__ = ["limit", "token", "filters", "sort_by"] + __slots__ = ["limit", "token", "filters", "sort_by", "org"] LIMIT_FIELD_NUMBER: _ClassVar[int] TOKEN_FIELD_NUMBER: _ClassVar[int] FILTERS_FIELD_NUMBER: _ClassVar[int] SORT_BY_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] limit: int token: str filters: str sort_by: _common_pb2.Sort - def __init__(self, limit: _Optional[int] = ..., token: _Optional[str] = ..., filters: _Optional[str] = ..., sort_by: _Optional[_Union[_common_pb2.Sort, _Mapping]] = ...) -> None: ... + org: str + def __init__(self, limit: _Optional[int] = ..., token: _Optional[str] = ..., filters: _Optional[str] = ..., sort_by: _Optional[_Union[_common_pb2.Sort, _Mapping]] = ..., org: _Optional[str] = ...) -> None: ... 
class ProjectRegisterRequest(_message.Message): __slots__ = ["project"] diff --git a/flyteidl/gen/pb_python/flyteidl/admin/workflow_attributes_pb2.py b/flyteidl/gen/pb_python/flyteidl/admin/workflow_attributes_pb2.py index a277fac588..1a8ee5c48e 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/workflow_attributes_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/admin/workflow_attributes_pb2.py @@ -14,7 +14,7 @@ from flyteidl.admin import matchable_resource_pb2 as flyteidl_dot_admin_dot_matchable__resource__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(flyteidl/admin/workflow_attributes.proto\x12\x0e\x66lyteidl.admin\x1a\'flyteidl/admin/matchable_resource.proto\"\xb7\x01\n\x12WorkflowAttributes\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x1a\n\x08workflow\x18\x03 \x01(\tR\x08workflow\x12S\n\x13matching_attributes\x18\x04 \x01(\x0b\x32\".flyteidl.admin.MatchingAttributesR\x12matchingAttributes\"e\n\x1fWorkflowAttributesUpdateRequest\x12\x42\n\nattributes\x18\x01 \x01(\x0b\x32\".flyteidl.admin.WorkflowAttributesR\nattributes\"\"\n WorkflowAttributesUpdateResponse\"\xb4\x01\n\x1cWorkflowAttributesGetRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x1a\n\x08workflow\x18\x03 \x01(\tR\x08workflow\x12\x46\n\rresource_type\x18\x04 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\"c\n\x1dWorkflowAttributesGetResponse\x12\x42\n\nattributes\x18\x01 \x01(\x0b\x32\".flyteidl.admin.WorkflowAttributesR\nattributes\"\xb7\x01\n\x1fWorkflowAttributesDeleteRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x1a\n\x08workflow\x18\x03 \x01(\tR\x08workflow\x12\x46\n\rresource_type\x18\x04 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\"\"\n 
WorkflowAttributesDeleteResponseB\xc3\x01\n\x12\x63om.flyteidl.adminB\x17WorkflowAttributesProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(flyteidl/admin/workflow_attributes.proto\x12\x0e\x66lyteidl.admin\x1a\'flyteidl/admin/matchable_resource.proto\"\xc9\x01\n\x12WorkflowAttributes\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x1a\n\x08workflow\x18\x03 \x01(\tR\x08workflow\x12S\n\x13matching_attributes\x18\x04 \x01(\x0b\x32\".flyteidl.admin.MatchingAttributesR\x12matchingAttributes\x12\x10\n\x03org\x18\x05 \x01(\tR\x03org\"e\n\x1fWorkflowAttributesUpdateRequest\x12\x42\n\nattributes\x18\x01 \x01(\x0b\x32\".flyteidl.admin.WorkflowAttributesR\nattributes\"\"\n WorkflowAttributesUpdateResponse\"\xc6\x01\n\x1cWorkflowAttributesGetRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x1a\n\x08workflow\x18\x03 \x01(\tR\x08workflow\x12\x46\n\rresource_type\x18\x04 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\x12\x10\n\x03org\x18\x05 \x01(\tR\x03org\"c\n\x1dWorkflowAttributesGetResponse\x12\x42\n\nattributes\x18\x01 \x01(\x0b\x32\".flyteidl.admin.WorkflowAttributesR\nattributes\"\xc9\x01\n\x1fWorkflowAttributesDeleteRequest\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x1a\n\x08workflow\x18\x03 \x01(\tR\x08workflow\x12\x46\n\rresource_type\x18\x04 \x01(\x0e\x32!.flyteidl.admin.MatchableResourceR\x0cresourceType\x12\x10\n\x03org\x18\x05 \x01(\tR\x03org\"\"\n 
WorkflowAttributesDeleteResponseB\xc3\x01\n\x12\x63om.flyteidl.adminB\x17WorkflowAttributesProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\xa2\x02\x03\x46\x41X\xaa\x02\x0e\x46lyteidl.Admin\xca\x02\x0e\x46lyteidl\\Admin\xe2\x02\x1a\x46lyteidl\\Admin\\GPBMetadata\xea\x02\x0f\x46lyteidl::Adminb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -24,17 +24,17 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'\n\022com.flyteidl.adminB\027WorkflowAttributesProtoP\001Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin\242\002\003FAX\252\002\016Flyteidl.Admin\312\002\016Flyteidl\\Admin\342\002\032Flyteidl\\Admin\\GPBMetadata\352\002\017Flyteidl::Admin' _globals['_WORKFLOWATTRIBUTES']._serialized_start=102 - _globals['_WORKFLOWATTRIBUTES']._serialized_end=285 - _globals['_WORKFLOWATTRIBUTESUPDATEREQUEST']._serialized_start=287 - _globals['_WORKFLOWATTRIBUTESUPDATEREQUEST']._serialized_end=388 - _globals['_WORKFLOWATTRIBUTESUPDATERESPONSE']._serialized_start=390 - _globals['_WORKFLOWATTRIBUTESUPDATERESPONSE']._serialized_end=424 - _globals['_WORKFLOWATTRIBUTESGETREQUEST']._serialized_start=427 - _globals['_WORKFLOWATTRIBUTESGETREQUEST']._serialized_end=607 - _globals['_WORKFLOWATTRIBUTESGETRESPONSE']._serialized_start=609 - _globals['_WORKFLOWATTRIBUTESGETRESPONSE']._serialized_end=708 - _globals['_WORKFLOWATTRIBUTESDELETEREQUEST']._serialized_start=711 - _globals['_WORKFLOWATTRIBUTESDELETEREQUEST']._serialized_end=894 - _globals['_WORKFLOWATTRIBUTESDELETERESPONSE']._serialized_start=896 - _globals['_WORKFLOWATTRIBUTESDELETERESPONSE']._serialized_end=930 + _globals['_WORKFLOWATTRIBUTES']._serialized_end=303 + _globals['_WORKFLOWATTRIBUTESUPDATEREQUEST']._serialized_start=305 + _globals['_WORKFLOWATTRIBUTESUPDATEREQUEST']._serialized_end=406 + _globals['_WORKFLOWATTRIBUTESUPDATERESPONSE']._serialized_start=408 + _globals['_WORKFLOWATTRIBUTESUPDATERESPONSE']._serialized_end=442 + 
_globals['_WORKFLOWATTRIBUTESGETREQUEST']._serialized_start=445 + _globals['_WORKFLOWATTRIBUTESGETREQUEST']._serialized_end=643 + _globals['_WORKFLOWATTRIBUTESGETRESPONSE']._serialized_start=645 + _globals['_WORKFLOWATTRIBUTESGETRESPONSE']._serialized_end=744 + _globals['_WORKFLOWATTRIBUTESDELETEREQUEST']._serialized_start=747 + _globals['_WORKFLOWATTRIBUTESDELETEREQUEST']._serialized_end=948 + _globals['_WORKFLOWATTRIBUTESDELETERESPONSE']._serialized_start=950 + _globals['_WORKFLOWATTRIBUTESDELETERESPONSE']._serialized_end=984 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/admin/workflow_attributes_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/admin/workflow_attributes_pb2.pyi index 67b040a36d..57c7d80c80 100644 --- a/flyteidl/gen/pb_python/flyteidl/admin/workflow_attributes_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/admin/workflow_attributes_pb2.pyi @@ -6,16 +6,18 @@ from typing import ClassVar as _ClassVar, Mapping as _Mapping, Optional as _Opti DESCRIPTOR: _descriptor.FileDescriptor class WorkflowAttributes(_message.Message): - __slots__ = ["project", "domain", "workflow", "matching_attributes"] + __slots__ = ["project", "domain", "workflow", "matching_attributes", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] WORKFLOW_FIELD_NUMBER: _ClassVar[int] MATCHING_ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str domain: str workflow: str matching_attributes: _matchable_resource_pb2.MatchingAttributes - def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., workflow: _Optional[str] = ..., matching_attributes: _Optional[_Union[_matchable_resource_pb2.MatchingAttributes, _Mapping]] = ...) -> None: ... 
+ org: str + def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., workflow: _Optional[str] = ..., matching_attributes: _Optional[_Union[_matchable_resource_pb2.MatchingAttributes, _Mapping]] = ..., org: _Optional[str] = ...) -> None: ... class WorkflowAttributesUpdateRequest(_message.Message): __slots__ = ["attributes"] @@ -28,16 +30,18 @@ class WorkflowAttributesUpdateResponse(_message.Message): def __init__(self) -> None: ... class WorkflowAttributesGetRequest(_message.Message): - __slots__ = ["project", "domain", "workflow", "resource_type"] + __slots__ = ["project", "domain", "workflow", "resource_type", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] WORKFLOW_FIELD_NUMBER: _ClassVar[int] RESOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str domain: str workflow: str resource_type: _matchable_resource_pb2.MatchableResource - def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., workflow: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., workflow: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ..., org: _Optional[str] = ...) -> None: ... class WorkflowAttributesGetResponse(_message.Message): __slots__ = ["attributes"] @@ -46,16 +50,18 @@ class WorkflowAttributesGetResponse(_message.Message): def __init__(self, attributes: _Optional[_Union[WorkflowAttributes, _Mapping]] = ...) -> None: ... 
class WorkflowAttributesDeleteRequest(_message.Message): - __slots__ = ["project", "domain", "workflow", "resource_type"] + __slots__ = ["project", "domain", "workflow", "resource_type", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] WORKFLOW_FIELD_NUMBER: _ClassVar[int] RESOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str domain: str workflow: str resource_type: _matchable_resource_pb2.MatchableResource - def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., workflow: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., workflow: _Optional[str] = ..., resource_type: _Optional[_Union[_matchable_resource_pb2.MatchableResource, str]] = ..., org: _Optional[str] = ...) -> None: ... class WorkflowAttributesDeleteResponse(_message.Message): __slots__ = [] diff --git a/flyteidl/gen/pb_python/flyteidl/artifact/__init__.py b/flyteidl/gen/pb_python/flyteidl/artifact/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/flyteidl/gen/pb_python/flyteidl/artifact/artifacts_pb2.py b/flyteidl/gen/pb_python/flyteidl/artifact/artifacts_pb2.py deleted file mode 100644 index 84e5a5344a..0000000000 --- a/flyteidl/gen/pb_python/flyteidl/artifact/artifacts_pb2.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: flyteidl/artifact/artifacts.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from flyteidl.admin import launch_plan_pb2 as flyteidl_dot_admin_dot_launch__plan__pb2 -from flyteidl.core import literals_pb2 as flyteidl_dot_core_dot_literals__pb2 -from flyteidl.core import types_pb2 as flyteidl_dot_core_dot_types__pb2 -from flyteidl.core import identifier_pb2 as flyteidl_dot_core_dot_identifier__pb2 -from flyteidl.core import artifact_id_pb2 as flyteidl_dot_core_dot_artifact__id__pb2 -from flyteidl.core import interface_pb2 as flyteidl_dot_core_dot_interface__pb2 -from flyteidl.event import cloudevents_pb2 as flyteidl_dot_event_dot_cloudevents__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!flyteidl/artifact/artifacts.proto\x12\x11\x66lyteidl.artifact\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/api/annotations.proto\x1a flyteidl/admin/launch_plan.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x19\x66lyteidl/core/types.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1f\x66lyteidl/core/artifact_id.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a flyteidl/event/cloudevents.proto\"\xca\x01\n\x08\x41rtifact\x12:\n\x0b\x61rtifact_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.ArtifactIDR\nartifactId\x12\x33\n\x04spec\x18\x02 \x01(\x0b\x32\x1f.flyteidl.artifact.ArtifactSpecR\x04spec\x12\x12\n\x04tags\x18\x03 \x03(\tR\x04tags\x12\x39\n\x06source\x18\x04 
\x01(\x0b\x32!.flyteidl.artifact.ArtifactSourceR\x06source\"\x8b\x03\n\x15\x43reateArtifactRequest\x12=\n\x0c\x61rtifact_key\x18\x01 \x01(\x0b\x32\x1a.flyteidl.core.ArtifactKeyR\x0b\x61rtifactKey\x12\x18\n\x07version\x18\x03 \x01(\tR\x07version\x12\x33\n\x04spec\x18\x02 \x01(\x0b\x32\x1f.flyteidl.artifact.ArtifactSpecR\x04spec\x12X\n\npartitions\x18\x04 \x03(\x0b\x32\x38.flyteidl.artifact.CreateArtifactRequest.PartitionsEntryR\npartitions\x12\x10\n\x03tag\x18\x05 \x01(\tR\x03tag\x12\x39\n\x06source\x18\x06 \x01(\x0b\x32!.flyteidl.artifact.ArtifactSourceR\x06source\x1a=\n\x0fPartitionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xfb\x01\n\x0e\x41rtifactSource\x12Y\n\x12workflow_execution\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x11workflowExecution\x12\x17\n\x07node_id\x18\x02 \x01(\tR\x06nodeId\x12\x32\n\x07task_id\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x06taskId\x12#\n\rretry_attempt\x18\x04 \x01(\rR\x0cretryAttempt\x12\x1c\n\tprincipal\x18\x05 \x01(\tR\tprincipal\"\xf9\x01\n\x0c\x41rtifactSpec\x12,\n\x05value\x18\x01 \x01(\x0b\x32\x16.flyteidl.core.LiteralR\x05value\x12.\n\x04type\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.LiteralTypeR\x04type\x12+\n\x11short_description\x18\x03 \x01(\tR\x10shortDescription\x12\x39\n\ruser_metadata\x18\x04 \x01(\x0b\x32\x14.google.protobuf.AnyR\x0cuserMetadata\x12#\n\rmetadata_type\x18\x05 \x01(\tR\x0cmetadataType\"Q\n\x16\x43reateArtifactResponse\x12\x37\n\x08\x61rtifact\x18\x01 \x01(\x0b\x32\x1b.flyteidl.artifact.ArtifactR\x08\x61rtifact\"b\n\x12GetArtifactRequest\x12\x32\n\x05query\x18\x01 \x01(\x0b\x32\x1c.flyteidl.core.ArtifactQueryR\x05query\x12\x18\n\x07\x64\x65tails\x18\x02 \x01(\x08R\x07\x64\x65tails\"N\n\x13GetArtifactResponse\x12\x37\n\x08\x61rtifact\x18\x01 \x01(\x0b\x32\x1b.flyteidl.artifact.ArtifactR\x08\x61rtifact\"`\n\rSearchOptions\x12+\n\x11strict_partitions\x18\x01 
\x01(\x08R\x10strictPartitions\x12\"\n\rlatest_by_key\x18\x02 \x01(\x08R\x0blatestByKey\"\xb2\x02\n\x16SearchArtifactsRequest\x12=\n\x0c\x61rtifact_key\x18\x01 \x01(\x0b\x32\x1a.flyteidl.core.ArtifactKeyR\x0b\x61rtifactKey\x12\x39\n\npartitions\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.PartitionsR\npartitions\x12\x1c\n\tprincipal\x18\x03 \x01(\tR\tprincipal\x12\x18\n\x07version\x18\x04 \x01(\tR\x07version\x12:\n\x07options\x18\x05 \x01(\x0b\x32 .flyteidl.artifact.SearchOptionsR\x07options\x12\x14\n\x05token\x18\x06 \x01(\tR\x05token\x12\x14\n\x05limit\x18\x07 \x01(\x05R\x05limit\"j\n\x17SearchArtifactsResponse\x12\x39\n\tartifacts\x18\x01 \x03(\x0b\x32\x1b.flyteidl.artifact.ArtifactR\tartifacts\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\"\xdc\x01\n\x19\x46indByWorkflowExecRequest\x12\x43\n\x07\x65xec_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x06\x65xecId\x12T\n\tdirection\x18\x02 \x01(\x0e\x32\x36.flyteidl.artifact.FindByWorkflowExecRequest.DirectionR\tdirection\"$\n\tDirection\x12\n\n\x06INPUTS\x10\x00\x12\x0b\n\x07OUTPUTS\x10\x01\"\x7f\n\rAddTagRequest\x12:\n\x0b\x61rtifact_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.ArtifactIDR\nartifactId\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\x12\x1c\n\toverwrite\x18\x03 \x01(\x08R\toverwrite\"\x10\n\x0e\x41\x64\x64TagResponse\"b\n\x14\x43reateTriggerRequest\x12J\n\x13trigger_launch_plan\x18\x01 \x01(\x0b\x32\x1a.flyteidl.admin.LaunchPlanR\x11triggerLaunchPlan\"\x17\n\x15\x43reateTriggerResponse\"P\n\x14\x44\x65leteTriggerRequest\x12\x38\n\ntrigger_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\ttriggerId\"\x17\n\x15\x44\x65leteTriggerResponse\"\x80\x01\n\x10\x41rtifactProducer\x12\x36\n\tentity_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x08\x65ntityId\x12\x34\n\x07outputs\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.VariableMapR\x07outputs\"\\\n\x17RegisterProducerRequest\x12\x41\n\tproducers\x18\x01 
\x03(\x0b\x32#.flyteidl.artifact.ArtifactProducerR\tproducers\"\x7f\n\x10\x41rtifactConsumer\x12\x36\n\tentity_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x08\x65ntityId\x12\x33\n\x06inputs\x18\x02 \x01(\x0b\x32\x1b.flyteidl.core.ParameterMapR\x06inputs\"\\\n\x17RegisterConsumerRequest\x12\x41\n\tconsumers\x18\x01 \x03(\x0b\x32#.flyteidl.artifact.ArtifactConsumerR\tconsumers\"\x12\n\x10RegisterResponse\"\x9a\x01\n\x16\x45xecutionInputsRequest\x12M\n\x0c\x65xecution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\x12\x31\n\x06inputs\x18\x02 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x06inputs\"\x19\n\x17\x45xecutionInputsResponse2\xd7\x0e\n\x10\x41rtifactRegistry\x12g\n\x0e\x43reateArtifact\x12(.flyteidl.artifact.CreateArtifactRequest\x1a).flyteidl.artifact.CreateArtifactResponse\"\x00\x12\x85\x05\n\x0bGetArtifact\x12%.flyteidl.artifact.GetArtifactRequest\x1a&.flyteidl.artifact.GetArtifactResponse\"\xa6\x04\x82\xd3\xe4\x93\x02\x9f\x04Z\xb8\x01\x12\xb5\x01/artifacts/api/v1/data/artifact/id/{query.artifact_id.artifact_key.project}/{query.artifact_id.artifact_key.domain}/{query.artifact_id.artifact_key.name}/{query.artifact_id.version}Z\x9c\x01\x12\x99\x01/artifacts/api/v1/data/artifact/id/{query.artifact_id.artifact_key.project}/{query.artifact_id.artifact_key.domain}/{query.artifact_id.artifact_key.name}Z\xa0\x01\x12\x9d\x01/artifacts/api/v1/data/artifact/tag/{query.artifact_tag.artifact_key.project}/{query.artifact_tag.artifact_key.domain}/{query.artifact_tag.artifact_key.name}\x12 
/artifacts/api/v1/data/artifacts\x12\xa0\x02\n\x0fSearchArtifacts\x12).flyteidl.artifact.SearchArtifactsRequest\x1a*.flyteidl.artifact.SearchArtifactsResponse\"\xb5\x01\x82\xd3\xe4\x93\x02\xae\x01ZK\x12I/artifacts/api/v1/data/query/{artifact_key.project}/{artifact_key.domain}\x12_/artifacts/api/v1/data/query/s/{artifact_key.project}/{artifact_key.domain}/{artifact_key.name}\x12\x64\n\rCreateTrigger\x12\'.flyteidl.artifact.CreateTriggerRequest\x1a(.flyteidl.artifact.CreateTriggerResponse\"\x00\x12\x64\n\rDeleteTrigger\x12\'.flyteidl.artifact.DeleteTriggerRequest\x1a(.flyteidl.artifact.DeleteTriggerResponse\"\x00\x12O\n\x06\x41\x64\x64Tag\x12 .flyteidl.artifact.AddTagRequest\x1a!.flyteidl.artifact.AddTagResponse\"\x00\x12\x65\n\x10RegisterProducer\x12*.flyteidl.artifact.RegisterProducerRequest\x1a#.flyteidl.artifact.RegisterResponse\"\x00\x12\x65\n\x10RegisterConsumer\x12*.flyteidl.artifact.RegisterConsumerRequest\x1a#.flyteidl.artifact.RegisterResponse\"\x00\x12m\n\x12SetExecutionInputs\x12).flyteidl.artifact.ExecutionInputsRequest\x1a*.flyteidl.artifact.ExecutionInputsResponse\"\x00\x12\xd4\x01\n\x12\x46indByWorkflowExec\x12,.flyteidl.artifact.FindByWorkflowExecRequest\x1a*.flyteidl.artifact.SearchArtifactsResponse\"d\x82\xd3\xe4\x93\x02^\x12\\/artifacts/api/v1/data/query/e/{exec_id.project}/{exec_id.domain}/{exec_id.name}/{direction}B\xcc\x01\n\x15\x63om.flyteidl.artifactB\x0e\x41rtifactsProtoP\x01Z>github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/artifact\xa2\x02\x03\x46\x41X\xaa\x02\x11\x46lyteidl.Artifact\xca\x02\x11\x46lyteidl\\Artifact\xe2\x02\x1d\x46lyteidl\\Artifact\\GPBMetadata\xea\x02\x12\x46lyteidl::Artifactb\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'flyteidl.artifact.artifacts_pb2', _globals) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = 
b'\n\025com.flyteidl.artifactB\016ArtifactsProtoP\001Z>github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/artifact\242\002\003FAX\252\002\021Flyteidl.Artifact\312\002\021Flyteidl\\Artifact\342\002\035Flyteidl\\Artifact\\GPBMetadata\352\002\022Flyteidl::Artifact' - _CREATEARTIFACTREQUEST_PARTITIONSENTRY._options = None - _CREATEARTIFACTREQUEST_PARTITIONSENTRY._serialized_options = b'8\001' - _ARTIFACTREGISTRY.methods_by_name['GetArtifact']._options = None - _ARTIFACTREGISTRY.methods_by_name['GetArtifact']._serialized_options = b'\202\323\344\223\002\237\004Z\270\001\022\265\001/artifacts/api/v1/data/artifact/id/{query.artifact_id.artifact_key.project}/{query.artifact_id.artifact_key.domain}/{query.artifact_id.artifact_key.name}/{query.artifact_id.version}Z\234\001\022\231\001/artifacts/api/v1/data/artifact/id/{query.artifact_id.artifact_key.project}/{query.artifact_id.artifact_key.domain}/{query.artifact_id.artifact_key.name}Z\240\001\022\235\001/artifacts/api/v1/data/artifact/tag/{query.artifact_tag.artifact_key.project}/{query.artifact_tag.artifact_key.domain}/{query.artifact_tag.artifact_key.name}\022 /artifacts/api/v1/data/artifacts' - _ARTIFACTREGISTRY.methods_by_name['SearchArtifacts']._options = None - _ARTIFACTREGISTRY.methods_by_name['SearchArtifacts']._serialized_options = b'\202\323\344\223\002\256\001ZK\022I/artifacts/api/v1/data/query/{artifact_key.project}/{artifact_key.domain}\022_/artifacts/api/v1/data/query/s/{artifact_key.project}/{artifact_key.domain}/{artifact_key.name}' - _ARTIFACTREGISTRY.methods_by_name['FindByWorkflowExec']._options = None - _ARTIFACTREGISTRY.methods_by_name['FindByWorkflowExec']._serialized_options = b'\202\323\344\223\002^\022\\/artifacts/api/v1/data/query/e/{exec_id.project}/{exec_id.domain}/{exec_id.name}/{direction}' - _globals['_ARTIFACT']._serialized_start=335 - _globals['_ARTIFACT']._serialized_end=537 - _globals['_CREATEARTIFACTREQUEST']._serialized_start=540 - 
_globals['_CREATEARTIFACTREQUEST']._serialized_end=935 - _globals['_CREATEARTIFACTREQUEST_PARTITIONSENTRY']._serialized_start=874 - _globals['_CREATEARTIFACTREQUEST_PARTITIONSENTRY']._serialized_end=935 - _globals['_ARTIFACTSOURCE']._serialized_start=938 - _globals['_ARTIFACTSOURCE']._serialized_end=1189 - _globals['_ARTIFACTSPEC']._serialized_start=1192 - _globals['_ARTIFACTSPEC']._serialized_end=1441 - _globals['_CREATEARTIFACTRESPONSE']._serialized_start=1443 - _globals['_CREATEARTIFACTRESPONSE']._serialized_end=1524 - _globals['_GETARTIFACTREQUEST']._serialized_start=1526 - _globals['_GETARTIFACTREQUEST']._serialized_end=1624 - _globals['_GETARTIFACTRESPONSE']._serialized_start=1626 - _globals['_GETARTIFACTRESPONSE']._serialized_end=1704 - _globals['_SEARCHOPTIONS']._serialized_start=1706 - _globals['_SEARCHOPTIONS']._serialized_end=1802 - _globals['_SEARCHARTIFACTSREQUEST']._serialized_start=1805 - _globals['_SEARCHARTIFACTSREQUEST']._serialized_end=2111 - _globals['_SEARCHARTIFACTSRESPONSE']._serialized_start=2113 - _globals['_SEARCHARTIFACTSRESPONSE']._serialized_end=2219 - _globals['_FINDBYWORKFLOWEXECREQUEST']._serialized_start=2222 - _globals['_FINDBYWORKFLOWEXECREQUEST']._serialized_end=2442 - _globals['_FINDBYWORKFLOWEXECREQUEST_DIRECTION']._serialized_start=2406 - _globals['_FINDBYWORKFLOWEXECREQUEST_DIRECTION']._serialized_end=2442 - _globals['_ADDTAGREQUEST']._serialized_start=2444 - _globals['_ADDTAGREQUEST']._serialized_end=2571 - _globals['_ADDTAGRESPONSE']._serialized_start=2573 - _globals['_ADDTAGRESPONSE']._serialized_end=2589 - _globals['_CREATETRIGGERREQUEST']._serialized_start=2591 - _globals['_CREATETRIGGERREQUEST']._serialized_end=2689 - _globals['_CREATETRIGGERRESPONSE']._serialized_start=2691 - _globals['_CREATETRIGGERRESPONSE']._serialized_end=2714 - _globals['_DELETETRIGGERREQUEST']._serialized_start=2716 - _globals['_DELETETRIGGERREQUEST']._serialized_end=2796 - _globals['_DELETETRIGGERRESPONSE']._serialized_start=2798 - 
_globals['_DELETETRIGGERRESPONSE']._serialized_end=2821 - _globals['_ARTIFACTPRODUCER']._serialized_start=2824 - _globals['_ARTIFACTPRODUCER']._serialized_end=2952 - _globals['_REGISTERPRODUCERREQUEST']._serialized_start=2954 - _globals['_REGISTERPRODUCERREQUEST']._serialized_end=3046 - _globals['_ARTIFACTCONSUMER']._serialized_start=3048 - _globals['_ARTIFACTCONSUMER']._serialized_end=3175 - _globals['_REGISTERCONSUMERREQUEST']._serialized_start=3177 - _globals['_REGISTERCONSUMERREQUEST']._serialized_end=3269 - _globals['_REGISTERRESPONSE']._serialized_start=3271 - _globals['_REGISTERRESPONSE']._serialized_end=3289 - _globals['_EXECUTIONINPUTSREQUEST']._serialized_start=3292 - _globals['_EXECUTIONINPUTSREQUEST']._serialized_end=3446 - _globals['_EXECUTIONINPUTSRESPONSE']._serialized_start=3448 - _globals['_EXECUTIONINPUTSRESPONSE']._serialized_end=3473 - _globals['_ARTIFACTREGISTRY']._serialized_start=3476 - _globals['_ARTIFACTREGISTRY']._serialized_end=5355 -# @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/artifact/artifacts_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/artifact/artifacts_pb2.pyi deleted file mode 100644 index 91c389b456..0000000000 --- a/flyteidl/gen/pb_python/flyteidl/artifact/artifacts_pb2.pyi +++ /dev/null @@ -1,225 +0,0 @@ -from google.protobuf import any_pb2 as _any_pb2 -from google.api import annotations_pb2 as _annotations_pb2 -from flyteidl.admin import launch_plan_pb2 as _launch_plan_pb2 -from flyteidl.core import literals_pb2 as _literals_pb2 -from flyteidl.core import types_pb2 as _types_pb2 -from flyteidl.core import identifier_pb2 as _identifier_pb2 -from flyteidl.core import artifact_id_pb2 as _artifact_id_pb2 -from flyteidl.core import interface_pb2 as _interface_pb2 -from flyteidl.event import cloudevents_pb2 as _cloudevents_pb2 -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import 
descriptor as _descriptor -from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union - -DESCRIPTOR: _descriptor.FileDescriptor - -class Artifact(_message.Message): - __slots__ = ["artifact_id", "spec", "tags", "source"] - ARTIFACT_ID_FIELD_NUMBER: _ClassVar[int] - SPEC_FIELD_NUMBER: _ClassVar[int] - TAGS_FIELD_NUMBER: _ClassVar[int] - SOURCE_FIELD_NUMBER: _ClassVar[int] - artifact_id: _artifact_id_pb2.ArtifactID - spec: ArtifactSpec - tags: _containers.RepeatedScalarFieldContainer[str] - source: ArtifactSource - def __init__(self, artifact_id: _Optional[_Union[_artifact_id_pb2.ArtifactID, _Mapping]] = ..., spec: _Optional[_Union[ArtifactSpec, _Mapping]] = ..., tags: _Optional[_Iterable[str]] = ..., source: _Optional[_Union[ArtifactSource, _Mapping]] = ...) -> None: ... - -class CreateArtifactRequest(_message.Message): - __slots__ = ["artifact_key", "version", "spec", "partitions", "tag", "source"] - class PartitionsEntry(_message.Message): - __slots__ = ["key", "value"] - KEY_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - key: str - value: str - def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... 
- ARTIFACT_KEY_FIELD_NUMBER: _ClassVar[int] - VERSION_FIELD_NUMBER: _ClassVar[int] - SPEC_FIELD_NUMBER: _ClassVar[int] - PARTITIONS_FIELD_NUMBER: _ClassVar[int] - TAG_FIELD_NUMBER: _ClassVar[int] - SOURCE_FIELD_NUMBER: _ClassVar[int] - artifact_key: _artifact_id_pb2.ArtifactKey - version: str - spec: ArtifactSpec - partitions: _containers.ScalarMap[str, str] - tag: str - source: ArtifactSource - def __init__(self, artifact_key: _Optional[_Union[_artifact_id_pb2.ArtifactKey, _Mapping]] = ..., version: _Optional[str] = ..., spec: _Optional[_Union[ArtifactSpec, _Mapping]] = ..., partitions: _Optional[_Mapping[str, str]] = ..., tag: _Optional[str] = ..., source: _Optional[_Union[ArtifactSource, _Mapping]] = ...) -> None: ... - -class ArtifactSource(_message.Message): - __slots__ = ["workflow_execution", "node_id", "task_id", "retry_attempt", "principal"] - WORKFLOW_EXECUTION_FIELD_NUMBER: _ClassVar[int] - NODE_ID_FIELD_NUMBER: _ClassVar[int] - TASK_ID_FIELD_NUMBER: _ClassVar[int] - RETRY_ATTEMPT_FIELD_NUMBER: _ClassVar[int] - PRINCIPAL_FIELD_NUMBER: _ClassVar[int] - workflow_execution: _identifier_pb2.WorkflowExecutionIdentifier - node_id: str - task_id: _identifier_pb2.Identifier - retry_attempt: int - principal: str - def __init__(self, workflow_execution: _Optional[_Union[_identifier_pb2.WorkflowExecutionIdentifier, _Mapping]] = ..., node_id: _Optional[str] = ..., task_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., retry_attempt: _Optional[int] = ..., principal: _Optional[str] = ...) -> None: ... 
- -class ArtifactSpec(_message.Message): - __slots__ = ["value", "type", "short_description", "user_metadata", "metadata_type"] - VALUE_FIELD_NUMBER: _ClassVar[int] - TYPE_FIELD_NUMBER: _ClassVar[int] - SHORT_DESCRIPTION_FIELD_NUMBER: _ClassVar[int] - USER_METADATA_FIELD_NUMBER: _ClassVar[int] - METADATA_TYPE_FIELD_NUMBER: _ClassVar[int] - value: _literals_pb2.Literal - type: _types_pb2.LiteralType - short_description: str - user_metadata: _any_pb2.Any - metadata_type: str - def __init__(self, value: _Optional[_Union[_literals_pb2.Literal, _Mapping]] = ..., type: _Optional[_Union[_types_pb2.LiteralType, _Mapping]] = ..., short_description: _Optional[str] = ..., user_metadata: _Optional[_Union[_any_pb2.Any, _Mapping]] = ..., metadata_type: _Optional[str] = ...) -> None: ... - -class CreateArtifactResponse(_message.Message): - __slots__ = ["artifact"] - ARTIFACT_FIELD_NUMBER: _ClassVar[int] - artifact: Artifact - def __init__(self, artifact: _Optional[_Union[Artifact, _Mapping]] = ...) -> None: ... - -class GetArtifactRequest(_message.Message): - __slots__ = ["query", "details"] - QUERY_FIELD_NUMBER: _ClassVar[int] - DETAILS_FIELD_NUMBER: _ClassVar[int] - query: _artifact_id_pb2.ArtifactQuery - details: bool - def __init__(self, query: _Optional[_Union[_artifact_id_pb2.ArtifactQuery, _Mapping]] = ..., details: bool = ...) -> None: ... - -class GetArtifactResponse(_message.Message): - __slots__ = ["artifact"] - ARTIFACT_FIELD_NUMBER: _ClassVar[int] - artifact: Artifact - def __init__(self, artifact: _Optional[_Union[Artifact, _Mapping]] = ...) -> None: ... - -class SearchOptions(_message.Message): - __slots__ = ["strict_partitions", "latest_by_key"] - STRICT_PARTITIONS_FIELD_NUMBER: _ClassVar[int] - LATEST_BY_KEY_FIELD_NUMBER: _ClassVar[int] - strict_partitions: bool - latest_by_key: bool - def __init__(self, strict_partitions: bool = ..., latest_by_key: bool = ...) -> None: ... 
- -class SearchArtifactsRequest(_message.Message): - __slots__ = ["artifact_key", "partitions", "principal", "version", "options", "token", "limit"] - ARTIFACT_KEY_FIELD_NUMBER: _ClassVar[int] - PARTITIONS_FIELD_NUMBER: _ClassVar[int] - PRINCIPAL_FIELD_NUMBER: _ClassVar[int] - VERSION_FIELD_NUMBER: _ClassVar[int] - OPTIONS_FIELD_NUMBER: _ClassVar[int] - TOKEN_FIELD_NUMBER: _ClassVar[int] - LIMIT_FIELD_NUMBER: _ClassVar[int] - artifact_key: _artifact_id_pb2.ArtifactKey - partitions: _artifact_id_pb2.Partitions - principal: str - version: str - options: SearchOptions - token: str - limit: int - def __init__(self, artifact_key: _Optional[_Union[_artifact_id_pb2.ArtifactKey, _Mapping]] = ..., partitions: _Optional[_Union[_artifact_id_pb2.Partitions, _Mapping]] = ..., principal: _Optional[str] = ..., version: _Optional[str] = ..., options: _Optional[_Union[SearchOptions, _Mapping]] = ..., token: _Optional[str] = ..., limit: _Optional[int] = ...) -> None: ... - -class SearchArtifactsResponse(_message.Message): - __slots__ = ["artifacts", "token"] - ARTIFACTS_FIELD_NUMBER: _ClassVar[int] - TOKEN_FIELD_NUMBER: _ClassVar[int] - artifacts: _containers.RepeatedCompositeFieldContainer[Artifact] - token: str - def __init__(self, artifacts: _Optional[_Iterable[_Union[Artifact, _Mapping]]] = ..., token: _Optional[str] = ...) -> None: ... 
- -class FindByWorkflowExecRequest(_message.Message): - __slots__ = ["exec_id", "direction"] - class Direction(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = [] - INPUTS: _ClassVar[FindByWorkflowExecRequest.Direction] - OUTPUTS: _ClassVar[FindByWorkflowExecRequest.Direction] - INPUTS: FindByWorkflowExecRequest.Direction - OUTPUTS: FindByWorkflowExecRequest.Direction - EXEC_ID_FIELD_NUMBER: _ClassVar[int] - DIRECTION_FIELD_NUMBER: _ClassVar[int] - exec_id: _identifier_pb2.WorkflowExecutionIdentifier - direction: FindByWorkflowExecRequest.Direction - def __init__(self, exec_id: _Optional[_Union[_identifier_pb2.WorkflowExecutionIdentifier, _Mapping]] = ..., direction: _Optional[_Union[FindByWorkflowExecRequest.Direction, str]] = ...) -> None: ... - -class AddTagRequest(_message.Message): - __slots__ = ["artifact_id", "value", "overwrite"] - ARTIFACT_ID_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - OVERWRITE_FIELD_NUMBER: _ClassVar[int] - artifact_id: _artifact_id_pb2.ArtifactID - value: str - overwrite: bool - def __init__(self, artifact_id: _Optional[_Union[_artifact_id_pb2.ArtifactID, _Mapping]] = ..., value: _Optional[str] = ..., overwrite: bool = ...) -> None: ... - -class AddTagResponse(_message.Message): - __slots__ = [] - def __init__(self) -> None: ... - -class CreateTriggerRequest(_message.Message): - __slots__ = ["trigger_launch_plan"] - TRIGGER_LAUNCH_PLAN_FIELD_NUMBER: _ClassVar[int] - trigger_launch_plan: _launch_plan_pb2.LaunchPlan - def __init__(self, trigger_launch_plan: _Optional[_Union[_launch_plan_pb2.LaunchPlan, _Mapping]] = ...) -> None: ... - -class CreateTriggerResponse(_message.Message): - __slots__ = [] - def __init__(self) -> None: ... - -class DeleteTriggerRequest(_message.Message): - __slots__ = ["trigger_id"] - TRIGGER_ID_FIELD_NUMBER: _ClassVar[int] - trigger_id: _identifier_pb2.Identifier - def __init__(self, trigger_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ...) -> None: ... 
- -class DeleteTriggerResponse(_message.Message): - __slots__ = [] - def __init__(self) -> None: ... - -class ArtifactProducer(_message.Message): - __slots__ = ["entity_id", "outputs"] - ENTITY_ID_FIELD_NUMBER: _ClassVar[int] - OUTPUTS_FIELD_NUMBER: _ClassVar[int] - entity_id: _identifier_pb2.Identifier - outputs: _interface_pb2.VariableMap - def __init__(self, entity_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., outputs: _Optional[_Union[_interface_pb2.VariableMap, _Mapping]] = ...) -> None: ... - -class RegisterProducerRequest(_message.Message): - __slots__ = ["producers"] - PRODUCERS_FIELD_NUMBER: _ClassVar[int] - producers: _containers.RepeatedCompositeFieldContainer[ArtifactProducer] - def __init__(self, producers: _Optional[_Iterable[_Union[ArtifactProducer, _Mapping]]] = ...) -> None: ... - -class ArtifactConsumer(_message.Message): - __slots__ = ["entity_id", "inputs"] - ENTITY_ID_FIELD_NUMBER: _ClassVar[int] - INPUTS_FIELD_NUMBER: _ClassVar[int] - entity_id: _identifier_pb2.Identifier - inputs: _interface_pb2.ParameterMap - def __init__(self, entity_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., inputs: _Optional[_Union[_interface_pb2.ParameterMap, _Mapping]] = ...) -> None: ... - -class RegisterConsumerRequest(_message.Message): - __slots__ = ["consumers"] - CONSUMERS_FIELD_NUMBER: _ClassVar[int] - consumers: _containers.RepeatedCompositeFieldContainer[ArtifactConsumer] - def __init__(self, consumers: _Optional[_Iterable[_Union[ArtifactConsumer, _Mapping]]] = ...) -> None: ... - -class RegisterResponse(_message.Message): - __slots__ = [] - def __init__(self) -> None: ... 
- -class ExecutionInputsRequest(_message.Message): - __slots__ = ["execution_id", "inputs"] - EXECUTION_ID_FIELD_NUMBER: _ClassVar[int] - INPUTS_FIELD_NUMBER: _ClassVar[int] - execution_id: _identifier_pb2.WorkflowExecutionIdentifier - inputs: _containers.RepeatedCompositeFieldContainer[_artifact_id_pb2.ArtifactID] - def __init__(self, execution_id: _Optional[_Union[_identifier_pb2.WorkflowExecutionIdentifier, _Mapping]] = ..., inputs: _Optional[_Iterable[_Union[_artifact_id_pb2.ArtifactID, _Mapping]]] = ...) -> None: ... - -class ExecutionInputsResponse(_message.Message): - __slots__ = [] - def __init__(self) -> None: ... diff --git a/flyteidl/gen/pb_python/flyteidl/artifact/artifacts_pb2_grpc.py b/flyteidl/gen/pb_python/flyteidl/artifact/artifacts_pb2_grpc.py deleted file mode 100644 index b989cdc000..0000000000 --- a/flyteidl/gen/pb_python/flyteidl/artifact/artifacts_pb2_grpc.py +++ /dev/null @@ -1,363 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from flyteidl.artifact import artifacts_pb2 as flyteidl_dot_artifact_dot_artifacts__pb2 - - -class ArtifactRegistryStub(object): - """Missing associated documentation comment in .proto file.""" - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.CreateArtifact = channel.unary_unary( - '/flyteidl.artifact.ArtifactRegistry/CreateArtifact', - request_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.CreateArtifactRequest.SerializeToString, - response_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.CreateArtifactResponse.FromString, - ) - self.GetArtifact = channel.unary_unary( - '/flyteidl.artifact.ArtifactRegistry/GetArtifact', - request_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.GetArtifactRequest.SerializeToString, - response_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.GetArtifactResponse.FromString, - ) - self.SearchArtifacts = channel.unary_unary( - '/flyteidl.artifact.ArtifactRegistry/SearchArtifacts', - request_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.SearchArtifactsRequest.SerializeToString, - response_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.SearchArtifactsResponse.FromString, - ) - self.CreateTrigger = channel.unary_unary( - '/flyteidl.artifact.ArtifactRegistry/CreateTrigger', - request_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.CreateTriggerRequest.SerializeToString, - response_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.CreateTriggerResponse.FromString, - ) - self.DeleteTrigger = channel.unary_unary( - '/flyteidl.artifact.ArtifactRegistry/DeleteTrigger', - request_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.DeleteTriggerRequest.SerializeToString, - response_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.DeleteTriggerResponse.FromString, - ) - self.AddTag = channel.unary_unary( - '/flyteidl.artifact.ArtifactRegistry/AddTag', - request_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.AddTagRequest.SerializeToString, - response_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.AddTagResponse.FromString, - ) - self.RegisterProducer = channel.unary_unary( - '/flyteidl.artifact.ArtifactRegistry/RegisterProducer', - 
request_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.RegisterProducerRequest.SerializeToString, - response_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.RegisterResponse.FromString, - ) - self.RegisterConsumer = channel.unary_unary( - '/flyteidl.artifact.ArtifactRegistry/RegisterConsumer', - request_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.RegisterConsumerRequest.SerializeToString, - response_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.RegisterResponse.FromString, - ) - self.SetExecutionInputs = channel.unary_unary( - '/flyteidl.artifact.ArtifactRegistry/SetExecutionInputs', - request_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.ExecutionInputsRequest.SerializeToString, - response_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.ExecutionInputsResponse.FromString, - ) - self.FindByWorkflowExec = channel.unary_unary( - '/flyteidl.artifact.ArtifactRegistry/FindByWorkflowExec', - request_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.FindByWorkflowExecRequest.SerializeToString, - response_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.SearchArtifactsResponse.FromString, - ) - - -class ArtifactRegistryServicer(object): - """Missing associated documentation comment in .proto file.""" - - def CreateArtifact(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetArtifact(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def SearchArtifacts(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not 
implemented!') - raise NotImplementedError('Method not implemented!') - - def CreateTrigger(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def DeleteTrigger(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def AddTag(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def RegisterProducer(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def RegisterConsumer(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def SetExecutionInputs(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def FindByWorkflowExec(self, request, context): - """Missing associated documentation comment in .proto file.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_ArtifactRegistryServicer_to_server(servicer, 
server): - rpc_method_handlers = { - 'CreateArtifact': grpc.unary_unary_rpc_method_handler( - servicer.CreateArtifact, - request_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.CreateArtifactRequest.FromString, - response_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.CreateArtifactResponse.SerializeToString, - ), - 'GetArtifact': grpc.unary_unary_rpc_method_handler( - servicer.GetArtifact, - request_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.GetArtifactRequest.FromString, - response_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.GetArtifactResponse.SerializeToString, - ), - 'SearchArtifacts': grpc.unary_unary_rpc_method_handler( - servicer.SearchArtifacts, - request_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.SearchArtifactsRequest.FromString, - response_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.SearchArtifactsResponse.SerializeToString, - ), - 'CreateTrigger': grpc.unary_unary_rpc_method_handler( - servicer.CreateTrigger, - request_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.CreateTriggerRequest.FromString, - response_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.CreateTriggerResponse.SerializeToString, - ), - 'DeleteTrigger': grpc.unary_unary_rpc_method_handler( - servicer.DeleteTrigger, - request_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.DeleteTriggerRequest.FromString, - response_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.DeleteTriggerResponse.SerializeToString, - ), - 'AddTag': grpc.unary_unary_rpc_method_handler( - servicer.AddTag, - request_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.AddTagRequest.FromString, - response_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.AddTagResponse.SerializeToString, - ), - 'RegisterProducer': grpc.unary_unary_rpc_method_handler( - servicer.RegisterProducer, - request_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.RegisterProducerRequest.FromString, - 
response_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.RegisterResponse.SerializeToString, - ), - 'RegisterConsumer': grpc.unary_unary_rpc_method_handler( - servicer.RegisterConsumer, - request_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.RegisterConsumerRequest.FromString, - response_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.RegisterResponse.SerializeToString, - ), - 'SetExecutionInputs': grpc.unary_unary_rpc_method_handler( - servicer.SetExecutionInputs, - request_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.ExecutionInputsRequest.FromString, - response_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.ExecutionInputsResponse.SerializeToString, - ), - 'FindByWorkflowExec': grpc.unary_unary_rpc_method_handler( - servicer.FindByWorkflowExec, - request_deserializer=flyteidl_dot_artifact_dot_artifacts__pb2.FindByWorkflowExecRequest.FromString, - response_serializer=flyteidl_dot_artifact_dot_artifacts__pb2.SearchArtifactsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'flyteidl.artifact.ArtifactRegistry', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - - # This class is part of an EXPERIMENTAL API. 
-class ArtifactRegistry(object): - """Missing associated documentation comment in .proto file.""" - - @staticmethod - def CreateArtifact(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/flyteidl.artifact.ArtifactRegistry/CreateArtifact', - flyteidl_dot_artifact_dot_artifacts__pb2.CreateArtifactRequest.SerializeToString, - flyteidl_dot_artifact_dot_artifacts__pb2.CreateArtifactResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - - @staticmethod - def GetArtifact(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/flyteidl.artifact.ArtifactRegistry/GetArtifact', - flyteidl_dot_artifact_dot_artifacts__pb2.GetArtifactRequest.SerializeToString, - flyteidl_dot_artifact_dot_artifacts__pb2.GetArtifactResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - - @staticmethod - def SearchArtifacts(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/flyteidl.artifact.ArtifactRegistry/SearchArtifacts', - flyteidl_dot_artifact_dot_artifacts__pb2.SearchArtifactsRequest.SerializeToString, - flyteidl_dot_artifact_dot_artifacts__pb2.SearchArtifactsResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - - @staticmethod - def CreateTrigger(request, - target, - options=(), - channel_credentials=None, - 
call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/flyteidl.artifact.ArtifactRegistry/CreateTrigger', - flyteidl_dot_artifact_dot_artifacts__pb2.CreateTriggerRequest.SerializeToString, - flyteidl_dot_artifact_dot_artifacts__pb2.CreateTriggerResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - - @staticmethod - def DeleteTrigger(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/flyteidl.artifact.ArtifactRegistry/DeleteTrigger', - flyteidl_dot_artifact_dot_artifacts__pb2.DeleteTriggerRequest.SerializeToString, - flyteidl_dot_artifact_dot_artifacts__pb2.DeleteTriggerResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - - @staticmethod - def AddTag(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/flyteidl.artifact.ArtifactRegistry/AddTag', - flyteidl_dot_artifact_dot_artifacts__pb2.AddTagRequest.SerializeToString, - flyteidl_dot_artifact_dot_artifacts__pb2.AddTagResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - - @staticmethod - def RegisterProducer(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/flyteidl.artifact.ArtifactRegistry/RegisterProducer', 
- flyteidl_dot_artifact_dot_artifacts__pb2.RegisterProducerRequest.SerializeToString, - flyteidl_dot_artifact_dot_artifacts__pb2.RegisterResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - - @staticmethod - def RegisterConsumer(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/flyteidl.artifact.ArtifactRegistry/RegisterConsumer', - flyteidl_dot_artifact_dot_artifacts__pb2.RegisterConsumerRequest.SerializeToString, - flyteidl_dot_artifact_dot_artifacts__pb2.RegisterResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - - @staticmethod - def SetExecutionInputs(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/flyteidl.artifact.ArtifactRegistry/SetExecutionInputs', - flyteidl_dot_artifact_dot_artifacts__pb2.ExecutionInputsRequest.SerializeToString, - flyteidl_dot_artifact_dot_artifacts__pb2.ExecutionInputsResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - - @staticmethod - def FindByWorkflowExec(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/flyteidl.artifact.ArtifactRegistry/FindByWorkflowExec', - flyteidl_dot_artifact_dot_artifacts__pb2.FindByWorkflowExecRequest.SerializeToString, - flyteidl_dot_artifact_dot_artifacts__pb2.SearchArtifactsResponse.FromString, - options, 
channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/flyteidl/gen/pb_python/flyteidl/core/artifact_id_pb2.py b/flyteidl/gen/pb_python/flyteidl/core/artifact_id_pb2.py index 9496d9c008..ff603987f3 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/artifact_id_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/core/artifact_id_pb2.py @@ -11,10 +11,11 @@ _sym_db = _symbol_database.Default() +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from flyteidl.core import identifier_pb2 as flyteidl_dot_core_dot_identifier__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x66lyteidl/core/artifact_id.proto\x12\rflyteidl.core\x1a\x1e\x66lyteidl/core/identifier.proto\"S\n\x0b\x41rtifactKey\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\"n\n\x13\x41rtifactBindingData\x12\x14\n\x05index\x18\x01 \x01(\rR\x05index\x12#\n\rpartition_key\x18\x02 \x01(\tR\x0cpartitionKey\x12\x1c\n\ttransform\x18\x03 \x01(\tR\ttransform\"$\n\x10InputBindingData\x12\x10\n\x03var\x18\x01 \x01(\tR\x03var\"\xd5\x01\n\nLabelValue\x12#\n\x0cstatic_value\x18\x01 \x01(\tH\x00R\x0bstaticValue\x12Q\n\x11triggered_binding\x18\x02 \x01(\x0b\x32\".flyteidl.core.ArtifactBindingDataH\x00R\x10triggeredBinding\x12\x46\n\rinput_binding\x18\x03 \x01(\x0b\x32\x1f.flyteidl.core.InputBindingDataH\x00R\x0cinputBindingB\x07\n\x05value\"\x9d\x01\n\nPartitions\x12:\n\x05value\x18\x01 \x03(\x0b\x32$.flyteidl.core.Partitions.ValueEntryR\x05value\x1aS\n\nValueEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12/\n\x05value\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LabelValueR\x05value:\x02\x38\x01\"\xb0\x01\n\nArtifactID\x12=\n\x0c\x61rtifact_key\x18\x01 \x01(\x0b\x32\x1a.flyteidl.core.ArtifactKeyR\x0b\x61rtifactKey\x12\x18\n\x07version\x18\x02 \x01(\tR\x07version\x12;\n\npartitions\x18\x03 
\x01(\x0b\x32\x19.flyteidl.core.PartitionsH\x00R\npartitionsB\x0c\n\ndimensions\"}\n\x0b\x41rtifactTag\x12=\n\x0c\x61rtifact_key\x18\x01 \x01(\x0b\x32\x1a.flyteidl.core.ArtifactKeyR\x0b\x61rtifactKey\x12/\n\x05value\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LabelValueR\x05value\"\xf0\x01\n\rArtifactQuery\x12<\n\x0b\x61rtifact_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.ArtifactIDH\x00R\nartifactId\x12?\n\x0c\x61rtifact_tag\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.ArtifactTagH\x00R\x0b\x61rtifactTag\x12\x12\n\x03uri\x18\x03 \x01(\tH\x00R\x03uri\x12>\n\x07\x62inding\x18\x04 \x01(\x0b\x32\".flyteidl.core.ArtifactBindingDataH\x00R\x07\x62indingB\x0c\n\nidentifier\"z\n\x07Trigger\x12\x38\n\ntrigger_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\ttriggerId\x12\x35\n\x08triggers\x18\x02 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x08triggersB\xb5\x01\n\x11\x63om.flyteidl.coreB\x0f\x41rtifactIdProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x66lyteidl/core/artifact_id.proto\x12\rflyteidl.core\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1e\x66lyteidl/core/identifier.proto\"S\n\x0b\x41rtifactKey\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\"\xb9\x01\n\x13\x41rtifactBindingData\x12\x14\n\x05index\x18\x01 \x01(\rR\x05index\x12%\n\rpartition_key\x18\x02 \x01(\tH\x00R\x0cpartitionKey\x12\x35\n\x16\x62ind_to_time_partition\x18\x03 \x01(\x08H\x00R\x13\x62indToTimePartition\x12\x1c\n\ttransform\x18\x04 \x01(\tR\ttransformB\x10\n\x0epartition_data\"$\n\x10InputBindingData\x12\x10\n\x03var\x18\x01 \x01(\tR\x03var\"\x92\x02\n\nLabelValue\x12#\n\x0cstatic_value\x18\x01 \x01(\tH\x00R\x0bstaticValue\x12;\n\ntime_value\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\ttimeValue\x12Q\n\x11triggered_binding\x18\x03 \x01(\x0b\x32\".flyteidl.core.ArtifactBindingDataH\x00R\x10triggeredBinding\x12\x46\n\rinput_binding\x18\x04 \x01(\x0b\x32\x1f.flyteidl.core.InputBindingDataH\x00R\x0cinputBindingB\x07\n\x05value\"\x9d\x01\n\nPartitions\x12:\n\x05value\x18\x01 \x03(\x0b\x32$.flyteidl.core.Partitions.ValueEntryR\x05value\x1aS\n\nValueEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12/\n\x05value\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LabelValueR\x05value:\x02\x38\x01\"@\n\rTimePartition\x12/\n\x05value\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.LabelValueR\x05value\"\xe5\x01\n\nArtifactID\x12=\n\x0c\x61rtifact_key\x18\x01 \x01(\x0b\x32\x1a.flyteidl.core.ArtifactKeyR\x0b\x61rtifactKey\x12\x18\n\x07version\x18\x02 \x01(\tR\x07version\x12\x39\n\npartitions\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.PartitionsR\npartitions\x12\x43\n\x0etime_partition\x18\x04 \x01(\x0b\x32\x1c.flyteidl.core.TimePartitionR\rtimePartition\"}\n\x0b\x41rtifactTag\x12=\n\x0c\x61rtifact_key\x18\x01 \x01(\x0b\x32\x1a.flyteidl.core.ArtifactKeyR\x0b\x61rtifactKey\x12/\n\x05value\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LabelValueR\x05value\"\xf0\x01\n\rArtifactQuery\x12<\n\x0b\x61rtifact_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.ArtifactIDH\x00R\nartifactId\x12?\n\x0c\x61rtifact_tag\x18\x02 \x01(\x0b\x32\x1a.flyteidl.core.ArtifactTagH\x00R\x0b\x61rtifactTag\x12\x12\n\x03uri\x18\x03 \x01(\tH\x00R\x03uri\x12>\n\x07\x62inding\x18\x04 \x01(\x0b\x32\".flyteidl.core.ArtifactBindingDataH\x00R\x07\x62indingB\x0c\n\nidentifierB\xb5\x01\n\x11\x63om.flyteidl.coreB\x0f\x41rtifactIdProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -25,24 +26,24 @@ DESCRIPTOR._serialized_options = 
b'\n\021com.flyteidl.coreB\017ArtifactIdProtoP\001Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\242\002\003FCX\252\002\rFlyteidl.Core\312\002\rFlyteidl\\Core\342\002\031Flyteidl\\Core\\GPBMetadata\352\002\016Flyteidl::Core' _PARTITIONS_VALUEENTRY._options = None _PARTITIONS_VALUEENTRY._serialized_options = b'8\001' - _globals['_ARTIFACTKEY']._serialized_start=82 - _globals['_ARTIFACTKEY']._serialized_end=165 - _globals['_ARTIFACTBINDINGDATA']._serialized_start=167 - _globals['_ARTIFACTBINDINGDATA']._serialized_end=277 - _globals['_INPUTBINDINGDATA']._serialized_start=279 - _globals['_INPUTBINDINGDATA']._serialized_end=315 - _globals['_LABELVALUE']._serialized_start=318 - _globals['_LABELVALUE']._serialized_end=531 - _globals['_PARTITIONS']._serialized_start=534 - _globals['_PARTITIONS']._serialized_end=691 - _globals['_PARTITIONS_VALUEENTRY']._serialized_start=608 - _globals['_PARTITIONS_VALUEENTRY']._serialized_end=691 - _globals['_ARTIFACTID']._serialized_start=694 - _globals['_ARTIFACTID']._serialized_end=870 - _globals['_ARTIFACTTAG']._serialized_start=872 - _globals['_ARTIFACTTAG']._serialized_end=997 - _globals['_ARTIFACTQUERY']._serialized_start=1000 - _globals['_ARTIFACTQUERY']._serialized_end=1240 - _globals['_TRIGGER']._serialized_start=1242 - _globals['_TRIGGER']._serialized_end=1364 + _globals['_ARTIFACTKEY']._serialized_start=115 + _globals['_ARTIFACTKEY']._serialized_end=198 + _globals['_ARTIFACTBINDINGDATA']._serialized_start=201 + _globals['_ARTIFACTBINDINGDATA']._serialized_end=386 + _globals['_INPUTBINDINGDATA']._serialized_start=388 + _globals['_INPUTBINDINGDATA']._serialized_end=424 + _globals['_LABELVALUE']._serialized_start=427 + _globals['_LABELVALUE']._serialized_end=701 + _globals['_PARTITIONS']._serialized_start=704 + _globals['_PARTITIONS']._serialized_end=861 + _globals['_PARTITIONS_VALUEENTRY']._serialized_start=778 + _globals['_PARTITIONS_VALUEENTRY']._serialized_end=861 + _globals['_TIMEPARTITION']._serialized_start=863 
+ _globals['_TIMEPARTITION']._serialized_end=927 + _globals['_ARTIFACTID']._serialized_start=930 + _globals['_ARTIFACTID']._serialized_end=1159 + _globals['_ARTIFACTTAG']._serialized_start=1161 + _globals['_ARTIFACTTAG']._serialized_end=1286 + _globals['_ARTIFACTQUERY']._serialized_start=1289 + _globals['_ARTIFACTQUERY']._serialized_end=1529 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/core/artifact_id_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/core/artifact_id_pb2.pyi index 4f65759ae7..53a9420bc1 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/artifact_id_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/core/artifact_id_pb2.pyi @@ -1,8 +1,9 @@ +from google.protobuf import timestamp_pb2 as _timestamp_pb2 from flyteidl.core import identifier_pb2 as _identifier_pb2 from google.protobuf.internal import containers as _containers from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union +from typing import ClassVar as _ClassVar, Mapping as _Mapping, Optional as _Optional, Union as _Union DESCRIPTOR: _descriptor.FileDescriptor @@ -17,14 +18,16 @@ class ArtifactKey(_message.Message): def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... class ArtifactBindingData(_message.Message): - __slots__ = ["index", "partition_key", "transform"] + __slots__ = ["index", "partition_key", "bind_to_time_partition", "transform"] INDEX_FIELD_NUMBER: _ClassVar[int] PARTITION_KEY_FIELD_NUMBER: _ClassVar[int] + BIND_TO_TIME_PARTITION_FIELD_NUMBER: _ClassVar[int] TRANSFORM_FIELD_NUMBER: _ClassVar[int] index: int partition_key: str + bind_to_time_partition: bool transform: str - def __init__(self, index: _Optional[int] = ..., partition_key: _Optional[str] = ..., transform: _Optional[str] = ...) -> None: ... 
+ def __init__(self, index: _Optional[int] = ..., partition_key: _Optional[str] = ..., bind_to_time_partition: bool = ..., transform: _Optional[str] = ...) -> None: ... class InputBindingData(_message.Message): __slots__ = ["var"] @@ -33,14 +36,16 @@ class InputBindingData(_message.Message): def __init__(self, var: _Optional[str] = ...) -> None: ... class LabelValue(_message.Message): - __slots__ = ["static_value", "triggered_binding", "input_binding"] + __slots__ = ["static_value", "time_value", "triggered_binding", "input_binding"] STATIC_VALUE_FIELD_NUMBER: _ClassVar[int] + TIME_VALUE_FIELD_NUMBER: _ClassVar[int] TRIGGERED_BINDING_FIELD_NUMBER: _ClassVar[int] INPUT_BINDING_FIELD_NUMBER: _ClassVar[int] static_value: str + time_value: _timestamp_pb2.Timestamp triggered_binding: ArtifactBindingData input_binding: InputBindingData - def __init__(self, static_value: _Optional[str] = ..., triggered_binding: _Optional[_Union[ArtifactBindingData, _Mapping]] = ..., input_binding: _Optional[_Union[InputBindingData, _Mapping]] = ...) -> None: ... + def __init__(self, static_value: _Optional[str] = ..., time_value: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., triggered_binding: _Optional[_Union[ArtifactBindingData, _Mapping]] = ..., input_binding: _Optional[_Union[InputBindingData, _Mapping]] = ...) -> None: ... class Partitions(_message.Message): __slots__ = ["value"] @@ -55,15 +60,23 @@ class Partitions(_message.Message): value: _containers.MessageMap[str, LabelValue] def __init__(self, value: _Optional[_Mapping[str, LabelValue]] = ...) -> None: ... +class TimePartition(_message.Message): + __slots__ = ["value"] + VALUE_FIELD_NUMBER: _ClassVar[int] + value: LabelValue + def __init__(self, value: _Optional[_Union[LabelValue, _Mapping]] = ...) -> None: ... 
+ class ArtifactID(_message.Message): - __slots__ = ["artifact_key", "version", "partitions"] + __slots__ = ["artifact_key", "version", "partitions", "time_partition"] ARTIFACT_KEY_FIELD_NUMBER: _ClassVar[int] VERSION_FIELD_NUMBER: _ClassVar[int] PARTITIONS_FIELD_NUMBER: _ClassVar[int] + TIME_PARTITION_FIELD_NUMBER: _ClassVar[int] artifact_key: ArtifactKey version: str partitions: Partitions - def __init__(self, artifact_key: _Optional[_Union[ArtifactKey, _Mapping]] = ..., version: _Optional[str] = ..., partitions: _Optional[_Union[Partitions, _Mapping]] = ...) -> None: ... + time_partition: TimePartition + def __init__(self, artifact_key: _Optional[_Union[ArtifactKey, _Mapping]] = ..., version: _Optional[str] = ..., partitions: _Optional[_Union[Partitions, _Mapping]] = ..., time_partition: _Optional[_Union[TimePartition, _Mapping]] = ...) -> None: ... class ArtifactTag(_message.Message): __slots__ = ["artifact_key", "value"] @@ -84,11 +97,3 @@ class ArtifactQuery(_message.Message): uri: str binding: ArtifactBindingData def __init__(self, artifact_id: _Optional[_Union[ArtifactID, _Mapping]] = ..., artifact_tag: _Optional[_Union[ArtifactTag, _Mapping]] = ..., uri: _Optional[str] = ..., binding: _Optional[_Union[ArtifactBindingData, _Mapping]] = ...) -> None: ... - -class Trigger(_message.Message): - __slots__ = ["trigger_id", "triggers"] - TRIGGER_ID_FIELD_NUMBER: _ClassVar[int] - TRIGGERS_FIELD_NUMBER: _ClassVar[int] - trigger_id: _identifier_pb2.Identifier - triggers: _containers.RepeatedCompositeFieldContainer[ArtifactID] - def __init__(self, trigger_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., triggers: _Optional[_Iterable[_Union[ArtifactID, _Mapping]]] = ...) -> None: ... 
diff --git a/flyteidl/gen/pb_python/flyteidl/core/catalog_pb2.py b/flyteidl/gen/pb_python/flyteidl/core/catalog_pb2.py index dc72c89d9f..2635c044db 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/catalog_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/core/catalog_pb2.py @@ -14,7 +14,7 @@ from flyteidl.core import identifier_pb2 as flyteidl_dot_core_dot_identifier__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66lyteidl/core/catalog.proto\x12\rflyteidl.core\x1a\x1e\x66lyteidl/core/identifier.proto\"I\n\x12\x43\x61talogArtifactTag\x12\x1f\n\x0b\x61rtifact_id\x18\x01 \x01(\tR\nartifactId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\"\x83\x02\n\x0f\x43\x61talogMetadata\x12\x38\n\ndataset_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\tdatasetId\x12\x44\n\x0c\x61rtifact_tag\x18\x02 \x01(\x0b\x32!.flyteidl.core.CatalogArtifactTagR\x0b\x61rtifactTag\x12\\\n\x15source_task_execution\x18\x03 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierH\x00R\x13sourceTaskExecutionB\x12\n\x10source_execution\"\x9e\x01\n\x12\x43\x61talogReservation\"\x87\x01\n\x06Status\x12\x18\n\x14RESERVATION_DISABLED\x10\x00\x12\x18\n\x14RESERVATION_ACQUIRED\x10\x01\x12\x16\n\x12RESERVATION_EXISTS\x10\x02\x12\x18\n\x14RESERVATION_RELEASED\x10\x03\x12\x17\n\x13RESERVATION_FAILURE\x10\x04*\xa0\x01\n\x12\x43\x61talogCacheStatus\x12\x12\n\x0e\x43\x41\x43HE_DISABLED\x10\x00\x12\x0e\n\nCACHE_MISS\x10\x01\x12\r\n\tCACHE_HIT\x10\x02\x12\x13\n\x0f\x43\x41\x43HE_POPULATED\x10\x03\x12\x18\n\x14\x43\x41\x43HE_LOOKUP_FAILURE\x10\x04\x12\x15\n\x11\x43\x41\x43HE_PUT_FAILURE\x10\x05\x12\x11\n\rCACHE_SKIPPED\x10\x06\x42\xb2\x01\n\x11\x63om.flyteidl.coreB\x0c\x43\x61talogProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66lyteidl/core/catalog.proto\x12\rflyteidl.core\x1a\x1e\x66lyteidl/core/identifier.proto\"I\n\x12\x43\x61talogArtifactTag\x12\x1f\n\x0b\x61rtifact_id\x18\x01 \x01(\tR\nartifactId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\"\x83\x02\n\x0f\x43\x61talogMetadata\x12\x38\n\ndataset_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\tdatasetId\x12\x44\n\x0c\x61rtifact_tag\x18\x02 \x01(\x0b\x32!.flyteidl.core.CatalogArtifactTagR\x0b\x61rtifactTag\x12\\\n\x15source_task_execution\x18\x03 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierH\x00R\x13sourceTaskExecutionB\x12\n\x10source_execution\"\x9e\x01\n\x12\x43\x61talogReservation\"\x87\x01\n\x06Status\x12\x18\n\x14RESERVATION_DISABLED\x10\x00\x12\x18\n\x14RESERVATION_ACQUIRED\x10\x01\x12\x16\n\x12RESERVATION_EXISTS\x10\x02\x12\x18\n\x14RESERVATION_RELEASED\x10\x03\x12\x17\n\x13RESERVATION_FAILURE\x10\x04*\xb3\x01\n\x12\x43\x61talogCacheStatus\x12\x12\n\x0e\x43\x41\x43HE_DISABLED\x10\x00\x12\x0e\n\nCACHE_MISS\x10\x01\x12\r\n\tCACHE_HIT\x10\x02\x12\x13\n\x0f\x43\x41\x43HE_POPULATED\x10\x03\x12\x18\n\x14\x43\x41\x43HE_LOOKUP_FAILURE\x10\x04\x12\x15\n\x11\x43\x41\x43HE_PUT_FAILURE\x10\x05\x12\x11\n\rCACHE_SKIPPED\x10\x06\x12\x11\n\rCACHE_EVICTED\x10\x07\x42\xb2\x01\n\x11\x63om.flyteidl.coreB\x0c\x43\x61talogProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -24,7 +24,7 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'\n\021com.flyteidl.coreB\014CatalogProtoP\001Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\242\002\003FCX\252\002\rFlyteidl.Core\312\002\rFlyteidl\\Core\342\002\031Flyteidl\\Core\\GPBMetadata\352\002\016Flyteidl::Core' 
_globals['_CATALOGCACHESTATUS']._serialized_start=577 - _globals['_CATALOGCACHESTATUS']._serialized_end=737 + _globals['_CATALOGCACHESTATUS']._serialized_end=756 _globals['_CATALOGARTIFACTTAG']._serialized_start=78 _globals['_CATALOGARTIFACTTAG']._serialized_end=151 _globals['_CATALOGMETADATA']._serialized_start=154 diff --git a/flyteidl/gen/pb_python/flyteidl/core/catalog_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/core/catalog_pb2.pyi index 90acf7d24b..f28fadcccd 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/catalog_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/core/catalog_pb2.pyi @@ -15,6 +15,7 @@ class CatalogCacheStatus(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): CACHE_LOOKUP_FAILURE: _ClassVar[CatalogCacheStatus] CACHE_PUT_FAILURE: _ClassVar[CatalogCacheStatus] CACHE_SKIPPED: _ClassVar[CatalogCacheStatus] + CACHE_EVICTED: _ClassVar[CatalogCacheStatus] CACHE_DISABLED: CatalogCacheStatus CACHE_MISS: CatalogCacheStatus CACHE_HIT: CatalogCacheStatus @@ -22,6 +23,7 @@ CACHE_POPULATED: CatalogCacheStatus CACHE_LOOKUP_FAILURE: CatalogCacheStatus CACHE_PUT_FAILURE: CatalogCacheStatus CACHE_SKIPPED: CatalogCacheStatus +CACHE_EVICTED: CatalogCacheStatus class CatalogArtifactTag(_message.Message): __slots__ = ["artifact_id", "name"] diff --git a/flyteidl/gen/pb_python/flyteidl/core/identifier_pb2.py b/flyteidl/gen/pb_python/flyteidl/core/identifier_pb2.py index bcb507c1cb..5068f3edb4 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/identifier_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/core/identifier_pb2.py @@ -13,7 +13,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x66lyteidl/core/identifier.proto\x12\rflyteidl.core\"\xae\x01\n\nIdentifier\x12@\n\rresource_type\x18\x01 \x01(\x0e\x32\x1b.flyteidl.core.ResourceTypeR\x0cresourceType\x12\x18\n\x07project\x18\x02 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x03 \x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x18\n\x07version\x18\x05 
\x01(\tR\x07version\"c\n\x1bWorkflowExecutionIdentifier\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\"\x81\x01\n\x17NodeExecutionIdentifier\x12\x17\n\x07node_id\x18\x01 \x01(\tR\x06nodeId\x12M\n\x0c\x65xecution_id\x18\x02 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\"\xc6\x01\n\x17TaskExecutionIdentifier\x12\x32\n\x07task_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x06taskId\x12R\n\x11node_execution_id\x18\x02 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x0fnodeExecutionId\x12#\n\rretry_attempt\x18\x03 \x01(\rR\x0cretryAttempt\"~\n\x10SignalIdentifier\x12\x1b\n\tsignal_id\x18\x01 \x01(\tR\x08signalId\x12M\n\x0c\x65xecution_id\x18\x02 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId*U\n\x0cResourceType\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x08\n\x04TASK\x10\x01\x12\x0c\n\x08WORKFLOW\x10\x02\x12\x0f\n\x0bLAUNCH_PLAN\x10\x03\x12\x0b\n\x07\x44\x41TASET\x10\x04\x42\xb5\x01\n\x11\x63om.flyteidl.coreB\x0fIdentifierProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1e\x66lyteidl/core/identifier.proto\x12\rflyteidl.core\"\xc0\x01\n\nIdentifier\x12@\n\rresource_type\x18\x01 \x01(\x0e\x32\x1b.flyteidl.core.ResourceTypeR\x0cresourceType\x12\x18\n\x07project\x18\x02 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x03 \x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x18\n\x07version\x18\x05 \x01(\tR\x07version\x12\x10\n\x03org\x18\x06 \x01(\tR\x03org\"u\n\x1bWorkflowExecutionIdentifier\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x16\n\x06\x64omain\x18\x02 \x01(\tR\x06\x64omain\x12\x12\n\x04name\x18\x04 \x01(\tR\x04name\x12\x10\n\x03org\x18\x05 
\x01(\tR\x03org\"\x81\x01\n\x17NodeExecutionIdentifier\x12\x17\n\x07node_id\x18\x01 \x01(\tR\x06nodeId\x12M\n\x0c\x65xecution_id\x18\x02 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\"\xc6\x01\n\x17TaskExecutionIdentifier\x12\x32\n\x07task_id\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x06taskId\x12R\n\x11node_execution_id\x18\x02 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierR\x0fnodeExecutionId\x12#\n\rretry_attempt\x18\x03 \x01(\rR\x0cretryAttempt\"~\n\x10SignalIdentifier\x12\x1b\n\tsignal_id\x18\x01 \x01(\tR\x08signalId\x12M\n\x0c\x65xecution_id\x18\x02 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId*U\n\x0cResourceType\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x08\n\x04TASK\x10\x01\x12\x0c\n\x08WORKFLOW\x10\x02\x12\x0f\n\x0bLAUNCH_PLAN\x10\x03\x12\x0b\n\x07\x44\x41TASET\x10\x04\x42\xb5\x01\n\x11\x63om.flyteidl.coreB\x0fIdentifierProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -22,16 +22,16 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'\n\021com.flyteidl.coreB\017IdentifierProtoP\001Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\242\002\003FCX\252\002\rFlyteidl.Core\312\002\rFlyteidl\\Core\342\002\031Flyteidl\\Core\\GPBMetadata\352\002\016Flyteidl::Core' - _globals['_RESOURCETYPE']._serialized_start=788 - _globals['_RESOURCETYPE']._serialized_end=873 + _globals['_RESOURCETYPE']._serialized_start=824 + _globals['_RESOURCETYPE']._serialized_end=909 _globals['_IDENTIFIER']._serialized_start=50 - _globals['_IDENTIFIER']._serialized_end=224 - _globals['_WORKFLOWEXECUTIONIDENTIFIER']._serialized_start=226 - _globals['_WORKFLOWEXECUTIONIDENTIFIER']._serialized_end=325 - 
_globals['_NODEEXECUTIONIDENTIFIER']._serialized_start=328 - _globals['_NODEEXECUTIONIDENTIFIER']._serialized_end=457 - _globals['_TASKEXECUTIONIDENTIFIER']._serialized_start=460 - _globals['_TASKEXECUTIONIDENTIFIER']._serialized_end=658 - _globals['_SIGNALIDENTIFIER']._serialized_start=660 - _globals['_SIGNALIDENTIFIER']._serialized_end=786 + _globals['_IDENTIFIER']._serialized_end=242 + _globals['_WORKFLOWEXECUTIONIDENTIFIER']._serialized_start=244 + _globals['_WORKFLOWEXECUTIONIDENTIFIER']._serialized_end=361 + _globals['_NODEEXECUTIONIDENTIFIER']._serialized_start=364 + _globals['_NODEEXECUTIONIDENTIFIER']._serialized_end=493 + _globals['_TASKEXECUTIONIDENTIFIER']._serialized_start=496 + _globals['_TASKEXECUTIONIDENTIFIER']._serialized_end=694 + _globals['_SIGNALIDENTIFIER']._serialized_start=696 + _globals['_SIGNALIDENTIFIER']._serialized_end=822 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/core/identifier_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/core/identifier_pb2.pyi index 7adea46343..5d3857195e 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/identifier_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/core/identifier_pb2.pyi @@ -19,28 +19,32 @@ LAUNCH_PLAN: ResourceType DATASET: ResourceType class Identifier(_message.Message): - __slots__ = ["resource_type", "project", "domain", "name", "version"] + __slots__ = ["resource_type", "project", "domain", "name", "version", "org"] RESOURCE_TYPE_FIELD_NUMBER: _ClassVar[int] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] VERSION_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] resource_type: ResourceType project: str domain: str name: str version: str - def __init__(self, resource_type: _Optional[_Union[ResourceType, str]] = ..., project: _Optional[str] = ..., domain: _Optional[str] = ..., name: _Optional[str] = ..., version: _Optional[str] = ...) -> None: ... 
+ org: str + def __init__(self, resource_type: _Optional[_Union[ResourceType, str]] = ..., project: _Optional[str] = ..., domain: _Optional[str] = ..., name: _Optional[str] = ..., version: _Optional[str] = ..., org: _Optional[str] = ...) -> None: ... class WorkflowExecutionIdentifier(_message.Message): - __slots__ = ["project", "domain", "name"] + __slots__ = ["project", "domain", "name", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str domain: str name: str - def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., domain: _Optional[str] = ..., name: _Optional[str] = ..., org: _Optional[str] = ...) -> None: ... class NodeExecutionIdentifier(_message.Message): __slots__ = ["node_id", "execution_id"] diff --git a/flyteidl/gen/pb_python/flyteidl/core/metrics_pb2.py b/flyteidl/gen/pb_python/flyteidl/core/metrics_pb2.py index 1dc42f4b3c..4624badde3 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/metrics_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/core/metrics_pb2.py @@ -13,9 +13,10 @@ from flyteidl.core import identifier_pb2 as flyteidl_dot_core_dot_identifier__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66lyteidl/core/metrics.proto\x12\rflyteidl.core\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xa3\x03\n\x04Span\x12\x39\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\x12M\n\x0bworkflow_id\x18\x03 
\x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierH\x00R\nworkflowId\x12\x41\n\x07node_id\x18\x04 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierH\x00R\x06nodeId\x12\x41\n\x07task_id\x18\x05 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierH\x00R\x06taskId\x12#\n\x0coperation_id\x18\x06 \x01(\tH\x00R\x0boperationId\x12)\n\x05spans\x18\x07 \x03(\x0b\x32\x13.flyteidl.core.SpanR\x05spansB\x04\n\x02idB\xb2\x01\n\x11\x63om.flyteidl.coreB\x0cMetricsProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1b\x66lyteidl/core/metrics.proto\x12\rflyteidl.core\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\xa3\x03\n\x04Span\x12\x39\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\x12M\n\x0bworkflow_id\x18\x03 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierH\x00R\nworkflowId\x12\x41\n\x07node_id\x18\x04 \x01(\x0b\x32&.flyteidl.core.NodeExecutionIdentifierH\x00R\x06nodeId\x12\x41\n\x07task_id\x18\x05 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierH\x00R\x06taskId\x12#\n\x0coperation_id\x18\x06 \x01(\tH\x00R\x0boperationId\x12)\n\x05spans\x18\x07 \x03(\x0b\x32\x13.flyteidl.core.SpanR\x05spansB\x04\n\x02id\"\\\n\x15\x45xecutionMetricResult\x12\x16\n\x06metric\x18\x01 \x01(\tR\x06metric\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.google.protobuf.StructR\x04\x64\x61taB\xb2\x01\n\x11\x63om.flyteidl.coreB\x0cMetricsProtoP\x01Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\xa2\x02\x03\x46\x43X\xaa\x02\rFlyteidl.Core\xca\x02\rFlyteidl\\Core\xe2\x02\x19\x46lyteidl\\Core\\GPBMetadata\xea\x02\x0e\x46lyteidl::Coreb\x06proto3') 
_globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -24,6 +25,8 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'\n\021com.flyteidl.coreB\014MetricsProtoP\001Z:github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core\242\002\003FCX\252\002\rFlyteidl.Core\312\002\rFlyteidl\\Core\342\002\031Flyteidl\\Core\\GPBMetadata\352\002\016Flyteidl::Core' - _globals['_SPAN']._serialized_start=112 - _globals['_SPAN']._serialized_end=531 + _globals['_SPAN']._serialized_start=142 + _globals['_SPAN']._serialized_end=561 + _globals['_EXECUTIONMETRICRESULT']._serialized_start=563 + _globals['_EXECUTIONMETRICRESULT']._serialized_end=655 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/core/metrics_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/core/metrics_pb2.pyi index d529fd64e5..5e3c7d871e 100644 --- a/flyteidl/gen/pb_python/flyteidl/core/metrics_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/core/metrics_pb2.pyi @@ -1,5 +1,6 @@ from flyteidl.core import identifier_pb2 as _identifier_pb2 from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from google.protobuf import struct_pb2 as _struct_pb2 from google.protobuf.internal import containers as _containers from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message @@ -24,3 +25,11 @@ class Span(_message.Message): operation_id: str spans: _containers.RepeatedCompositeFieldContainer[Span] def __init__(self, start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., workflow_id: _Optional[_Union[_identifier_pb2.WorkflowExecutionIdentifier, _Mapping]] = ..., node_id: _Optional[_Union[_identifier_pb2.NodeExecutionIdentifier, _Mapping]] = ..., task_id: _Optional[_Union[_identifier_pb2.TaskExecutionIdentifier, _Mapping]] = ..., operation_id: _Optional[str] = ..., spans: _Optional[_Iterable[_Union[Span, _Mapping]]] = ...) 
-> None: ... + +class ExecutionMetricResult(_message.Message): + __slots__ = ["metric", "data"] + METRIC_FIELD_NUMBER: _ClassVar[int] + DATA_FIELD_NUMBER: _ClassVar[int] + metric: str + data: _struct_pb2.Struct + def __init__(self, metric: _Optional[str] = ..., data: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... diff --git a/flyteidl/gen/pb_python/flyteidl/datacatalog/datacatalog_pb2.py b/flyteidl/gen/pb_python/flyteidl/datacatalog/datacatalog_pb2.py index 1de3b8a96e..c5699b3c85 100644 --- a/flyteidl/gen/pb_python/flyteidl/datacatalog/datacatalog_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/datacatalog/datacatalog_pb2.py @@ -16,7 +16,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&flyteidl/datacatalog/datacatalog.proto\x12\x0b\x64\x61tacatalog\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"F\n\x14\x43reateDatasetRequest\x12.\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x14.datacatalog.DatasetR\x07\x64\x61taset\"\x17\n\x15\x43reateDatasetResponse\"E\n\x11GetDatasetRequest\x12\x30\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\"D\n\x12GetDatasetResponse\x12.\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x14.datacatalog.DatasetR\x07\x64\x61taset\"\x96\x01\n\x12GetArtifactRequest\x12\x30\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\x12!\n\x0b\x61rtifact_id\x18\x02 \x01(\tH\x00R\nartifactId\x12\x1b\n\x08tag_name\x18\x03 \x01(\tH\x00R\x07tagNameB\x0e\n\x0cquery_handle\"H\n\x13GetArtifactResponse\x12\x31\n\x08\x61rtifact\x18\x01 \x01(\x0b\x32\x15.datacatalog.ArtifactR\x08\x61rtifact\"J\n\x15\x43reateArtifactRequest\x12\x31\n\x08\x61rtifact\x18\x01 \x01(\x0b\x32\x15.datacatalog.ArtifactR\x08\x61rtifact\"\x18\n\x16\x43reateArtifactResponse\"3\n\rAddTagRequest\x12\"\n\x03tag\x18\x01 
\x01(\x0b\x32\x10.datacatalog.TagR\x03tag\"\x10\n\x0e\x41\x64\x64TagResponse\"\xbf\x01\n\x14ListArtifactsRequest\x12\x30\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\x12\x35\n\x06\x66ilter\x18\x02 \x01(\x0b\x32\x1d.datacatalog.FilterExpressionR\x06\x66ilter\x12>\n\npagination\x18\x03 \x01(\x0b\x32\x1e.datacatalog.PaginationOptionsR\npagination\"k\n\x15ListArtifactsResponse\x12\x33\n\tartifacts\x18\x01 \x03(\x0b\x32\x15.datacatalog.ArtifactR\tartifacts\x12\x1d\n\nnext_token\x18\x02 \x01(\tR\tnextToken\"\x8c\x01\n\x13ListDatasetsRequest\x12\x35\n\x06\x66ilter\x18\x01 \x01(\x0b\x32\x1d.datacatalog.FilterExpressionR\x06\x66ilter\x12>\n\npagination\x18\x02 \x01(\x0b\x32\x1e.datacatalog.PaginationOptionsR\npagination\"g\n\x14ListDatasetsResponse\x12\x30\n\x08\x64\x61tasets\x18\x01 \x03(\x0b\x32\x14.datacatalog.DatasetR\x08\x64\x61tasets\x12\x1d\n\nnext_token\x18\x02 \x01(\tR\tnextToken\"\xfb\x01\n\x15UpdateArtifactRequest\x12\x30\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\x12!\n\x0b\x61rtifact_id\x18\x02 \x01(\tH\x00R\nartifactId\x12\x1b\n\x08tag_name\x18\x03 \x01(\tH\x00R\x07tagName\x12-\n\x04\x64\x61ta\x18\x04 \x03(\x0b\x32\x19.datacatalog.ArtifactDataR\x04\x64\x61ta\x12\x31\n\x08metadata\x18\x05 \x01(\x0b\x32\x15.datacatalog.MetadataR\x08metadataB\x0e\n\x0cquery_handle\"9\n\x16UpdateArtifactResponse\x12\x1f\n\x0b\x61rtifact_id\x18\x01 \x01(\tR\nartifactId\"a\n\rReservationID\x12\x35\n\ndataset_id\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\tdatasetId\x12\x19\n\x08tag_name\x18\x02 \x01(\tR\x07tagName\"\xc7\x01\n\x1dGetOrExtendReservationRequest\x12\x41\n\x0ereservation_id\x18\x01 \x01(\x0b\x32\x1a.datacatalog.ReservationIDR\rreservationId\x12\x19\n\x08owner_id\x18\x02 \x01(\tR\x07ownerId\x12H\n\x12heartbeat_interval\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationR\x11heartbeatInterval\"\xa3\x02\n\x0bReservation\x12\x41\n\x0ereservation_id\x18\x01 
\x01(\x0b\x32\x1a.datacatalog.ReservationIDR\rreservationId\x12\x19\n\x08owner_id\x18\x02 \x01(\tR\x07ownerId\x12H\n\x12heartbeat_interval\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationR\x11heartbeatInterval\x12\x39\n\nexpires_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\texpiresAt\x12\x31\n\x08metadata\x18\x06 \x01(\x0b\x32\x15.datacatalog.MetadataR\x08metadata\"\\\n\x1eGetOrExtendReservationResponse\x12:\n\x0breservation\x18\x01 \x01(\x0b\x32\x18.datacatalog.ReservationR\x0breservation\"y\n\x19ReleaseReservationRequest\x12\x41\n\x0ereservation_id\x18\x01 \x01(\x0b\x32\x1a.datacatalog.ReservationIDR\rreservationId\x12\x19\n\x08owner_id\x18\x02 \x01(\tR\x07ownerId\"\x1c\n\x1aReleaseReservationResponse\"\x8a\x01\n\x07\x44\x61taset\x12&\n\x02id\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x02id\x12\x31\n\x08metadata\x18\x02 \x01(\x0b\x32\x15.datacatalog.MetadataR\x08metadata\x12$\n\rpartitionKeys\x18\x03 \x03(\tR\rpartitionKeys\"3\n\tPartition\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\"\x7f\n\tDatasetID\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x16\n\x06\x64omain\x18\x03 \x01(\tR\x06\x64omain\x12\x18\n\x07version\x18\x04 \x01(\tR\x07version\x12\x12\n\x04UUID\x18\x05 \x01(\tR\x04UUID\"\xc7\x02\n\x08\x41rtifact\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x30\n\x07\x64\x61taset\x18\x02 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\x12-\n\x04\x64\x61ta\x18\x03 \x03(\x0b\x32\x19.datacatalog.ArtifactDataR\x04\x64\x61ta\x12\x31\n\x08metadata\x18\x04 \x01(\x0b\x32\x15.datacatalog.MetadataR\x08metadata\x12\x36\n\npartitions\x18\x05 \x03(\x0b\x32\x16.datacatalog.PartitionR\npartitions\x12$\n\x04tags\x18\x06 \x03(\x0b\x32\x10.datacatalog.TagR\x04tags\x12\x39\n\ncreated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\"P\n\x0c\x41rtifactData\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12,\n\x05value\x18\x02 
\x01(\x0b\x32\x16.flyteidl.core.LiteralR\x05value\"l\n\x03Tag\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1f\n\x0b\x61rtifact_id\x18\x02 \x01(\tR\nartifactId\x12\x30\n\x07\x64\x61taset\x18\x03 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\"\x81\x01\n\x08Metadata\x12:\n\x07key_map\x18\x01 \x03(\x0b\x32!.datacatalog.Metadata.KeyMapEntryR\x06keyMap\x1a\x39\n\x0bKeyMapEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"O\n\x10\x46ilterExpression\x12;\n\x07\x66ilters\x18\x01 \x03(\x0b\x32!.datacatalog.SinglePropertyFilterR\x07\x66ilters\"\xce\x03\n\x14SinglePropertyFilter\x12?\n\ntag_filter\x18\x01 \x01(\x0b\x32\x1e.datacatalog.TagPropertyFilterH\x00R\ttagFilter\x12Q\n\x10partition_filter\x18\x02 \x01(\x0b\x32$.datacatalog.PartitionPropertyFilterH\x00R\x0fpartitionFilter\x12N\n\x0f\x61rtifact_filter\x18\x03 \x01(\x0b\x32#.datacatalog.ArtifactPropertyFilterH\x00R\x0e\x61rtifactFilter\x12K\n\x0e\x64\x61taset_filter\x18\x04 \x01(\x0b\x32\".datacatalog.DatasetPropertyFilterH\x00R\rdatasetFilter\x12P\n\x08operator\x18\n \x01(\x0e\x32\x34.datacatalog.SinglePropertyFilter.ComparisonOperatorR\x08operator\" \n\x12\x43omparisonOperator\x12\n\n\x06\x45QUALS\x10\x00\x42\x11\n\x0fproperty_filter\"G\n\x16\x41rtifactPropertyFilter\x12!\n\x0b\x61rtifact_id\x18\x01 \x01(\tH\x00R\nartifactIdB\n\n\x08property\"<\n\x11TagPropertyFilter\x12\x1b\n\x08tag_name\x18\x01 \x01(\tH\x00R\x07tagNameB\n\n\x08property\"[\n\x17PartitionPropertyFilter\x12\x34\n\x07key_val\x18\x01 \x01(\x0b\x32\x19.datacatalog.KeyValuePairH\x00R\x06keyValB\n\n\x08property\"6\n\x0cKeyValuePair\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\"\x8b\x01\n\x15\x44\x61tasetPropertyFilter\x12\x1a\n\x07project\x18\x01 \x01(\tH\x00R\x07project\x12\x14\n\x04name\x18\x02 \x01(\tH\x00R\x04name\x12\x18\n\x06\x64omain\x18\x03 \x01(\tH\x00R\x06\x64omain\x12\x1a\n\x07version\x18\x04 
\x01(\tH\x00R\x07versionB\n\n\x08property\"\x93\x02\n\x11PaginationOptions\x12\x14\n\x05limit\x18\x01 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\x12@\n\x07sortKey\x18\x03 \x01(\x0e\x32&.datacatalog.PaginationOptions.SortKeyR\x07sortKey\x12\x46\n\tsortOrder\x18\x04 \x01(\x0e\x32(.datacatalog.PaginationOptions.SortOrderR\tsortOrder\"*\n\tSortOrder\x12\x0e\n\nDESCENDING\x10\x00\x12\r\n\tASCENDING\x10\x01\"\x1c\n\x07SortKey\x12\x11\n\rCREATION_TIME\x10\x00\x32\x86\x07\n\x0b\x44\x61taCatalog\x12V\n\rCreateDataset\x12!.datacatalog.CreateDatasetRequest\x1a\".datacatalog.CreateDatasetResponse\x12M\n\nGetDataset\x12\x1e.datacatalog.GetDatasetRequest\x1a\x1f.datacatalog.GetDatasetResponse\x12Y\n\x0e\x43reateArtifact\x12\".datacatalog.CreateArtifactRequest\x1a#.datacatalog.CreateArtifactResponse\x12P\n\x0bGetArtifact\x12\x1f.datacatalog.GetArtifactRequest\x1a .datacatalog.GetArtifactResponse\x12\x41\n\x06\x41\x64\x64Tag\x12\x1a.datacatalog.AddTagRequest\x1a\x1b.datacatalog.AddTagResponse\x12V\n\rListArtifacts\x12!.datacatalog.ListArtifactsRequest\x1a\".datacatalog.ListArtifactsResponse\x12S\n\x0cListDatasets\x12 .datacatalog.ListDatasetsRequest\x1a!.datacatalog.ListDatasetsResponse\x12Y\n\x0eUpdateArtifact\x12\".datacatalog.UpdateArtifactRequest\x1a#.datacatalog.UpdateArtifactResponse\x12q\n\x16GetOrExtendReservation\x12*.datacatalog.GetOrExtendReservationRequest\x1a+.datacatalog.GetOrExtendReservationResponse\x12\x65\n\x12ReleaseReservation\x12&.datacatalog.ReleaseReservationRequest\x1a\'.datacatalog.ReleaseReservationResponseB\xb2\x01\n\x0f\x63om.datacatalogB\x10\x44\x61tacatalogProtoP\x01ZAgithub.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/datacatalog\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tacatalog\xca\x02\x0b\x44\x61tacatalog\xe2\x02\x17\x44\x61tacatalog\\GPBMetadata\xea\x02\x0b\x44\x61tacatalogb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n&flyteidl/datacatalog/datacatalog.proto\x12\x0b\x64\x61tacatalog\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"F\n\x14\x43reateDatasetRequest\x12.\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x14.datacatalog.DatasetR\x07\x64\x61taset\"\x17\n\x15\x43reateDatasetResponse\"E\n\x11GetDatasetRequest\x12\x30\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\"D\n\x12GetDatasetResponse\x12.\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x14.datacatalog.DatasetR\x07\x64\x61taset\"\x96\x01\n\x12GetArtifactRequest\x12\x30\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\x12!\n\x0b\x61rtifact_id\x18\x02 \x01(\tH\x00R\nartifactId\x12\x1b\n\x08tag_name\x18\x03 \x01(\tH\x00R\x07tagNameB\x0e\n\x0cquery_handle\"H\n\x13GetArtifactResponse\x12\x31\n\x08\x61rtifact\x18\x01 \x01(\x0b\x32\x15.datacatalog.ArtifactR\x08\x61rtifact\"J\n\x15\x43reateArtifactRequest\x12\x31\n\x08\x61rtifact\x18\x01 \x01(\x0b\x32\x15.datacatalog.ArtifactR\x08\x61rtifact\"\x18\n\x16\x43reateArtifactResponse\"3\n\rAddTagRequest\x12\"\n\x03tag\x18\x01 \x01(\x0b\x32\x10.datacatalog.TagR\x03tag\"\x10\n\x0e\x41\x64\x64TagResponse\"\xbf\x01\n\x14ListArtifactsRequest\x12\x30\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\x12\x35\n\x06\x66ilter\x18\x02 \x01(\x0b\x32\x1d.datacatalog.FilterExpressionR\x06\x66ilter\x12>\n\npagination\x18\x03 \x01(\x0b\x32\x1e.datacatalog.PaginationOptionsR\npagination\"k\n\x15ListArtifactsResponse\x12\x33\n\tartifacts\x18\x01 \x03(\x0b\x32\x15.datacatalog.ArtifactR\tartifacts\x12\x1d\n\nnext_token\x18\x02 \x01(\tR\tnextToken\"\x8c\x01\n\x13ListDatasetsRequest\x12\x35\n\x06\x66ilter\x18\x01 \x01(\x0b\x32\x1d.datacatalog.FilterExpressionR\x06\x66ilter\x12>\n\npagination\x18\x02 
\x01(\x0b\x32\x1e.datacatalog.PaginationOptionsR\npagination\"g\n\x14ListDatasetsResponse\x12\x30\n\x08\x64\x61tasets\x18\x01 \x03(\x0b\x32\x14.datacatalog.DatasetR\x08\x64\x61tasets\x12\x1d\n\nnext_token\x18\x02 \x01(\tR\tnextToken\"\xfb\x01\n\x15UpdateArtifactRequest\x12\x30\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\x12!\n\x0b\x61rtifact_id\x18\x02 \x01(\tH\x00R\nartifactId\x12\x1b\n\x08tag_name\x18\x03 \x01(\tH\x00R\x07tagName\x12-\n\x04\x64\x61ta\x18\x04 \x03(\x0b\x32\x19.datacatalog.ArtifactDataR\x04\x64\x61ta\x12\x31\n\x08metadata\x18\x05 \x01(\x0b\x32\x15.datacatalog.MetadataR\x08metadataB\x0e\n\x0cquery_handle\"9\n\x16UpdateArtifactResponse\x12\x1f\n\x0b\x61rtifact_id\x18\x01 \x01(\tR\nartifactId\"a\n\rReservationID\x12\x35\n\ndataset_id\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\tdatasetId\x12\x19\n\x08tag_name\x18\x02 \x01(\tR\x07tagName\"\xc7\x01\n\x1dGetOrExtendReservationRequest\x12\x41\n\x0ereservation_id\x18\x01 \x01(\x0b\x32\x1a.datacatalog.ReservationIDR\rreservationId\x12\x19\n\x08owner_id\x18\x02 \x01(\tR\x07ownerId\x12H\n\x12heartbeat_interval\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationR\x11heartbeatInterval\"\xa3\x02\n\x0bReservation\x12\x41\n\x0ereservation_id\x18\x01 \x01(\x0b\x32\x1a.datacatalog.ReservationIDR\rreservationId\x12\x19\n\x08owner_id\x18\x02 \x01(\tR\x07ownerId\x12H\n\x12heartbeat_interval\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationR\x11heartbeatInterval\x12\x39\n\nexpires_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\texpiresAt\x12\x31\n\x08metadata\x18\x06 \x01(\x0b\x32\x15.datacatalog.MetadataR\x08metadata\"\\\n\x1eGetOrExtendReservationResponse\x12:\n\x0breservation\x18\x01 \x01(\x0b\x32\x18.datacatalog.ReservationR\x0breservation\"y\n\x19ReleaseReservationRequest\x12\x41\n\x0ereservation_id\x18\x01 \x01(\x0b\x32\x1a.datacatalog.ReservationIDR\rreservationId\x12\x19\n\x08owner_id\x18\x02 
\x01(\tR\x07ownerId\"\x1c\n\x1aReleaseReservationResponse\"\x8a\x01\n\x07\x44\x61taset\x12&\n\x02id\x18\x01 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x02id\x12\x31\n\x08metadata\x18\x02 \x01(\x0b\x32\x15.datacatalog.MetadataR\x08metadata\x12$\n\rpartitionKeys\x18\x03 \x03(\tR\rpartitionKeys\"3\n\tPartition\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\"\x91\x01\n\tDatasetID\x12\x18\n\x07project\x18\x01 \x01(\tR\x07project\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x16\n\x06\x64omain\x18\x03 \x01(\tR\x06\x64omain\x12\x18\n\x07version\x18\x04 \x01(\tR\x07version\x12\x12\n\x04UUID\x18\x05 \x01(\tR\x04UUID\x12\x10\n\x03org\x18\x06 \x01(\tR\x03org\"\xc7\x02\n\x08\x41rtifact\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x30\n\x07\x64\x61taset\x18\x02 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\x12-\n\x04\x64\x61ta\x18\x03 \x03(\x0b\x32\x19.datacatalog.ArtifactDataR\x04\x64\x61ta\x12\x31\n\x08metadata\x18\x04 \x01(\x0b\x32\x15.datacatalog.MetadataR\x08metadata\x12\x36\n\npartitions\x18\x05 \x03(\x0b\x32\x16.datacatalog.PartitionR\npartitions\x12$\n\x04tags\x18\x06 \x03(\x0b\x32\x10.datacatalog.TagR\x04tags\x12\x39\n\ncreated_at\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tcreatedAt\"P\n\x0c\x41rtifactData\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x16.flyteidl.core.LiteralR\x05value\"l\n\x03Tag\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1f\n\x0b\x61rtifact_id\x18\x02 \x01(\tR\nartifactId\x12\x30\n\x07\x64\x61taset\x18\x03 \x01(\x0b\x32\x16.datacatalog.DatasetIDR\x07\x64\x61taset\"\x81\x01\n\x08Metadata\x12:\n\x07key_map\x18\x01 \x03(\x0b\x32!.datacatalog.Metadata.KeyMapEntryR\x06keyMap\x1a\x39\n\x0bKeyMapEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"O\n\x10\x46ilterExpression\x12;\n\x07\x66ilters\x18\x01 
\x03(\x0b\x32!.datacatalog.SinglePropertyFilterR\x07\x66ilters\"\xce\x03\n\x14SinglePropertyFilter\x12?\n\ntag_filter\x18\x01 \x01(\x0b\x32\x1e.datacatalog.TagPropertyFilterH\x00R\ttagFilter\x12Q\n\x10partition_filter\x18\x02 \x01(\x0b\x32$.datacatalog.PartitionPropertyFilterH\x00R\x0fpartitionFilter\x12N\n\x0f\x61rtifact_filter\x18\x03 \x01(\x0b\x32#.datacatalog.ArtifactPropertyFilterH\x00R\x0e\x61rtifactFilter\x12K\n\x0e\x64\x61taset_filter\x18\x04 \x01(\x0b\x32\".datacatalog.DatasetPropertyFilterH\x00R\rdatasetFilter\x12P\n\x08operator\x18\n \x01(\x0e\x32\x34.datacatalog.SinglePropertyFilter.ComparisonOperatorR\x08operator\" \n\x12\x43omparisonOperator\x12\n\n\x06\x45QUALS\x10\x00\x42\x11\n\x0fproperty_filter\"G\n\x16\x41rtifactPropertyFilter\x12!\n\x0b\x61rtifact_id\x18\x01 \x01(\tH\x00R\nartifactIdB\n\n\x08property\"<\n\x11TagPropertyFilter\x12\x1b\n\x08tag_name\x18\x01 \x01(\tH\x00R\x07tagNameB\n\n\x08property\"[\n\x17PartitionPropertyFilter\x12\x34\n\x07key_val\x18\x01 \x01(\x0b\x32\x19.datacatalog.KeyValuePairH\x00R\x06keyValB\n\n\x08property\"6\n\x0cKeyValuePair\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value\"\x9f\x01\n\x15\x44\x61tasetPropertyFilter\x12\x1a\n\x07project\x18\x01 \x01(\tH\x00R\x07project\x12\x14\n\x04name\x18\x02 \x01(\tH\x00R\x04name\x12\x18\n\x06\x64omain\x18\x03 \x01(\tH\x00R\x06\x64omain\x12\x1a\n\x07version\x18\x04 \x01(\tH\x00R\x07version\x12\x12\n\x03org\x18\x05 \x01(\tH\x00R\x03orgB\n\n\x08property\"\x93\x02\n\x11PaginationOptions\x12\x14\n\x05limit\x18\x01 \x01(\rR\x05limit\x12\x14\n\x05token\x18\x02 \x01(\tR\x05token\x12@\n\x07sortKey\x18\x03 \x01(\x0e\x32&.datacatalog.PaginationOptions.SortKeyR\x07sortKey\x12\x46\n\tsortOrder\x18\x04 
\x01(\x0e\x32(.datacatalog.PaginationOptions.SortOrderR\tsortOrder\"*\n\tSortOrder\x12\x0e\n\nDESCENDING\x10\x00\x12\r\n\tASCENDING\x10\x01\"\x1c\n\x07SortKey\x12\x11\n\rCREATION_TIME\x10\x00\x32\x86\x07\n\x0b\x44\x61taCatalog\x12V\n\rCreateDataset\x12!.datacatalog.CreateDatasetRequest\x1a\".datacatalog.CreateDatasetResponse\x12M\n\nGetDataset\x12\x1e.datacatalog.GetDatasetRequest\x1a\x1f.datacatalog.GetDatasetResponse\x12Y\n\x0e\x43reateArtifact\x12\".datacatalog.CreateArtifactRequest\x1a#.datacatalog.CreateArtifactResponse\x12P\n\x0bGetArtifact\x12\x1f.datacatalog.GetArtifactRequest\x1a .datacatalog.GetArtifactResponse\x12\x41\n\x06\x41\x64\x64Tag\x12\x1a.datacatalog.AddTagRequest\x1a\x1b.datacatalog.AddTagResponse\x12V\n\rListArtifacts\x12!.datacatalog.ListArtifactsRequest\x1a\".datacatalog.ListArtifactsResponse\x12S\n\x0cListDatasets\x12 .datacatalog.ListDatasetsRequest\x1a!.datacatalog.ListDatasetsResponse\x12Y\n\x0eUpdateArtifact\x12\".datacatalog.UpdateArtifactRequest\x1a#.datacatalog.UpdateArtifactResponse\x12q\n\x16GetOrExtendReservation\x12*.datacatalog.GetOrExtendReservationRequest\x1a+.datacatalog.GetOrExtendReservationResponse\x12\x65\n\x12ReleaseReservation\x12&.datacatalog.ReleaseReservationRequest\x1a\'.datacatalog.ReleaseReservationResponseB\xb2\x01\n\x0f\x63om.datacatalogB\x10\x44\x61tacatalogProtoP\x01ZAgithub.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/datacatalog\xa2\x02\x03\x44XX\xaa\x02\x0b\x44\x61tacatalog\xca\x02\x0b\x44\x61tacatalog\xe2\x02\x17\x44\x61tacatalog\\GPBMetadata\xea\x02\x0b\x44\x61tacatalogb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -75,40 +75,40 @@ _globals['_DATASET']._serialized_end=2633 _globals['_PARTITION']._serialized_start=2635 _globals['_PARTITION']._serialized_end=2686 - _globals['_DATASETID']._serialized_start=2688 - _globals['_DATASETID']._serialized_end=2815 - _globals['_ARTIFACT']._serialized_start=2818 - _globals['_ARTIFACT']._serialized_end=3145 - 
_globals['_ARTIFACTDATA']._serialized_start=3147 - _globals['_ARTIFACTDATA']._serialized_end=3227 - _globals['_TAG']._serialized_start=3229 - _globals['_TAG']._serialized_end=3337 - _globals['_METADATA']._serialized_start=3340 - _globals['_METADATA']._serialized_end=3469 - _globals['_METADATA_KEYMAPENTRY']._serialized_start=3412 - _globals['_METADATA_KEYMAPENTRY']._serialized_end=3469 - _globals['_FILTEREXPRESSION']._serialized_start=3471 - _globals['_FILTEREXPRESSION']._serialized_end=3550 - _globals['_SINGLEPROPERTYFILTER']._serialized_start=3553 - _globals['_SINGLEPROPERTYFILTER']._serialized_end=4015 - _globals['_SINGLEPROPERTYFILTER_COMPARISONOPERATOR']._serialized_start=3964 - _globals['_SINGLEPROPERTYFILTER_COMPARISONOPERATOR']._serialized_end=3996 - _globals['_ARTIFACTPROPERTYFILTER']._serialized_start=4017 - _globals['_ARTIFACTPROPERTYFILTER']._serialized_end=4088 - _globals['_TAGPROPERTYFILTER']._serialized_start=4090 - _globals['_TAGPROPERTYFILTER']._serialized_end=4150 - _globals['_PARTITIONPROPERTYFILTER']._serialized_start=4152 - _globals['_PARTITIONPROPERTYFILTER']._serialized_end=4243 - _globals['_KEYVALUEPAIR']._serialized_start=4245 - _globals['_KEYVALUEPAIR']._serialized_end=4299 - _globals['_DATASETPROPERTYFILTER']._serialized_start=4302 - _globals['_DATASETPROPERTYFILTER']._serialized_end=4441 - _globals['_PAGINATIONOPTIONS']._serialized_start=4444 - _globals['_PAGINATIONOPTIONS']._serialized_end=4719 - _globals['_PAGINATIONOPTIONS_SORTORDER']._serialized_start=4647 - _globals['_PAGINATIONOPTIONS_SORTORDER']._serialized_end=4689 - _globals['_PAGINATIONOPTIONS_SORTKEY']._serialized_start=4691 - _globals['_PAGINATIONOPTIONS_SORTKEY']._serialized_end=4719 - _globals['_DATACATALOG']._serialized_start=4722 - _globals['_DATACATALOG']._serialized_end=5624 + _globals['_DATASETID']._serialized_start=2689 + _globals['_DATASETID']._serialized_end=2834 + _globals['_ARTIFACT']._serialized_start=2837 + _globals['_ARTIFACT']._serialized_end=3164 + 
_globals['_ARTIFACTDATA']._serialized_start=3166 + _globals['_ARTIFACTDATA']._serialized_end=3246 + _globals['_TAG']._serialized_start=3248 + _globals['_TAG']._serialized_end=3356 + _globals['_METADATA']._serialized_start=3359 + _globals['_METADATA']._serialized_end=3488 + _globals['_METADATA_KEYMAPENTRY']._serialized_start=3431 + _globals['_METADATA_KEYMAPENTRY']._serialized_end=3488 + _globals['_FILTEREXPRESSION']._serialized_start=3490 + _globals['_FILTEREXPRESSION']._serialized_end=3569 + _globals['_SINGLEPROPERTYFILTER']._serialized_start=3572 + _globals['_SINGLEPROPERTYFILTER']._serialized_end=4034 + _globals['_SINGLEPROPERTYFILTER_COMPARISONOPERATOR']._serialized_start=3983 + _globals['_SINGLEPROPERTYFILTER_COMPARISONOPERATOR']._serialized_end=4015 + _globals['_ARTIFACTPROPERTYFILTER']._serialized_start=4036 + _globals['_ARTIFACTPROPERTYFILTER']._serialized_end=4107 + _globals['_TAGPROPERTYFILTER']._serialized_start=4109 + _globals['_TAGPROPERTYFILTER']._serialized_end=4169 + _globals['_PARTITIONPROPERTYFILTER']._serialized_start=4171 + _globals['_PARTITIONPROPERTYFILTER']._serialized_end=4262 + _globals['_KEYVALUEPAIR']._serialized_start=4264 + _globals['_KEYVALUEPAIR']._serialized_end=4318 + _globals['_DATASETPROPERTYFILTER']._serialized_start=4321 + _globals['_DATASETPROPERTYFILTER']._serialized_end=4480 + _globals['_PAGINATIONOPTIONS']._serialized_start=4483 + _globals['_PAGINATIONOPTIONS']._serialized_end=4758 + _globals['_PAGINATIONOPTIONS_SORTORDER']._serialized_start=4686 + _globals['_PAGINATIONOPTIONS_SORTORDER']._serialized_end=4728 + _globals['_PAGINATIONOPTIONS_SORTKEY']._serialized_start=4730 + _globals['_PAGINATIONOPTIONS_SORTKEY']._serialized_end=4758 + _globals['_DATACATALOG']._serialized_start=4761 + _globals['_DATACATALOG']._serialized_end=5663 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/datacatalog/datacatalog_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/datacatalog/datacatalog_pb2.pyi index 
94cd96797b..22dd8edbfe 100644 --- a/flyteidl/gen/pb_python/flyteidl/datacatalog/datacatalog_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/datacatalog/datacatalog_pb2.pyi @@ -190,18 +190,20 @@ class Partition(_message.Message): def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... class DatasetID(_message.Message): - __slots__ = ["project", "name", "domain", "version", "UUID"] + __slots__ = ["project", "name", "domain", "version", "UUID", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] VERSION_FIELD_NUMBER: _ClassVar[int] UUID_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str name: str domain: str version: str UUID: str - def __init__(self, project: _Optional[str] = ..., name: _Optional[str] = ..., domain: _Optional[str] = ..., version: _Optional[str] = ..., UUID: _Optional[str] = ...) -> None: ... + org: str + def __init__(self, project: _Optional[str] = ..., name: _Optional[str] = ..., domain: _Optional[str] = ..., version: _Optional[str] = ..., UUID: _Optional[str] = ..., org: _Optional[str] = ...) -> None: ... class Artifact(_message.Message): __slots__ = ["id", "dataset", "data", "metadata", "partitions", "tags", "created_at"] @@ -303,16 +305,18 @@ class KeyValuePair(_message.Message): def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... class DatasetPropertyFilter(_message.Message): - __slots__ = ["project", "name", "domain", "version"] + __slots__ = ["project", "name", "domain", "version", "org"] PROJECT_FIELD_NUMBER: _ClassVar[int] NAME_FIELD_NUMBER: _ClassVar[int] DOMAIN_FIELD_NUMBER: _ClassVar[int] VERSION_FIELD_NUMBER: _ClassVar[int] + ORG_FIELD_NUMBER: _ClassVar[int] project: str name: str domain: str version: str - def __init__(self, project: _Optional[str] = ..., name: _Optional[str] = ..., domain: _Optional[str] = ..., version: _Optional[str] = ...) -> None: ... 
+ org: str + def __init__(self, project: _Optional[str] = ..., name: _Optional[str] = ..., domain: _Optional[str] = ..., version: _Optional[str] = ..., org: _Optional[str] = ...) -> None: ... class PaginationOptions(_message.Message): __slots__ = ["limit", "token", "sortKey", "sortOrder"] diff --git a/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.py b/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.py index b7035b32b5..7addfe281f 100644 --- a/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.py @@ -19,7 +19,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n flyteidl/event/cloudevents.proto\x12\x0e\x66lyteidl.event\x1a\x1a\x66lyteidl/event/event.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a\x1f\x66lyteidl/core/artifact_id.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x9c\x04\n\x1b\x43loudEventWorkflowExecution\x12\x43\n\traw_event\x18\x01 \x01(\x0b\x32&.flyteidl.event.WorkflowExecutionEventR\x08rawEvent\x12:\n\x0boutput_data\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\noutputData\x12H\n\x10output_interface\x18\x03 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\x0foutputInterface\x12\x38\n\ninput_data\x18\x04 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\tinputData\x12<\n\x0c\x61rtifact_ids\x18\x05 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12[\n\x13reference_execution\x18\x06 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x12referenceExecution\x12\x1c\n\tprincipal\x18\x07 \x01(\tR\tprincipal\x12?\n\x0elaunch_plan_id\x18\x08 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\"\x81\x04\n\x17\x43loudEventNodeExecution\x12?\n\traw_event\x18\x01 \x01(\x0b\x32\".flyteidl.event.NodeExecutionEventR\x08rawEvent\x12H\n\x0ctask_exec_id\x18\x02 
\x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\ntaskExecId\x12:\n\x0boutput_data\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\noutputData\x12H\n\x10output_interface\x18\x04 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\x0foutputInterface\x12\x38\n\ninput_data\x18\x05 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\tinputData\x12<\n\x0c\x61rtifact_ids\x18\x06 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12\x1c\n\tprincipal\x18\x07 \x01(\tR\tprincipal\x12?\n\x0elaunch_plan_id\x18\x08 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\"Z\n\x17\x43loudEventTaskExecution\x12?\n\traw_event\x18\x01 \x01(\x0b\x32\".flyteidl.event.TaskExecutionEventR\x08rawEvent\"\xe7\x02\n\x18\x43loudEventExecutionStart\x12M\n\x0c\x65xecution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\x12?\n\x0elaunch_plan_id\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\x12:\n\x0bworkflow_id\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nworkflowId\x12<\n\x0c\x61rtifact_ids\x18\x04 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12#\n\rartifact_keys\x18\x05 \x03(\tR\x0c\x61rtifactKeys\x12\x1c\n\tprincipal\x18\x06 \x01(\tR\tprincipalB\xbc\x01\n\x12\x63om.flyteidl.eventB\x10\x43loudeventsProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event\xa2\x02\x03\x46\x45X\xaa\x02\x0e\x46lyteidl.Event\xca\x02\x0e\x46lyteidl\\Event\xe2\x02\x1a\x46lyteidl\\Event\\GPBMetadata\xea\x02\x0f\x46lyteidl::Eventb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n flyteidl/event/cloudevents.proto\x12\x0e\x66lyteidl.event\x1a\x1a\x66lyteidl/event/event.proto\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x1d\x66lyteidl/core/interface.proto\x1a\x1f\x66lyteidl/core/artifact_id.proto\x1a\x1e\x66lyteidl/core/identifier.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xa6\x03\n\x1b\x43loudEventWorkflowExecution\x12\x43\n\traw_event\x18\x01 
\x01(\x0b\x32&.flyteidl.event.WorkflowExecutionEventR\x08rawEvent\x12H\n\x10output_interface\x18\x02 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\x0foutputInterface\x12<\n\x0c\x61rtifact_ids\x18\x03 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12[\n\x13reference_execution\x18\x04 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x12referenceExecution\x12\x1c\n\tprincipal\x18\x05 \x01(\tR\tprincipal\x12?\n\x0elaunch_plan_id\x18\x06 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\"\x8b\x03\n\x17\x43loudEventNodeExecution\x12?\n\traw_event\x18\x01 \x01(\x0b\x32\".flyteidl.event.NodeExecutionEventR\x08rawEvent\x12H\n\x0ctask_exec_id\x18\x02 \x01(\x0b\x32&.flyteidl.core.TaskExecutionIdentifierR\ntaskExecId\x12H\n\x10output_interface\x18\x03 \x01(\x0b\x32\x1d.flyteidl.core.TypedInterfaceR\x0foutputInterface\x12<\n\x0c\x61rtifact_ids\x18\x04 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12\x1c\n\tprincipal\x18\x05 \x01(\tR\tprincipal\x12?\n\x0elaunch_plan_id\x18\x06 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\"Z\n\x17\x43loudEventTaskExecution\x12?\n\traw_event\x18\x01 \x01(\x0b\x32\".flyteidl.event.TaskExecutionEventR\x08rawEvent\"\xef\x02\n\x18\x43loudEventExecutionStart\x12M\n\x0c\x65xecution_id\x18\x01 \x01(\x0b\x32*.flyteidl.core.WorkflowExecutionIdentifierR\x0b\x65xecutionId\x12?\n\x0elaunch_plan_id\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\x0claunchPlanId\x12:\n\x0bworkflow_id\x18\x03 \x01(\x0b\x32\x19.flyteidl.core.IdentifierR\nworkflowId\x12<\n\x0c\x61rtifact_ids\x18\x04 \x03(\x0b\x32\x19.flyteidl.core.ArtifactIDR\x0b\x61rtifactIds\x12+\n\x11\x61rtifact_trackers\x18\x05 \x03(\tR\x10\x61rtifactTrackers\x12\x1c\n\tprincipal\x18\x06 
\x01(\tR\tprincipalB\xbc\x01\n\x12\x63om.flyteidl.eventB\x10\x43loudeventsProtoP\x01Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event\xa2\x02\x03\x46\x45X\xaa\x02\x0e\x46lyteidl.Event\xca\x02\x0e\x46lyteidl\\Event\xe2\x02\x1a\x46lyteidl\\Event\\GPBMetadata\xea\x02\x0f\x46lyteidl::Eventb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -29,11 +29,11 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'\n\022com.flyteidl.eventB\020CloudeventsProtoP\001Z;github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event\242\002\003FEX\252\002\016Flyteidl.Event\312\002\016Flyteidl\\Event\342\002\032Flyteidl\\Event\\GPBMetadata\352\002\017Flyteidl::Event' _globals['_CLOUDEVENTWORKFLOWEXECUTION']._serialized_start=240 - _globals['_CLOUDEVENTWORKFLOWEXECUTION']._serialized_end=780 - _globals['_CLOUDEVENTNODEEXECUTION']._serialized_start=783 - _globals['_CLOUDEVENTNODEEXECUTION']._serialized_end=1296 - _globals['_CLOUDEVENTTASKEXECUTION']._serialized_start=1298 - _globals['_CLOUDEVENTTASKEXECUTION']._serialized_end=1388 - _globals['_CLOUDEVENTEXECUTIONSTART']._serialized_start=1391 - _globals['_CLOUDEVENTEXECUTIONSTART']._serialized_end=1750 + _globals['_CLOUDEVENTWORKFLOWEXECUTION']._serialized_end=662 + _globals['_CLOUDEVENTNODEEXECUTION']._serialized_start=665 + _globals['_CLOUDEVENTNODEEXECUTION']._serialized_end=1060 + _globals['_CLOUDEVENTTASKEXECUTION']._serialized_start=1062 + _globals['_CLOUDEVENTTASKEXECUTION']._serialized_end=1152 + _globals['_CLOUDEVENTEXECUTIONSTART']._serialized_start=1155 + _globals['_CLOUDEVENTEXECUTIONSTART']._serialized_end=1522 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.pyi index 8444627c32..b79750c9ca 100644 --- a/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/event/cloudevents_pb2.pyi @@ -12,44 
+12,36 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map DESCRIPTOR: _descriptor.FileDescriptor class CloudEventWorkflowExecution(_message.Message): - __slots__ = ["raw_event", "output_data", "output_interface", "input_data", "artifact_ids", "reference_execution", "principal", "launch_plan_id"] + __slots__ = ["raw_event", "output_interface", "artifact_ids", "reference_execution", "principal", "launch_plan_id"] RAW_EVENT_FIELD_NUMBER: _ClassVar[int] - OUTPUT_DATA_FIELD_NUMBER: _ClassVar[int] OUTPUT_INTERFACE_FIELD_NUMBER: _ClassVar[int] - INPUT_DATA_FIELD_NUMBER: _ClassVar[int] ARTIFACT_IDS_FIELD_NUMBER: _ClassVar[int] REFERENCE_EXECUTION_FIELD_NUMBER: _ClassVar[int] PRINCIPAL_FIELD_NUMBER: _ClassVar[int] LAUNCH_PLAN_ID_FIELD_NUMBER: _ClassVar[int] raw_event: _event_pb2.WorkflowExecutionEvent - output_data: _literals_pb2.LiteralMap output_interface: _interface_pb2.TypedInterface - input_data: _literals_pb2.LiteralMap artifact_ids: _containers.RepeatedCompositeFieldContainer[_artifact_id_pb2.ArtifactID] reference_execution: _identifier_pb2.WorkflowExecutionIdentifier principal: str launch_plan_id: _identifier_pb2.Identifier - def __init__(self, raw_event: _Optional[_Union[_event_pb2.WorkflowExecutionEvent, _Mapping]] = ..., output_data: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., output_interface: _Optional[_Union[_interface_pb2.TypedInterface, _Mapping]] = ..., input_data: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., artifact_ids: _Optional[_Iterable[_Union[_artifact_id_pb2.ArtifactID, _Mapping]]] = ..., reference_execution: _Optional[_Union[_identifier_pb2.WorkflowExecutionIdentifier, _Mapping]] = ..., principal: _Optional[str] = ..., launch_plan_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ...) -> None: ... 
+ def __init__(self, raw_event: _Optional[_Union[_event_pb2.WorkflowExecutionEvent, _Mapping]] = ..., output_interface: _Optional[_Union[_interface_pb2.TypedInterface, _Mapping]] = ..., artifact_ids: _Optional[_Iterable[_Union[_artifact_id_pb2.ArtifactID, _Mapping]]] = ..., reference_execution: _Optional[_Union[_identifier_pb2.WorkflowExecutionIdentifier, _Mapping]] = ..., principal: _Optional[str] = ..., launch_plan_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ...) -> None: ... class CloudEventNodeExecution(_message.Message): - __slots__ = ["raw_event", "task_exec_id", "output_data", "output_interface", "input_data", "artifact_ids", "principal", "launch_plan_id"] + __slots__ = ["raw_event", "task_exec_id", "output_interface", "artifact_ids", "principal", "launch_plan_id"] RAW_EVENT_FIELD_NUMBER: _ClassVar[int] TASK_EXEC_ID_FIELD_NUMBER: _ClassVar[int] - OUTPUT_DATA_FIELD_NUMBER: _ClassVar[int] OUTPUT_INTERFACE_FIELD_NUMBER: _ClassVar[int] - INPUT_DATA_FIELD_NUMBER: _ClassVar[int] ARTIFACT_IDS_FIELD_NUMBER: _ClassVar[int] PRINCIPAL_FIELD_NUMBER: _ClassVar[int] LAUNCH_PLAN_ID_FIELD_NUMBER: _ClassVar[int] raw_event: _event_pb2.NodeExecutionEvent task_exec_id: _identifier_pb2.TaskExecutionIdentifier - output_data: _literals_pb2.LiteralMap output_interface: _interface_pb2.TypedInterface - input_data: _literals_pb2.LiteralMap artifact_ids: _containers.RepeatedCompositeFieldContainer[_artifact_id_pb2.ArtifactID] principal: str launch_plan_id: _identifier_pb2.Identifier - def __init__(self, raw_event: _Optional[_Union[_event_pb2.NodeExecutionEvent, _Mapping]] = ..., task_exec_id: _Optional[_Union[_identifier_pb2.TaskExecutionIdentifier, _Mapping]] = ..., output_data: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., output_interface: _Optional[_Union[_interface_pb2.TypedInterface, _Mapping]] = ..., input_data: _Optional[_Union[_literals_pb2.LiteralMap, _Mapping]] = ..., artifact_ids: _Optional[_Iterable[_Union[_artifact_id_pb2.ArtifactID, 
_Mapping]]] = ..., principal: _Optional[str] = ..., launch_plan_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ...) -> None: ... + def __init__(self, raw_event: _Optional[_Union[_event_pb2.NodeExecutionEvent, _Mapping]] = ..., task_exec_id: _Optional[_Union[_identifier_pb2.TaskExecutionIdentifier, _Mapping]] = ..., output_interface: _Optional[_Union[_interface_pb2.TypedInterface, _Mapping]] = ..., artifact_ids: _Optional[_Iterable[_Union[_artifact_id_pb2.ArtifactID, _Mapping]]] = ..., principal: _Optional[str] = ..., launch_plan_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ...) -> None: ... class CloudEventTaskExecution(_message.Message): __slots__ = ["raw_event"] @@ -58,17 +50,17 @@ class CloudEventTaskExecution(_message.Message): def __init__(self, raw_event: _Optional[_Union[_event_pb2.TaskExecutionEvent, _Mapping]] = ...) -> None: ... class CloudEventExecutionStart(_message.Message): - __slots__ = ["execution_id", "launch_plan_id", "workflow_id", "artifact_ids", "artifact_keys", "principal"] + __slots__ = ["execution_id", "launch_plan_id", "workflow_id", "artifact_ids", "artifact_trackers", "principal"] EXECUTION_ID_FIELD_NUMBER: _ClassVar[int] LAUNCH_PLAN_ID_FIELD_NUMBER: _ClassVar[int] WORKFLOW_ID_FIELD_NUMBER: _ClassVar[int] ARTIFACT_IDS_FIELD_NUMBER: _ClassVar[int] - ARTIFACT_KEYS_FIELD_NUMBER: _ClassVar[int] + ARTIFACT_TRACKERS_FIELD_NUMBER: _ClassVar[int] PRINCIPAL_FIELD_NUMBER: _ClassVar[int] execution_id: _identifier_pb2.WorkflowExecutionIdentifier launch_plan_id: _identifier_pb2.Identifier workflow_id: _identifier_pb2.Identifier artifact_ids: _containers.RepeatedCompositeFieldContainer[_artifact_id_pb2.ArtifactID] - artifact_keys: _containers.RepeatedScalarFieldContainer[str] + artifact_trackers: _containers.RepeatedScalarFieldContainer[str] principal: str - def __init__(self, execution_id: _Optional[_Union[_identifier_pb2.WorkflowExecutionIdentifier, _Mapping]] = ..., launch_plan_id: 
_Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., workflow_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., artifact_ids: _Optional[_Iterable[_Union[_artifact_id_pb2.ArtifactID, _Mapping]]] = ..., artifact_keys: _Optional[_Iterable[str]] = ..., principal: _Optional[str] = ...) -> None: ... + def __init__(self, execution_id: _Optional[_Union[_identifier_pb2.WorkflowExecutionIdentifier, _Mapping]] = ..., launch_plan_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., workflow_id: _Optional[_Union[_identifier_pb2.Identifier, _Mapping]] = ..., artifact_ids: _Optional[_Iterable[_Union[_artifact_id_pb2.ArtifactID, _Mapping]]] = ..., artifact_trackers: _Optional[_Iterable[str]] = ..., principal: _Optional[str] = ...) -> None: ... diff --git a/flyteidl/gen/pb_python/flyteidl/service/admin_pb2.py b/flyteidl/gen/pb_python/flyteidl/service/admin_pb2.py index d48148be62..b6ac28b9a8 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/admin_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/service/admin_pb2.py @@ -29,7 +29,7 @@ from flyteidl.admin import description_entity_pb2 as flyteidl_dot_admin_dot_description__entity__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/service/admin.proto\x12\x10\x66lyteidl.service\x1a\x1cgoogle/api/annotations.proto\x1a\x1c\x66lyteidl/admin/project.proto\x1a.flyteidl/admin/project_domain_attributes.proto\x1a\'flyteidl/admin/project_attributes.proto\x1a\x19\x66lyteidl/admin/task.proto\x1a\x1d\x66lyteidl/admin/workflow.proto\x1a(flyteidl/admin/workflow_attributes.proto\x1a 
flyteidl/admin/launch_plan.proto\x1a\x1a\x66lyteidl/admin/event.proto\x1a\x1e\x66lyteidl/admin/execution.proto\x1a\'flyteidl/admin/matchable_resource.proto\x1a#flyteidl/admin/node_execution.proto\x1a#flyteidl/admin/task_execution.proto\x1a\x1c\x66lyteidl/admin/version.proto\x1a\x1b\x66lyteidl/admin/common.proto\x1a\'flyteidl/admin/description_entity.proto2\x84N\n\x0c\x41\x64minService\x12m\n\nCreateTask\x12!.flyteidl.admin.TaskCreateRequest\x1a\".flyteidl.admin.TaskCreateResponse\"\x18\x82\xd3\xe4\x93\x02\x12:\x01*\"\r/api/v1/tasks\x12\x88\x01\n\x07GetTask\x12 .flyteidl.admin.ObjectGetRequest\x1a\x14.flyteidl.admin.Task\"E\x82\xd3\xe4\x93\x02?\x12=/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}\x12\x97\x01\n\x0bListTaskIds\x12\x30.flyteidl.admin.NamedEntityIdentifierListRequest\x1a).flyteidl.admin.NamedEntityIdentifierList\"+\x82\xd3\xe4\x93\x02%\x12#/api/v1/task_ids/{project}/{domain}\x12\xae\x01\n\tListTasks\x12#.flyteidl.admin.ResourceListRequest\x1a\x18.flyteidl.admin.TaskList\"b\x82\xd3\xe4\x93\x02\\Z(\x12&/api/v1/tasks/{id.project}/{id.domain}\x12\x30/api/v1/tasks/{id.project}/{id.domain}/{id.name}\x12}\n\x0e\x43reateWorkflow\x12%.flyteidl.admin.WorkflowCreateRequest\x1a&.flyteidl.admin.WorkflowCreateResponse\"\x1c\x82\xd3\xe4\x93\x02\x16:\x01*\"\x11/api/v1/workflows\x12\x94\x01\n\x0bGetWorkflow\x12 
.flyteidl.admin.ObjectGetRequest\x1a\x18.flyteidl.admin.Workflow\"I\x82\xd3\xe4\x93\x02\x43\x12\x41/api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}\x12\x9f\x01\n\x0fListWorkflowIds\x12\x30.flyteidl.admin.NamedEntityIdentifierListRequest\x1a).flyteidl.admin.NamedEntityIdentifierList\"/\x82\xd3\xe4\x93\x02)\x12\'/api/v1/workflow_ids/{project}/{domain}\x12\xbe\x01\n\rListWorkflows\x12#.flyteidl.admin.ResourceListRequest\x1a\x1c.flyteidl.admin.WorkflowList\"j\x82\xd3\xe4\x93\x02\x64Z,\x12*/api/v1/workflows/{id.project}/{id.domain}\x12\x34/api/v1/workflows/{id.project}/{id.domain}/{id.name}\x12\x86\x01\n\x10\x43reateLaunchPlan\x12\'.flyteidl.admin.LaunchPlanCreateRequest\x1a(.flyteidl.admin.LaunchPlanCreateResponse\"\x1f\x82\xd3\xe4\x93\x02\x19:\x01*\"\x14/api/v1/launch_plans\x12\x9b\x01\n\rGetLaunchPlan\x12 .flyteidl.admin.ObjectGetRequest\x1a\x1a.flyteidl.admin.LaunchPlan\"L\x82\xd3\xe4\x93\x02\x46\x12\x44/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}\x12\xa2\x01\n\x13GetActiveLaunchPlan\x12\'.flyteidl.admin.ActiveLaunchPlanRequest\x1a\x1a.flyteidl.admin.LaunchPlan\"F\x82\xd3\xe4\x93\x02@\x12>/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}\x12\x9c\x01\n\x15ListActiveLaunchPlans\x12+.flyteidl.admin.ActiveLaunchPlanListRequest\x1a\x1e.flyteidl.admin.LaunchPlanList\"6\x82\xd3\xe4\x93\x02\x30\x12./api/v1/active_launch_plans/{project}/{domain}\x12\xa4\x01\n\x11ListLaunchPlanIds\x12\x30.flyteidl.admin.NamedEntityIdentifierListRequest\x1a).flyteidl.admin.NamedEntityIdentifierList\"2\x82\xd3\xe4\x93\x02,\x12*/api/v1/launch_plan_ids/{project}/{domain}\x12\xc8\x01\n\x0fListLaunchPlans\x12#.flyteidl.admin.ResourceListRequest\x1a\x1e.flyteidl.admin.LaunchPlanList\"p\x82\xd3\xe4\x93\x02jZ/\x12-/api/v1/launch_plans/{id.project}/{id.domain}\x12\x37/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}\x12\xb6\x01\n\x10UpdateLaunchPlan\x12\'.flyteidl.admin.LaunchPlanUpdateRequest\x1a(.flyteidl.admin.LaunchPlanUpdateResponse\"O\x82
\xd3\xe4\x93\x02I:\x01*\x1a\x44/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}\x12\x81\x01\n\x0f\x43reateExecution\x12&.flyteidl.admin.ExecutionCreateRequest\x1a\'.flyteidl.admin.ExecutionCreateResponse\"\x1d\x82\xd3\xe4\x93\x02\x17:\x01*\"\x12/api/v1/executions\x12\x8e\x01\n\x11RelaunchExecution\x12(.flyteidl.admin.ExecutionRelaunchRequest\x1a\'.flyteidl.admin.ExecutionCreateResponse\"&\x82\xd3\xe4\x93\x02 :\x01*\"\x1b/api/v1/executions/relaunch\x12\x8b\x01\n\x10RecoverExecution\x12\'.flyteidl.admin.ExecutionRecoverRequest\x1a\'.flyteidl.admin.ExecutionCreateResponse\"%\x82\xd3\xe4\x93\x02\x1f:\x01*\"\x1a/api/v1/executions/recover\x12\x95\x01\n\x0cGetExecution\x12+.flyteidl.admin.WorkflowExecutionGetRequest\x1a\x19.flyteidl.admin.Execution\"=\x82\xd3\xe4\x93\x02\x37\x12\x35/api/v1/executions/{id.project}/{id.domain}/{id.name}\x12\xa4\x01\n\x0fUpdateExecution\x12&.flyteidl.admin.ExecutionUpdateRequest\x1a\'.flyteidl.admin.ExecutionUpdateResponse\"@\x82\xd3\xe4\x93\x02::\x01*\x1a\x35/api/v1/executions/{id.project}/{id.domain}/{id.name}\x12\xb9\x01\n\x10GetExecutionData\x12/.flyteidl.admin.WorkflowExecutionGetDataRequest\x1a\x30.flyteidl.admin.WorkflowExecutionGetDataResponse\"B\x82\xd3\xe4\x93\x02<\x12:/api/v1/data/executions/{id.project}/{id.domain}/{id.name}\x12\x89\x01\n\x0eListExecutions\x12#.flyteidl.admin.ResourceListRequest\x1a\x1d.flyteidl.admin.ExecutionList\"3\x82\xd3\xe4\x93\x02-\x12+/api/v1/executions/{id.project}/{id.domain}\x12\xad\x01\n\x12TerminateExecution\x12).flyteidl.admin.ExecutionTerminateRequest\x1a*.flyteidl.admin.ExecutionTerminateResponse\"@\x82\xd3\xe4\x93\x02::\x01**5/api/v1/executions/{id.project}/{id.domain}/{id.name}\x12\xd2\x01\n\x10GetNodeExecution\x12\'.flyteidl.admin.NodeExecutionGetRequest\x1a\x1d.flyteidl.admin.NodeExecution\"v\x82\xd3\xe4\x93\x02p\x12n/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}\x12\xde\x01\n\x12ListNodeExecutions\x12(.flyteidl
.admin.NodeExecutionListRequest\x1a!.flyteidl.admin.NodeExecutionList\"{\x82\xd3\xe4\x93\x02u\x12s/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}\x12\xa5\x04\n\x19ListNodeExecutionsForTask\x12/.flyteidl.admin.NodeExecutionForTaskListRequest\x1a!.flyteidl.admin.NodeExecutionList\"\xb3\x03\x82\xd3\xe4\x93\x02\xac\x03\x12\xa9\x03/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}\x12\xee\x01\n\x14GetNodeExecutionData\x12+.flyteidl.admin.NodeExecutionGetDataRequest\x1a,.flyteidl.admin.NodeExecutionGetDataResponse\"{\x82\xd3\xe4\x93\x02u\x12s/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}\x12\x7f\n\x0fRegisterProject\x12&.flyteidl.admin.ProjectRegisterRequest\x1a\'.flyteidl.admin.ProjectRegisterResponse\"\x1b\x82\xd3\xe4\x93\x02\x15:\x01*\"\x10/api/v1/projects\x12q\n\rUpdateProject\x12\x17.flyteidl.admin.Project\x1a%.flyteidl.admin.ProjectUpdateResponse\" 
\x82\xd3\xe4\x93\x02\x1a:\x01*\x1a\x15/api/v1/projects/{id}\x12\x66\n\x0cListProjects\x12\".flyteidl.admin.ProjectListRequest\x1a\x18.flyteidl.admin.Projects\"\x18\x82\xd3\xe4\x93\x02\x12\x12\x10/api/v1/projects\x12\x99\x01\n\x13\x43reateWorkflowEvent\x12-.flyteidl.admin.WorkflowExecutionEventRequest\x1a..flyteidl.admin.WorkflowExecutionEventResponse\"#\x82\xd3\xe4\x93\x02\x1d:\x01*\"\x18/api/v1/events/workflows\x12\x89\x01\n\x0f\x43reateNodeEvent\x12).flyteidl.admin.NodeExecutionEventRequest\x1a*.flyteidl.admin.NodeExecutionEventResponse\"\x1f\x82\xd3\xe4\x93\x02\x19:\x01*\"\x14/api/v1/events/nodes\x12\x89\x01\n\x0f\x43reateTaskEvent\x12).flyteidl.admin.TaskExecutionEventRequest\x1a*.flyteidl.admin.TaskExecutionEventResponse\"\x1f\x82\xd3\xe4\x93\x02\x19:\x01*\"\x14/api/v1/events/tasks\x12\x80\x03\n\x10GetTaskExecution\x12\'.flyteidl.admin.TaskExecutionGetRequest\x1a\x1d.flyteidl.admin.TaskExecution\"\xa3\x02\x82\xd3\xe4\x93\x02\x9c\x02\x12\x99\x02/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}\x12\x98\x02\n\x12ListTaskExecutions\x12(.flyteidl.admin.TaskExecutionListRequest\x1a!.flyteidl.admin.TaskExecutionList\"\xb4\x01\x82\xd3\xe4\x93\x02\xad\x01\x12\xaa\x01/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}\x12\x9c\x03\n\x14GetTaskExecutionData\x12+.flyteidl.admin.TaskExecutionGetDataRequest\x1a,.flyteidl.admin.TaskExecutionGetDataResponse\"\xa8\x02\x82\xd3\xe4\x93\x02\xa1\x02\x12\x9e\x02/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id
.task_id.name}/{id.task_id.version}/{id.retry_attempt}\x12\xe3\x01\n\x1dUpdateProjectDomainAttributes\x12\x34.flyteidl.admin.ProjectDomainAttributesUpdateRequest\x1a\x35.flyteidl.admin.ProjectDomainAttributesUpdateResponse\"U\x82\xd3\xe4\x93\x02O:\x01*\x1aJ/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}\x12\xc1\x01\n\x1aGetProjectDomainAttributes\x12\x31.flyteidl.admin.ProjectDomainAttributesGetRequest\x1a\x32.flyteidl.admin.ProjectDomainAttributesGetResponse\"<\x82\xd3\xe4\x93\x02\x36\x12\x34/api/v1/project_domain_attributes/{project}/{domain}\x12\xcd\x01\n\x1d\x44\x65leteProjectDomainAttributes\x12\x34.flyteidl.admin.ProjectDomainAttributesDeleteRequest\x1a\x35.flyteidl.admin.ProjectDomainAttributesDeleteResponse\"?\x82\xd3\xe4\x93\x02\x39:\x01**4/api/v1/project_domain_attributes/{project}/{domain}\x12\xb6\x01\n\x17UpdateProjectAttributes\x12..flyteidl.admin.ProjectAttributesUpdateRequest\x1a/.flyteidl.admin.ProjectAttributesUpdateResponse\":\x82\xd3\xe4\x93\x02\x34:\x01*\x1a//api/v1/project_attributes/{attributes.project}\x12\x9f\x01\n\x14GetProjectAttributes\x12+.flyteidl.admin.ProjectAttributesGetRequest\x1a,.flyteidl.admin.ProjectAttributesGetResponse\",\x82\xd3\xe4\x93\x02&\x12$/api/v1/project_attributes/{project}\x12\xab\x01\n\x17\x44\x65leteProjectAttributes\x12..flyteidl.admin.ProjectAttributesDeleteRequest\x1a/.flyteidl.admin.ProjectAttributesDeleteResponse\"/\x82\xd3\xe4\x93\x02):\x01**$/api/v1/project_attributes/{project}\x12\xe4\x01\n\x18UpdateWorkflowAttributes\x12/.flyteidl.admin.WorkflowAttributesUpdateRequest\x1a\x30.flyteidl.admin.WorkflowAttributesUpdateResponse\"e\x82\xd3\xe4\x93\x02_:\x01*\x1aZ/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}\x12\xb7\x01\n\x15GetWorkflowAttributes\x12,.flyteidl.admin.WorkflowAttributesGetRequest\x1a-.flyteidl.admin.WorkflowAttributesGetResponse\"A\x82\xd3\xe4\x93\x02;\x12\x39/api/v1/workflow_attributes/{project}/{domain}/{workflow}\x12\xc3\x01\
n\x18\x44\x65leteWorkflowAttributes\x12/.flyteidl.admin.WorkflowAttributesDeleteRequest\x1a\x30.flyteidl.admin.WorkflowAttributesDeleteResponse\"D\x82\xd3\xe4\x93\x02>:\x01**9/api/v1/workflow_attributes/{project}/{domain}/{workflow}\x12\xa0\x01\n\x17ListMatchableAttributes\x12..flyteidl.admin.ListMatchableAttributesRequest\x1a/.flyteidl.admin.ListMatchableAttributesResponse\"$\x82\xd3\xe4\x93\x02\x1e\x12\x1c/api/v1/matchable_attributes\x12\x9f\x01\n\x11ListNamedEntities\x12&.flyteidl.admin.NamedEntityListRequest\x1a\x1f.flyteidl.admin.NamedEntityList\"A\x82\xd3\xe4\x93\x02;\x12\x39/api/v1/named_entities/{resource_type}/{project}/{domain}\x12\xa7\x01\n\x0eGetNamedEntity\x12%.flyteidl.admin.NamedEntityGetRequest\x1a\x1b.flyteidl.admin.NamedEntity\"Q\x82\xd3\xe4\x93\x02K\x12I/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}\x12\xbe\x01\n\x11UpdateNamedEntity\x12(.flyteidl.admin.NamedEntityUpdateRequest\x1a).flyteidl.admin.NamedEntityUpdateResponse\"T\x82\xd3\xe4\x93\x02N:\x01*\x1aI/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}\x12l\n\nGetVersion\x12!.flyteidl.admin.GetVersionRequest\x1a\".flyteidl.admin.GetVersionResponse\"\x17\x82\xd3\xe4\x93\x02\x11\x12\x0f/api/v1/version\x12\xc4\x01\n\x14GetDescriptionEntity\x12 
.flyteidl.admin.ObjectGetRequest\x1a!.flyteidl.admin.DescriptionEntity\"g\x82\xd3\xe4\x93\x02\x61\x12_/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}\x12\x92\x02\n\x17ListDescriptionEntities\x12,.flyteidl.admin.DescriptionEntityListRequest\x1a%.flyteidl.admin.DescriptionEntityList\"\xa1\x01\x82\xd3\xe4\x93\x02\x9a\x01ZG\x12\x45/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}\x12O/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}\x12\xc5\x01\n\x13GetExecutionMetrics\x12\x32.flyteidl.admin.WorkflowExecutionGetMetricsRequest\x1a\x33.flyteidl.admin.WorkflowExecutionGetMetricsResponse\"E\x82\xd3\xe4\x93\x02?\x12=/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}B\xc2\x01\n\x14\x63om.flyteidl.serviceB\nAdminProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service\xa2\x02\x03\x46SX\xaa\x02\x10\x46lyteidl.Service\xca\x02\x10\x46lyteidl\\Service\xe2\x02\x1c\x46lyteidl\\Service\\GPBMetadata\xea\x02\x11\x46lyteidl::Serviceb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/service/admin.proto\x12\x10\x66lyteidl.service\x1a\x1cgoogle/api/annotations.proto\x1a\x1c\x66lyteidl/admin/project.proto\x1a.flyteidl/admin/project_domain_attributes.proto\x1a\'flyteidl/admin/project_attributes.proto\x1a\x19\x66lyteidl/admin/task.proto\x1a\x1d\x66lyteidl/admin/workflow.proto\x1a(flyteidl/admin/workflow_attributes.proto\x1a 
flyteidl/admin/launch_plan.proto\x1a\x1a\x66lyteidl/admin/event.proto\x1a\x1e\x66lyteidl/admin/execution.proto\x1a\'flyteidl/admin/matchable_resource.proto\x1a#flyteidl/admin/node_execution.proto\x1a#flyteidl/admin/task_execution.proto\x1a\x1c\x66lyteidl/admin/version.proto\x1a\x1b\x66lyteidl/admin/common.proto\x1a\'flyteidl/admin/description_entity.proto2\xb8y\n\x0c\x41\x64minService\x12\x8e\x01\n\nCreateTask\x12!.flyteidl.admin.TaskCreateRequest\x1a\".flyteidl.admin.TaskCreateResponse\"9\x82\xd3\xe4\x93\x02\x33:\x01*Z\x1f:\x01*\"\x1a/api/v1/tasks/org/{id.org}\"\r/api/v1/tasks\x12\xd8\x01\n\x07GetTask\x12 .flyteidl.admin.ObjectGetRequest\x1a\x14.flyteidl.admin.Task\"\x94\x01\x82\xd3\xe4\x93\x02\x8d\x01ZL\x12J/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}\x12=/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}\x12\xc5\x01\n\x0bListTaskIds\x12\x30.flyteidl.admin.NamedEntityIdentifierListRequest\x1a).flyteidl.admin.NamedEntityIdentifierList\"Y\x82\xd3\xe4\x93\x02SZ,\x12*/api/v1/tasks/org/{org}/{project}/{domain}\x12#/api/v1/task_ids/{project}/{domain}\x12\xa8\x02\n\tListTasks\x12#.flyteidl.admin.ResourceListRequest\x1a\x18.flyteidl.admin.TaskList\"\xdb\x01\x82\xd3\xe4\x93\x02\xd4\x01Z?\x12=/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}Z(\x12&/api/v1/tasks/{id.project}/{id.domain}Z5\x12\x33/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}\x12\x30/api/v1/tasks/{id.project}/{id.domain}/{id.name}\x12\xa2\x01\n\x0e\x43reateWorkflow\x12%.flyteidl.admin.WorkflowCreateRequest\x1a&.flyteidl.admin.WorkflowCreateResponse\"A\x82\xd3\xe4\x93\x02;:\x01*Z#:\x01*\"\x1e/api/v1/workflows/org/{id.org}\"\x11/api/v1/workflows\x12\xe8\x01\n\x0bGetWorkflow\x12 
.flyteidl.admin.ObjectGetRequest\x1a\x18.flyteidl.admin.Workflow\"\x9c\x01\x82\xd3\xe4\x93\x02\x95\x01ZP\x12N/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}\x12\x41/api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}\x12\xd1\x01\n\x0fListWorkflowIds\x12\x30.flyteidl.admin.NamedEntityIdentifierListRequest\x1a).flyteidl.admin.NamedEntityIdentifierList\"a\x82\xd3\xe4\x93\x02[Z0\x12./api/v1/workflows/org/{org}/{project}/{domain}\x12\'/api/v1/workflow_ids/{project}/{domain}\x12\xc0\x02\n\rListWorkflows\x12#.flyteidl.admin.ResourceListRequest\x1a\x1c.flyteidl.admin.WorkflowList\"\xeb\x01\x82\xd3\xe4\x93\x02\xe4\x01ZC\x12\x41/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}Z,\x12*/api/v1/workflows/{id.project}/{id.domain}Z9\x12\x37/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}\x12\x34/api/v1/workflows/{id.project}/{id.domain}/{id.name}\x12\xae\x01\n\x10\x43reateLaunchPlan\x12\'.flyteidl.admin.LaunchPlanCreateRequest\x1a(.flyteidl.admin.LaunchPlanCreateResponse\"G\x82\xd3\xe4\x93\x02\x41:\x01*Z&:\x01*\"!/api/v1/launch_plans/org/{id.org}\"\x14/api/v1/launch_plans\x12\xf2\x01\n\rGetLaunchPlan\x12 
.flyteidl.admin.ObjectGetRequest\x1a\x1a.flyteidl.admin.LaunchPlan\"\xa2\x01\x82\xd3\xe4\x93\x02\x9b\x01ZS\x12Q/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}\x12\x44/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}\x12\xf3\x01\n\x13GetActiveLaunchPlan\x12\'.flyteidl.admin.ActiveLaunchPlanRequest\x1a\x1a.flyteidl.admin.LaunchPlan\"\x96\x01\x82\xd3\xe4\x93\x02\x8f\x01ZM\x12K/api/v1/active_launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}\x12>/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}\x12\xd8\x01\n\x15ListActiveLaunchPlans\x12+.flyteidl.admin.ActiveLaunchPlanListRequest\x1a\x1e.flyteidl.admin.LaunchPlanList\"r\x82\xd3\xe4\x93\x02lZ:\x12\x38/api/v1/active_launch_plans/org/{org}/{project}/{domain}\x12./api/v1/active_launch_plans/{project}/{domain}\x12\xdc\x01\n\x11ListLaunchPlanIds\x12\x30.flyteidl.admin.NamedEntityIdentifierListRequest\x1a).flyteidl.admin.NamedEntityIdentifierList\"j\x82\xd3\xe4\x93\x02\x64Z6\x12\x34/api/v1/launch_plan_ids/org/{org}/{project}/{domain}\x12*/api/v1/launch_plan_ids/{project}/{domain}\x12\xd0\x02\n\x0fListLaunchPlans\x12#.flyteidl.admin.ResourceListRequest\x1a\x1e.flyteidl.admin.LaunchPlanList\"\xf7\x01\x82\xd3\xe4\x93\x02\xf0\x01ZF\x12\x44/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}Z/\x12-/api/v1/launch_plans/{id.project}/{id.domain}Z<\x12:/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}\x12\x37/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}\x12\x8d\x02\n\x10UpdateLaunchPlan\x12\'.flyteidl.admin.LaunchPlanUpdateRequest\x1a(.flyteidl.admin.LaunchPlanUpdateResponse\"\xa5\x01\x82\xd3\xe4\x93\x02\x9e\x01:\x01*ZS\x1aQ/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}\x1a\x44/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}\x12\xa4\x01\n\x0f\x43reateExecution\x12&.flyteidl.admin.ExecutionCreateRequest\x1a\'.flyteidl.admin.ExecutionCreateResponse\"@\x82\xd3\xe4\x93\x02::\x
01*Z!:\x01*\x1a\x1c/api/v1/executions/org/{org}\"\x12/api/v1/executions\x12\xbd\x01\n\x11RelaunchExecution\x12(.flyteidl.admin.ExecutionRelaunchRequest\x1a\'.flyteidl.admin.ExecutionCreateResponse\"U\x82\xd3\xe4\x93\x02O:\x01*Z-:\x01*\"(/api/v1/executions/org/{id.org}/relaunch\"\x1b/api/v1/executions/relaunch\x12\xb9\x01\n\x10RecoverExecution\x12\'.flyteidl.admin.ExecutionRecoverRequest\x1a\'.flyteidl.admin.ExecutionCreateResponse\"S\x82\xd3\xe4\x93\x02M:\x01*Z,:\x01*\"\'/api/v1/executions/org/{id.org}/recover\"\x1a/api/v1/executions/recover\x12\xdc\x01\n\x0cGetExecution\x12+.flyteidl.admin.WorkflowExecutionGetRequest\x1a\x19.flyteidl.admin.Execution\"\x83\x01\x82\xd3\xe4\x93\x02}ZD\x12\x42/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}\x12\x35/api/v1/executions/{id.project}/{id.domain}/{id.name}\x12\xef\x01\n\x0fUpdateExecution\x12&.flyteidl.admin.ExecutionUpdateRequest\x1a\'.flyteidl.admin.ExecutionUpdateResponse\"\x8a\x01\x82\xd3\xe4\x93\x02\x83\x01:\x01*ZG:\x01*\x1a\x42/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}\x1a\x35/api/v1/executions/{id.project}/{id.domain}/{id.name}\x12\x86\x02\n\x10GetExecutionData\x12/.flyteidl.admin.WorkflowExecutionGetDataRequest\x1a\x30.flyteidl.admin.WorkflowExecutionGetDataResponse\"\x8e\x01\x82\xd3\xe4\x93\x02\x87\x01ZI\x12G/api/v1/data/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}\x12:/api/v1/data/executions/{id.project}/{id.domain}/{id.name}\x12\xc5\x01\n\x0eListExecutions\x12#.flyteidl.admin.ResourceListRequest\x1a\x1d.flyteidl.admin.ExecutionList\"o\x82\xd3\xe4\x93\x02iZ:\x12\x38/api/v1/executions/org/{id.org}/{id.project}/{id.domain}\x12+/api/v1/executions/{id.project}/{id.domain}\x12\xf8\x01\n\x12TerminateExecution\x12).flyteidl.admin.ExecutionTerminateRequest\x1a*.flyteidl.admin.ExecutionTerminateResponse\"\x8a\x01\x82\xd3\xe4\x93\x02\x83\x01:\x01*ZG:\x01**B/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}*5/api/v1/executions/{id.project}/{id.domain}/{id
.name}\x12\xe2\x02\n\x10GetNodeExecution\x12\'.flyteidl.admin.NodeExecutionGetRequest\x1a\x1d.flyteidl.admin.NodeExecution\"\x85\x02\x82\xd3\xe4\x93\x02\xfe\x01Z\x8b\x01\x12\x88\x01/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}\x12n/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}\x12\x9e\x03\n\x16GetDynamicNodeWorkflow\x12-.flyteidl.admin.GetDynamicNodeWorkflowRequest\x1a+.flyteidl.admin.DynamicNodeWorkflowResponse\"\xa7\x02\x82\xd3\xe4\x93\x02\xa0\x02Z\x9c\x01\x12\x99\x01/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow\x12\x7f/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow\x12\xf9\x02\n\x12ListNodeExecutions\x12(.flyteidl.admin.NodeExecutionListRequest\x1a!.flyteidl.admin.NodeExecutionList\"\x95\x02\x82\xd3\xe4\x93\x02\x8e\x02Z\x96\x01\x12\x93\x01/api/v1/node_executions/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}\x12s/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}\x12\x8f\x08\n\x19ListNodeExecutionsForTask\x12/.flyteidl.admin.NodeExecutionForTaskListRequest\x1a!.flyteidl.admin.NodeExecutionList\"\x9d\x07\x82\xd3\xe4\x93\x02\x96\x07Z\xe7\x03\x12\xe4\x03/api/v1/children/org/{task_execution_id.node_execution_id.execution_id.org}/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.vers
ion}/{task_execution_id.retry_attempt}\x12\xa9\x03/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}\x12\x83\x03\n\x14GetNodeExecutionData\x12+.flyteidl.admin.NodeExecutionGetDataRequest\x1a,.flyteidl.admin.NodeExecutionGetDataResponse\"\x8f\x02\x82\xd3\xe4\x93\x02\x88\x02Z\x90\x01\x12\x8d\x01/api/v1/data/org/{id.execution_id.org}/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}\x12s/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}\x12\xa8\x01\n\x0fRegisterProject\x12&.flyteidl.admin.ProjectRegisterRequest\x1a\'.flyteidl.admin.ProjectRegisterResponse\"D\x82\xd3\xe4\x93\x02>:\x01*Z\':\x01*\"\"/api/v1/projects/org/{project.org}\"\x10/api/v1/projects\x12\x97\x01\n\rUpdateProject\x12\x17.flyteidl.admin.Project\x1a%.flyteidl.admin.ProjectUpdateResponse\"F\x82\xd3\xe4\x93\x02@:\x01*Z$:\x01*\x1a\x1f/api/v1/projects/org/{org}/{id}\x1a\x15/api/v1/projects/{id}\x12\x84\x01\n\x0cListProjects\x12\".flyteidl.admin.ProjectListRequest\x1a\x18.flyteidl.admin.Projects\"6\x82\xd3\xe4\x93\x02\x30Z\x1c\x12\x1a/api/v1/projects/org/{org}\x12\x10/api/v1/projects\x12\xd5\x01\n\x13\x43reateWorkflowEvent\x12-.flyteidl.admin.WorkflowExecutionEventRequest\x1a..flyteidl.admin.WorkflowExecutionEventResponse\"_\x82\xd3\xe4\x93\x02Y:\x01*Z::\x01*\"5/api/v1/events/org/{event.execution_id.org}/workflows\"\x18/api/v1/events/workflows\x12\xc4\x01\n\x0f\x43reateNodeEvent\x12).flyteidl.admin.NodeExecutionEventRequest\x1a*.flyteidl.admin.NodeExecutionEventResponse\"Z\x82\xd3\xe4\x93\x02T:\x01*Z9:\x01*\"4/api/v1/events/
org/{event.id.execution_id.org}/nodes\"\x14/api/v1/events/nodes\x12\xda\x01\n\x0f\x43reateTaskEvent\x12).flyteidl.admin.TaskExecutionEventRequest\x1a*.flyteidl.admin.TaskExecutionEventResponse\"p\x82\xd3\xe4\x93\x02j:\x01*ZO:\x01*\"J/api/v1/events/org/{event.parent_node_execution_id.execution_id.org}/tasks\"\x14/api/v1/events/tasks\x12\xcb\x05\n\x10GetTaskExecution\x12\'.flyteidl.admin.TaskExecutionGetRequest\x1a\x1d.flyteidl.admin.TaskExecution\"\xee\x04\x82\xd3\xe4\x93\x02\xe7\x04Z\xc8\x02\x12\xc5\x02/api/v1/task_executions/org/{id.node_execution_id.execution_id.org}/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}\x12\x99\x02/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}\x12\xf1\x03\n\x12ListTaskExecutions\x12(.flyteidl.admin.TaskExecutionListRequest\x1a!.flyteidl.admin.TaskExecutionList\"\x8d\x03\x82\xd3\xe4\x93\x02\x86\x03Z\xd6\x01\x12\xd3\x01/api/v1/task_executions/org/{node_execution_id.execution_id.org}/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}\x12\xaa\x01/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}\x12\xec\x05\n\x14GetTaskExecutionData\x12+.flyteidl.admin.TaskExecutionGetDataRequest\x1a,.flyteidl.admin.TaskExecutionGetDataResponse\"\xf8\x04\x82\xd3\xe4\x93\x02\xf1\x04Z\xcd\x02\x12\xca\x02/api/v1/data/org/{id.node_execution_id.execution_id.org}/task_executions/{id.node_execution_id.execution_id.p
roject}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}\x12\x9e\x02/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}\x12\xcb\x02\n\x1dUpdateProjectDomainAttributes\x12\x34.flyteidl.admin.ProjectDomainAttributesUpdateRequest\x1a\x35.flyteidl.admin.ProjectDomainAttributesUpdateResponse\"\xbc\x01\x82\xd3\xe4\x93\x02\xb5\x01:\x01*Zd:\x01*\x1a_/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}\x1aJ/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}\x12\x83\x02\n\x1aGetProjectDomainAttributes\x12\x31.flyteidl.admin.ProjectDomainAttributesGetRequest\x1a\x32.flyteidl.admin.ProjectDomainAttributesGetResponse\"~\x82\xd3\xe4\x93\x02xZ@\x12>/api/v1/project_domain_attributes/org/{org}/{project}/{domain}\x12\x34/api/v1/project_domain_attributes/{project}/{domain}\x12\x93\x02\n\x1d\x44\x65leteProjectDomainAttributes\x12\x34.flyteidl.admin.ProjectDomainAttributesDeleteRequest\x1a\x35.flyteidl.admin.ProjectDomainAttributesDeleteResponse\"\x84\x01\x82\xd3\xe4\x93\x02~:\x01*ZC:\x01**>/api/v1/project_domain_attributes/org/{org}/{project}/{domain}*4/api/v1/project_domain_attributes/{project}/{domain}\x12\x8a\x02\n\x17UpdateProjectAttributes\x12..flyteidl.admin.ProjectAttributesUpdateRequest\x1a/.flyteidl.admin.ProjectAttributesUpdateResponse\"\x8d\x01\x82\xd3\xe4\x93\x02\x86\x01:\x01*ZP:\x01*\x1aK/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}\x1a//api/v1/project_attributes/{attributes.project}\x12\xd8\x01\n\x14GetProjectAttributes\x12+.flyteidl.admin.ProjectAttributesGetRequest\x1a,.flyteidl.admi
n.ProjectAttributesGetResponse\"e\x82\xd3\xe4\x93\x02_Z7\x12\x35/api/v1/project_domain_attributes/org/{org}/{project}\x12$/api/v1/project_attributes/{project}\x12\xe7\x01\n\x17\x44\x65leteProjectAttributes\x12..flyteidl.admin.ProjectAttributesDeleteRequest\x1a/.flyteidl.admin.ProjectAttributesDeleteResponse\"k\x82\xd3\xe4\x93\x02\x65:\x01*Z::\x01**5/api/v1/project_domain_attributes/org/{org}/{project}*$/api/v1/project_attributes/{project}\x12\xdc\x02\n\x18UpdateWorkflowAttributes\x12/.flyteidl.admin.WorkflowAttributesUpdateRequest\x1a\x30.flyteidl.admin.WorkflowAttributesUpdateResponse\"\xdc\x01\x82\xd3\xe4\x93\x02\xd5\x01:\x01*Zt:\x01*\x1ao/api/v1/workflow_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}/{attributes.workflow}\x1aZ/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}\x12\x80\x02\n\x15GetWorkflowAttributes\x12,.flyteidl.admin.WorkflowAttributesGetRequest\x1a-.flyteidl.admin.WorkflowAttributesGetResponse\"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01ZE\x12\x43/api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}\x12\x39/api/v1/workflow_attributes/{project}/{domain}/{workflow}\x12\x8f\x02\n\x18\x44\x65leteWorkflowAttributes\x12/.flyteidl.admin.WorkflowAttributesDeleteRequest\x1a\x30.flyteidl.admin.WorkflowAttributesDeleteResponse\"\x8f\x01\x82\xd3\xe4\x93\x02\x88\x01:\x01*ZH:\x01**C/api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}*9/api/v1/workflow_attributes/{project}/{domain}/{workflow}\x12\xca\x01\n\x17ListMatchableAttributes\x12..flyteidl.admin.ListMatchableAttributesRequest\x1a/.flyteidl.admin.ListMatchableAttributesResponse\"N\x82\xd3\xe4\x93\x02HZ(\x12&/api/v1/matchable_attributes/org/{org}\x12\x1c/api/v1/matchable_attributes\x12\xe8\x01\n\x11ListNamedEntities\x12&.flyteidl.admin.NamedEntityListRequest\x1a\x1f.flyteidl.admin.NamedEntityList\"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01ZE\x12\x43/api/v1/named_entities/org/{org}/{resource_type}/{project}/{domain}\x12\x39/a
pi/v1/named_entities/{resource_type}/{project}/{domain}\x12\x83\x02\n\x0eGetNamedEntity\x12%.flyteidl.admin.NamedEntityGetRequest\x1a\x1b.flyteidl.admin.NamedEntity\"\xac\x01\x82\xd3\xe4\x93\x02\xa5\x01ZX\x12V/api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}\x12I/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}\x12\x9d\x02\n\x11UpdateNamedEntity\x12(.flyteidl.admin.NamedEntityUpdateRequest\x1a).flyteidl.admin.NamedEntityUpdateResponse\"\xb2\x01\x82\xd3\xe4\x93\x02\xab\x01:\x01*Z[:\x01*\x1aV/api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}\x1aI/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}\x12l\n\nGetVersion\x12!.flyteidl.admin.GetVersionRequest\x1a\".flyteidl.admin.GetVersionResponse\"\x17\x82\xd3\xe4\x93\x02\x11\x12\x0f/api/v1/version\x12\xb6\x02\n\x14GetDescriptionEntity\x12 .flyteidl.admin.ObjectGetRequest\x1a!.flyteidl.admin.DescriptionEntity\"\xd8\x01\x82\xd3\xe4\x93\x02\xd1\x01Zn\x12l/api/v1/description_entities/org/{id.org}/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}\x12_/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}\x12\xc8\x03\n\x17ListDescriptionEntities\x12,.flyteidl.admin.DescriptionEntityListRequest\x1a%.flyteidl.admin.DescriptionEntityList\"\xd7\x02\x82\xd3\xe4\x93\x02\xd0\x02Z^\x12\\/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}ZG\x12\x45/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}ZT\x12R/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}\x12O/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}\x12\x95\x02\n\x13GetExecutionMetrics\x12\x32.flyteidl.admin.WorkflowExecutionGetMetricsRequest\x1a\x33.flyteidl.admin.WorkflowExecutionGetMetricsResponse\"\x94\x01\x82\xd3\xe4\x93\x02\x8d\x01ZL\x12J/api/v1/metrics/executions/org/{id.org}/{id
.project}/{id.domain}/{id.name}\x12=/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}B\xc2\x01\n\x14\x63om.flyteidl.serviceB\nAdminProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service\xa2\x02\x03\x46SX\xaa\x02\x10\x46lyteidl.Service\xca\x02\x10\x46lyteidl\\Service\xe2\x02\x1c\x46lyteidl\\Service\\GPBMetadata\xea\x02\x11\x46lyteidl::Serviceb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -39,111 +39,113 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'\n\024com.flyteidl.serviceB\nAdminProtoP\001Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service\242\002\003FSX\252\002\020Flyteidl.Service\312\002\020Flyteidl\\Service\342\002\034Flyteidl\\Service\\GPBMetadata\352\002\021Flyteidl::Service' _ADMINSERVICE.methods_by_name['CreateTask']._options = None - _ADMINSERVICE.methods_by_name['CreateTask']._serialized_options = b'\202\323\344\223\002\022:\001*\"\r/api/v1/tasks' + _ADMINSERVICE.methods_by_name['CreateTask']._serialized_options = b'\202\323\344\223\0023:\001*Z\037:\001*\"\032/api/v1/tasks/org/{id.org}\"\r/api/v1/tasks' _ADMINSERVICE.methods_by_name['GetTask']._options = None - _ADMINSERVICE.methods_by_name['GetTask']._serialized_options = b'\202\323\344\223\002?\022=/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}' + _ADMINSERVICE.methods_by_name['GetTask']._serialized_options = b'\202\323\344\223\002\215\001ZL\022J/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}\022=/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}' _ADMINSERVICE.methods_by_name['ListTaskIds']._options = None - _ADMINSERVICE.methods_by_name['ListTaskIds']._serialized_options = b'\202\323\344\223\002%\022#/api/v1/task_ids/{project}/{domain}' + _ADMINSERVICE.methods_by_name['ListTaskIds']._serialized_options = b'\202\323\344\223\002SZ,\022*/api/v1/tasks/org/{org}/{project}/{domain}\022#/api/v1/task_ids/{project}/{domain}' 
_ADMINSERVICE.methods_by_name['ListTasks']._options = None - _ADMINSERVICE.methods_by_name['ListTasks']._serialized_options = b'\202\323\344\223\002\\Z(\022&/api/v1/tasks/{id.project}/{id.domain}\0220/api/v1/tasks/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['ListTasks']._serialized_options = b'\202\323\344\223\002\324\001Z?\022=/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}Z(\022&/api/v1/tasks/{id.project}/{id.domain}Z5\0223/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}\0220/api/v1/tasks/{id.project}/{id.domain}/{id.name}' _ADMINSERVICE.methods_by_name['CreateWorkflow']._options = None - _ADMINSERVICE.methods_by_name['CreateWorkflow']._serialized_options = b'\202\323\344\223\002\026:\001*\"\021/api/v1/workflows' + _ADMINSERVICE.methods_by_name['CreateWorkflow']._serialized_options = b'\202\323\344\223\002;:\001*Z#:\001*\"\036/api/v1/workflows/org/{id.org}\"\021/api/v1/workflows' _ADMINSERVICE.methods_by_name['GetWorkflow']._options = None - _ADMINSERVICE.methods_by_name['GetWorkflow']._serialized_options = b'\202\323\344\223\002C\022A/api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}' + _ADMINSERVICE.methods_by_name['GetWorkflow']._serialized_options = b'\202\323\344\223\002\225\001ZP\022N/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}\022A/api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}' _ADMINSERVICE.methods_by_name['ListWorkflowIds']._options = None - _ADMINSERVICE.methods_by_name['ListWorkflowIds']._serialized_options = b'\202\323\344\223\002)\022\'/api/v1/workflow_ids/{project}/{domain}' + _ADMINSERVICE.methods_by_name['ListWorkflowIds']._serialized_options = b'\202\323\344\223\002[Z0\022./api/v1/workflows/org/{org}/{project}/{domain}\022\'/api/v1/workflow_ids/{project}/{domain}' _ADMINSERVICE.methods_by_name['ListWorkflows']._options = None - _ADMINSERVICE.methods_by_name['ListWorkflows']._serialized_options = 
b'\202\323\344\223\002dZ,\022*/api/v1/workflows/{id.project}/{id.domain}\0224/api/v1/workflows/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['ListWorkflows']._serialized_options = b'\202\323\344\223\002\344\001ZC\022A/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}Z,\022*/api/v1/workflows/{id.project}/{id.domain}Z9\0227/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}\0224/api/v1/workflows/{id.project}/{id.domain}/{id.name}' _ADMINSERVICE.methods_by_name['CreateLaunchPlan']._options = None - _ADMINSERVICE.methods_by_name['CreateLaunchPlan']._serialized_options = b'\202\323\344\223\002\031:\001*\"\024/api/v1/launch_plans' + _ADMINSERVICE.methods_by_name['CreateLaunchPlan']._serialized_options = b'\202\323\344\223\002A:\001*Z&:\001*\"!/api/v1/launch_plans/org/{id.org}\"\024/api/v1/launch_plans' _ADMINSERVICE.methods_by_name['GetLaunchPlan']._options = None - _ADMINSERVICE.methods_by_name['GetLaunchPlan']._serialized_options = b'\202\323\344\223\002F\022D/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}' + _ADMINSERVICE.methods_by_name['GetLaunchPlan']._serialized_options = b'\202\323\344\223\002\233\001ZS\022Q/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}\022D/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}' _ADMINSERVICE.methods_by_name['GetActiveLaunchPlan']._options = None - _ADMINSERVICE.methods_by_name['GetActiveLaunchPlan']._serialized_options = b'\202\323\344\223\002@\022>/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['GetActiveLaunchPlan']._serialized_options = b'\202\323\344\223\002\217\001ZM\022K/api/v1/active_launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}\022>/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}' _ADMINSERVICE.methods_by_name['ListActiveLaunchPlans']._options = None - _ADMINSERVICE.methods_by_name['ListActiveLaunchPlans']._serialized_options = 
b'\202\323\344\223\0020\022./api/v1/active_launch_plans/{project}/{domain}' + _ADMINSERVICE.methods_by_name['ListActiveLaunchPlans']._serialized_options = b'\202\323\344\223\002lZ:\0228/api/v1/active_launch_plans/org/{org}/{project}/{domain}\022./api/v1/active_launch_plans/{project}/{domain}' _ADMINSERVICE.methods_by_name['ListLaunchPlanIds']._options = None - _ADMINSERVICE.methods_by_name['ListLaunchPlanIds']._serialized_options = b'\202\323\344\223\002,\022*/api/v1/launch_plan_ids/{project}/{domain}' + _ADMINSERVICE.methods_by_name['ListLaunchPlanIds']._serialized_options = b'\202\323\344\223\002dZ6\0224/api/v1/launch_plan_ids/org/{org}/{project}/{domain}\022*/api/v1/launch_plan_ids/{project}/{domain}' _ADMINSERVICE.methods_by_name['ListLaunchPlans']._options = None - _ADMINSERVICE.methods_by_name['ListLaunchPlans']._serialized_options = b'\202\323\344\223\002jZ/\022-/api/v1/launch_plans/{id.project}/{id.domain}\0227/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['ListLaunchPlans']._serialized_options = b'\202\323\344\223\002\360\001ZF\022D/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}Z/\022-/api/v1/launch_plans/{id.project}/{id.domain}Z<\022:/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}\0227/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}' _ADMINSERVICE.methods_by_name['UpdateLaunchPlan']._options = None - _ADMINSERVICE.methods_by_name['UpdateLaunchPlan']._serialized_options = b'\202\323\344\223\002I:\001*\032D/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}' + _ADMINSERVICE.methods_by_name['UpdateLaunchPlan']._serialized_options = b'\202\323\344\223\002\236\001:\001*ZS\032Q/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}\032D/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}' _ADMINSERVICE.methods_by_name['CreateExecution']._options = None - _ADMINSERVICE.methods_by_name['CreateExecution']._serialized_options 
= b'\202\323\344\223\002\027:\001*\"\022/api/v1/executions' + _ADMINSERVICE.methods_by_name['CreateExecution']._serialized_options = b'\202\323\344\223\002::\001*Z!:\001*\032\034/api/v1/executions/org/{org}\"\022/api/v1/executions' _ADMINSERVICE.methods_by_name['RelaunchExecution']._options = None - _ADMINSERVICE.methods_by_name['RelaunchExecution']._serialized_options = b'\202\323\344\223\002 :\001*\"\033/api/v1/executions/relaunch' + _ADMINSERVICE.methods_by_name['RelaunchExecution']._serialized_options = b'\202\323\344\223\002O:\001*Z-:\001*\"(/api/v1/executions/org/{id.org}/relaunch\"\033/api/v1/executions/relaunch' _ADMINSERVICE.methods_by_name['RecoverExecution']._options = None - _ADMINSERVICE.methods_by_name['RecoverExecution']._serialized_options = b'\202\323\344\223\002\037:\001*\"\032/api/v1/executions/recover' + _ADMINSERVICE.methods_by_name['RecoverExecution']._serialized_options = b'\202\323\344\223\002M:\001*Z,:\001*\"\'/api/v1/executions/org/{id.org}/recover\"\032/api/v1/executions/recover' _ADMINSERVICE.methods_by_name['GetExecution']._options = None - _ADMINSERVICE.methods_by_name['GetExecution']._serialized_options = b'\202\323\344\223\0027\0225/api/v1/executions/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['GetExecution']._serialized_options = b'\202\323\344\223\002}ZD\022B/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}\0225/api/v1/executions/{id.project}/{id.domain}/{id.name}' _ADMINSERVICE.methods_by_name['UpdateExecution']._options = None - _ADMINSERVICE.methods_by_name['UpdateExecution']._serialized_options = b'\202\323\344\223\002::\001*\0325/api/v1/executions/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['UpdateExecution']._serialized_options = b'\202\323\344\223\002\203\001:\001*ZG:\001*\032B/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}\0325/api/v1/executions/{id.project}/{id.domain}/{id.name}' _ADMINSERVICE.methods_by_name['GetExecutionData']._options = 
None - _ADMINSERVICE.methods_by_name['GetExecutionData']._serialized_options = b'\202\323\344\223\002<\022:/api/v1/data/executions/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['GetExecutionData']._serialized_options = b'\202\323\344\223\002\207\001ZI\022G/api/v1/data/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}\022:/api/v1/data/executions/{id.project}/{id.domain}/{id.name}' _ADMINSERVICE.methods_by_name['ListExecutions']._options = None - _ADMINSERVICE.methods_by_name['ListExecutions']._serialized_options = b'\202\323\344\223\002-\022+/api/v1/executions/{id.project}/{id.domain}' + _ADMINSERVICE.methods_by_name['ListExecutions']._serialized_options = b'\202\323\344\223\002iZ:\0228/api/v1/executions/org/{id.org}/{id.project}/{id.domain}\022+/api/v1/executions/{id.project}/{id.domain}' _ADMINSERVICE.methods_by_name['TerminateExecution']._options = None - _ADMINSERVICE.methods_by_name['TerminateExecution']._serialized_options = b'\202\323\344\223\002::\001**5/api/v1/executions/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['TerminateExecution']._serialized_options = b'\202\323\344\223\002\203\001:\001*ZG:\001**B/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}*5/api/v1/executions/{id.project}/{id.domain}/{id.name}' _ADMINSERVICE.methods_by_name['GetNodeExecution']._options = None - _ADMINSERVICE.methods_by_name['GetNodeExecution']._serialized_options = b'\202\323\344\223\002p\022n/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}' + _ADMINSERVICE.methods_by_name['GetNodeExecution']._serialized_options = b'\202\323\344\223\002\376\001Z\213\001\022\210\001/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}\022n/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}' + 
_ADMINSERVICE.methods_by_name['GetDynamicNodeWorkflow']._options = None + _ADMINSERVICE.methods_by_name['GetDynamicNodeWorkflow']._serialized_options = b'\202\323\344\223\002\240\002Z\234\001\022\231\001/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow\022\177/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow' _ADMINSERVICE.methods_by_name['ListNodeExecutions']._options = None - _ADMINSERVICE.methods_by_name['ListNodeExecutions']._serialized_options = b'\202\323\344\223\002u\022s/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}' + _ADMINSERVICE.methods_by_name['ListNodeExecutions']._serialized_options = b'\202\323\344\223\002\216\002Z\226\001\022\223\001/api/v1/node_executions/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}\022s/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}' _ADMINSERVICE.methods_by_name['ListNodeExecutionsForTask']._options = None - _ADMINSERVICE.methods_by_name['ListNodeExecutionsForTask']._serialized_options = b'\202\323\344\223\002\254\003\022\251\003/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}' + _ADMINSERVICE.methods_by_name['ListNodeExecutionsForTask']._serialized_options = 
b'\202\323\344\223\002\226\007Z\347\003\022\344\003/api/v1/children/org/{task_execution_id.node_execution_id.execution_id.org}/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}\022\251\003/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}' _ADMINSERVICE.methods_by_name['GetNodeExecutionData']._options = None - _ADMINSERVICE.methods_by_name['GetNodeExecutionData']._serialized_options = b'\202\323\344\223\002u\022s/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}' + _ADMINSERVICE.methods_by_name['GetNodeExecutionData']._serialized_options = b'\202\323\344\223\002\210\002Z\220\001\022\215\001/api/v1/data/org/{id.execution_id.org}/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}\022s/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}' _ADMINSERVICE.methods_by_name['RegisterProject']._options = None - _ADMINSERVICE.methods_by_name['RegisterProject']._serialized_options = b'\202\323\344\223\002\025:\001*\"\020/api/v1/projects' + _ADMINSERVICE.methods_by_name['RegisterProject']._serialized_options = 
b'\202\323\344\223\002>:\001*Z\':\001*\"\"/api/v1/projects/org/{project.org}\"\020/api/v1/projects' _ADMINSERVICE.methods_by_name['UpdateProject']._options = None - _ADMINSERVICE.methods_by_name['UpdateProject']._serialized_options = b'\202\323\344\223\002\032:\001*\032\025/api/v1/projects/{id}' + _ADMINSERVICE.methods_by_name['UpdateProject']._serialized_options = b'\202\323\344\223\002@:\001*Z$:\001*\032\037/api/v1/projects/org/{org}/{id}\032\025/api/v1/projects/{id}' _ADMINSERVICE.methods_by_name['ListProjects']._options = None - _ADMINSERVICE.methods_by_name['ListProjects']._serialized_options = b'\202\323\344\223\002\022\022\020/api/v1/projects' + _ADMINSERVICE.methods_by_name['ListProjects']._serialized_options = b'\202\323\344\223\0020Z\034\022\032/api/v1/projects/org/{org}\022\020/api/v1/projects' _ADMINSERVICE.methods_by_name['CreateWorkflowEvent']._options = None - _ADMINSERVICE.methods_by_name['CreateWorkflowEvent']._serialized_options = b'\202\323\344\223\002\035:\001*\"\030/api/v1/events/workflows' + _ADMINSERVICE.methods_by_name['CreateWorkflowEvent']._serialized_options = b'\202\323\344\223\002Y:\001*Z::\001*\"5/api/v1/events/org/{event.execution_id.org}/workflows\"\030/api/v1/events/workflows' _ADMINSERVICE.methods_by_name['CreateNodeEvent']._options = None - _ADMINSERVICE.methods_by_name['CreateNodeEvent']._serialized_options = b'\202\323\344\223\002\031:\001*\"\024/api/v1/events/nodes' + _ADMINSERVICE.methods_by_name['CreateNodeEvent']._serialized_options = b'\202\323\344\223\002T:\001*Z9:\001*\"4/api/v1/events/org/{event.id.execution_id.org}/nodes\"\024/api/v1/events/nodes' _ADMINSERVICE.methods_by_name['CreateTaskEvent']._options = None - _ADMINSERVICE.methods_by_name['CreateTaskEvent']._serialized_options = b'\202\323\344\223\002\031:\001*\"\024/api/v1/events/tasks' + _ADMINSERVICE.methods_by_name['CreateTaskEvent']._serialized_options = 
b'\202\323\344\223\002j:\001*ZO:\001*\"J/api/v1/events/org/{event.parent_node_execution_id.execution_id.org}/tasks\"\024/api/v1/events/tasks' _ADMINSERVICE.methods_by_name['GetTaskExecution']._options = None - _ADMINSERVICE.methods_by_name['GetTaskExecution']._serialized_options = b'\202\323\344\223\002\234\002\022\231\002/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}' + _ADMINSERVICE.methods_by_name['GetTaskExecution']._serialized_options = b'\202\323\344\223\002\347\004Z\310\002\022\305\002/api/v1/task_executions/org/{id.node_execution_id.execution_id.org}/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}\022\231\002/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}' _ADMINSERVICE.methods_by_name['ListTaskExecutions']._options = None - _ADMINSERVICE.methods_by_name['ListTaskExecutions']._serialized_options = b'\202\323\344\223\002\255\001\022\252\001/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}' + _ADMINSERVICE.methods_by_name['ListTaskExecutions']._serialized_options = 
b'\202\323\344\223\002\206\003Z\326\001\022\323\001/api/v1/task_executions/org/{node_execution_id.execution_id.org}/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}\022\252\001/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}' _ADMINSERVICE.methods_by_name['GetTaskExecutionData']._options = None - _ADMINSERVICE.methods_by_name['GetTaskExecutionData']._serialized_options = b'\202\323\344\223\002\241\002\022\236\002/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}' + _ADMINSERVICE.methods_by_name['GetTaskExecutionData']._serialized_options = b'\202\323\344\223\002\361\004Z\315\002\022\312\002/api/v1/data/org/{id.node_execution_id.execution_id.org}/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}\022\236\002/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}' _ADMINSERVICE.methods_by_name['UpdateProjectDomainAttributes']._options = None - _ADMINSERVICE.methods_by_name['UpdateProjectDomainAttributes']._serialized_options = b'\202\323\344\223\002O:\001*\032J/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}' + 
_ADMINSERVICE.methods_by_name['UpdateProjectDomainAttributes']._serialized_options = b'\202\323\344\223\002\265\001:\001*Zd:\001*\032_/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}\032J/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}' _ADMINSERVICE.methods_by_name['GetProjectDomainAttributes']._options = None - _ADMINSERVICE.methods_by_name['GetProjectDomainAttributes']._serialized_options = b'\202\323\344\223\0026\0224/api/v1/project_domain_attributes/{project}/{domain}' + _ADMINSERVICE.methods_by_name['GetProjectDomainAttributes']._serialized_options = b'\202\323\344\223\002xZ@\022>/api/v1/project_domain_attributes/org/{org}/{project}/{domain}\0224/api/v1/project_domain_attributes/{project}/{domain}' _ADMINSERVICE.methods_by_name['DeleteProjectDomainAttributes']._options = None - _ADMINSERVICE.methods_by_name['DeleteProjectDomainAttributes']._serialized_options = b'\202\323\344\223\0029:\001**4/api/v1/project_domain_attributes/{project}/{domain}' + _ADMINSERVICE.methods_by_name['DeleteProjectDomainAttributes']._serialized_options = b'\202\323\344\223\002~:\001*ZC:\001**>/api/v1/project_domain_attributes/org/{org}/{project}/{domain}*4/api/v1/project_domain_attributes/{project}/{domain}' _ADMINSERVICE.methods_by_name['UpdateProjectAttributes']._options = None - _ADMINSERVICE.methods_by_name['UpdateProjectAttributes']._serialized_options = b'\202\323\344\223\0024:\001*\032//api/v1/project_attributes/{attributes.project}' + _ADMINSERVICE.methods_by_name['UpdateProjectAttributes']._serialized_options = b'\202\323\344\223\002\206\001:\001*ZP:\001*\032K/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}\032//api/v1/project_attributes/{attributes.project}' _ADMINSERVICE.methods_by_name['GetProjectAttributes']._options = None - _ADMINSERVICE.methods_by_name['GetProjectAttributes']._serialized_options = b'\202\323\344\223\002&\022$/api/v1/project_attributes/{project}' + 
_ADMINSERVICE.methods_by_name['GetProjectAttributes']._serialized_options = b'\202\323\344\223\002_Z7\0225/api/v1/project_domain_attributes/org/{org}/{project}\022$/api/v1/project_attributes/{project}' _ADMINSERVICE.methods_by_name['DeleteProjectAttributes']._options = None - _ADMINSERVICE.methods_by_name['DeleteProjectAttributes']._serialized_options = b'\202\323\344\223\002):\001**$/api/v1/project_attributes/{project}' + _ADMINSERVICE.methods_by_name['DeleteProjectAttributes']._serialized_options = b'\202\323\344\223\002e:\001*Z::\001**5/api/v1/project_domain_attributes/org/{org}/{project}*$/api/v1/project_attributes/{project}' _ADMINSERVICE.methods_by_name['UpdateWorkflowAttributes']._options = None - _ADMINSERVICE.methods_by_name['UpdateWorkflowAttributes']._serialized_options = b'\202\323\344\223\002_:\001*\032Z/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}' + _ADMINSERVICE.methods_by_name['UpdateWorkflowAttributes']._serialized_options = b'\202\323\344\223\002\325\001:\001*Zt:\001*\032o/api/v1/workflow_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}/{attributes.workflow}\032Z/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}' _ADMINSERVICE.methods_by_name['GetWorkflowAttributes']._options = None - _ADMINSERVICE.methods_by_name['GetWorkflowAttributes']._serialized_options = b'\202\323\344\223\002;\0229/api/v1/workflow_attributes/{project}/{domain}/{workflow}' + _ADMINSERVICE.methods_by_name['GetWorkflowAttributes']._serialized_options = b'\202\323\344\223\002\202\001ZE\022C/api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}\0229/api/v1/workflow_attributes/{project}/{domain}/{workflow}' _ADMINSERVICE.methods_by_name['DeleteWorkflowAttributes']._options = None - _ADMINSERVICE.methods_by_name['DeleteWorkflowAttributes']._serialized_options = b'\202\323\344\223\002>:\001**9/api/v1/workflow_attributes/{project}/{domain}/{workflow}' + 
_ADMINSERVICE.methods_by_name['DeleteWorkflowAttributes']._serialized_options = b'\202\323\344\223\002\210\001:\001*ZH:\001**C/api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}*9/api/v1/workflow_attributes/{project}/{domain}/{workflow}' _ADMINSERVICE.methods_by_name['ListMatchableAttributes']._options = None - _ADMINSERVICE.methods_by_name['ListMatchableAttributes']._serialized_options = b'\202\323\344\223\002\036\022\034/api/v1/matchable_attributes' + _ADMINSERVICE.methods_by_name['ListMatchableAttributes']._serialized_options = b'\202\323\344\223\002HZ(\022&/api/v1/matchable_attributes/org/{org}\022\034/api/v1/matchable_attributes' _ADMINSERVICE.methods_by_name['ListNamedEntities']._options = None - _ADMINSERVICE.methods_by_name['ListNamedEntities']._serialized_options = b'\202\323\344\223\002;\0229/api/v1/named_entities/{resource_type}/{project}/{domain}' + _ADMINSERVICE.methods_by_name['ListNamedEntities']._serialized_options = b'\202\323\344\223\002\202\001ZE\022C/api/v1/named_entities/org/{org}/{resource_type}/{project}/{domain}\0229/api/v1/named_entities/{resource_type}/{project}/{domain}' _ADMINSERVICE.methods_by_name['GetNamedEntity']._options = None - _ADMINSERVICE.methods_by_name['GetNamedEntity']._serialized_options = b'\202\323\344\223\002K\022I/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['GetNamedEntity']._serialized_options = b'\202\323\344\223\002\245\001ZX\022V/api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}\022I/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}' _ADMINSERVICE.methods_by_name['UpdateNamedEntity']._options = None - _ADMINSERVICE.methods_by_name['UpdateNamedEntity']._serialized_options = b'\202\323\344\223\002N:\001*\032I/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['UpdateNamedEntity']._serialized_options = 
b'\202\323\344\223\002\253\001:\001*Z[:\001*\032V/api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}\032I/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}' _ADMINSERVICE.methods_by_name['GetVersion']._options = None _ADMINSERVICE.methods_by_name['GetVersion']._serialized_options = b'\202\323\344\223\002\021\022\017/api/v1/version' _ADMINSERVICE.methods_by_name['GetDescriptionEntity']._options = None - _ADMINSERVICE.methods_by_name['GetDescriptionEntity']._serialized_options = b'\202\323\344\223\002a\022_/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}' + _ADMINSERVICE.methods_by_name['GetDescriptionEntity']._serialized_options = b'\202\323\344\223\002\321\001Zn\022l/api/v1/description_entities/org/{id.org}/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}\022_/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}' _ADMINSERVICE.methods_by_name['ListDescriptionEntities']._options = None - _ADMINSERVICE.methods_by_name['ListDescriptionEntities']._serialized_options = b'\202\323\344\223\002\232\001ZG\022E/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}\022O/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['ListDescriptionEntities']._serialized_options = b'\202\323\344\223\002\320\002Z^\022\\/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}ZG\022E/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}ZT\022R/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}\022O/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}' _ADMINSERVICE.methods_by_name['GetExecutionMetrics']._options = None - _ADMINSERVICE.methods_by_name['GetExecutionMetrics']._serialized_options = 
b'\202\323\344\223\002?\022=/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}' + _ADMINSERVICE.methods_by_name['GetExecutionMetrics']._serialized_options = b'\202\323\344\223\002\215\001ZL\022J/api/v1/metrics/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}\022=/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}' _globals['_ADMINSERVICE']._serialized_start=609 - _globals['_ADMINSERVICE']._serialized_end=10597 + _globals['_ADMINSERVICE']._serialized_end=16153 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/service/admin_pb2_grpc.py b/flyteidl/gen/pb_python/flyteidl/service/admin_pb2_grpc.py index 2e7bda23fe..5bb1baac60 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/admin_pb2_grpc.py +++ b/flyteidl/gen/pb_python/flyteidl/service/admin_pb2_grpc.py @@ -150,6 +150,11 @@ def __init__(self, channel): request_serializer=flyteidl_dot_admin_dot_node__execution__pb2.NodeExecutionGetRequest.SerializeToString, response_deserializer=flyteidl_dot_admin_dot_node__execution__pb2.NodeExecution.FromString, ) + self.GetDynamicNodeWorkflow = channel.unary_unary( + '/flyteidl.service.AdminService/GetDynamicNodeWorkflow', + request_serializer=flyteidl_dot_admin_dot_node__execution__pb2.GetDynamicNodeWorkflowRequest.SerializeToString, + response_deserializer=flyteidl_dot_admin_dot_node__execution__pb2.DynamicNodeWorkflowResponse.FromString, + ) self.ListNodeExecutions = channel.unary_unary( '/flyteidl.service.AdminService/ListNodeExecutions', request_serializer=flyteidl_dot_admin_dot_node__execution__pb2.NodeExecutionListRequest.SerializeToString, @@ -317,7 +322,7 @@ def GetTask(self, request, context): raise NotImplementedError('Method not implemented!') def ListTaskIds(self, request, context): - """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -474,6 +479,13 @@ def GetNodeExecution(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def GetDynamicNodeWorkflow(self, request, context): + """Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def ListNodeExecutions(self, request, context): """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. """ @@ -503,7 +515,7 @@ def RegisterProject(self, request, context): raise NotImplementedError('Method not implemented!') def UpdateProject(self, request, context): - """Updates an existing :ref:`ref_flyteidl.admin.Project` + """Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. 
""" @@ -512,7 +524,7 @@ def UpdateProject(self, request, context): raise NotImplementedError('Method not implemented!') def ListProjects(self, request, context): - """Fetches a list of :ref:`ref_flyteidl.admin.Project` + """Fetches a list of :ref:`ref_flyteidl.admin.Project` """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -801,6 +813,11 @@ def add_AdminServiceServicer_to_server(servicer, server): request_deserializer=flyteidl_dot_admin_dot_node__execution__pb2.NodeExecutionGetRequest.FromString, response_serializer=flyteidl_dot_admin_dot_node__execution__pb2.NodeExecution.SerializeToString, ), + 'GetDynamicNodeWorkflow': grpc.unary_unary_rpc_method_handler( + servicer.GetDynamicNodeWorkflow, + request_deserializer=flyteidl_dot_admin_dot_node__execution__pb2.GetDynamicNodeWorkflowRequest.FromString, + response_serializer=flyteidl_dot_admin_dot_node__execution__pb2.DynamicNodeWorkflowResponse.SerializeToString, + ), 'ListNodeExecutions': grpc.unary_unary_rpc_method_handler( servicer.ListNodeExecutions, request_deserializer=flyteidl_dot_admin_dot_node__execution__pb2.NodeExecutionListRequest.FromString, @@ -1366,6 +1383,23 @@ def GetNodeExecution(request, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod + def GetDynamicNodeWorkflow(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/flyteidl.service.AdminService/GetDynamicNodeWorkflow', + flyteidl_dot_admin_dot_node__execution__pb2.GetDynamicNodeWorkflowRequest.SerializeToString, + flyteidl_dot_admin_dot_node__execution__pb2.DynamicNodeWorkflowResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod def 
ListNodeExecutions(request, target, diff --git a/flyteidl/gen/pb_python/flyteidl/service/agent_pb2.py b/flyteidl/gen/pb_python/flyteidl/service/agent_pb2.py index f1234012be..4a1a3d8f70 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/agent_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/service/agent_pb2.py @@ -15,7 +15,7 @@ from flyteidl.admin import agent_pb2 as flyteidl_dot_admin_dot_agent__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/service/agent.proto\x12\x10\x66lyteidl.service\x1a\x1cgoogle/api/annotations.proto\x1a\x1a\x66lyteidl/admin/agent.proto2\x8f\x02\n\x11\x41syncAgentService\x12U\n\nCreateTask\x12!.flyteidl.admin.CreateTaskRequest\x1a\".flyteidl.admin.CreateTaskResponse\"\x00\x12L\n\x07GetTask\x12\x1e.flyteidl.admin.GetTaskRequest\x1a\x1f.flyteidl.admin.GetTaskResponse\"\x00\x12U\n\nDeleteTask\x12!.flyteidl.admin.DeleteTaskRequest\x1a\".flyteidl.admin.DeleteTaskResponse\"\x00\x32\xf0\x01\n\x14\x41gentMetadataService\x12k\n\x08GetAgent\x12\x1f.flyteidl.admin.GetAgentRequest\x1a .flyteidl.admin.GetAgentResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/api/v1/agent/{name}\x12k\n\nListAgents\x12!.flyteidl.admin.ListAgentsRequest\x1a\".flyteidl.admin.ListAgentsResponse\"\x16\x82\xd3\xe4\x93\x02\x10\x12\x0e/api/v1/agentsB\xc2\x01\n\x14\x63om.flyteidl.serviceB\nAgentProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service\xa2\x02\x03\x46SX\xaa\x02\x10\x46lyteidl.Service\xca\x02\x10\x46lyteidl\\Service\xe2\x02\x1c\x46lyteidl\\Service\\GPBMetadata\xea\x02\x11\x46lyteidl::Serviceb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1c\x66lyteidl/service/agent.proto\x12\x10\x66lyteidl.service\x1a\x1cgoogle/api/annotations.proto\x1a\x1a\x66lyteidl/admin/agent.proto2\xcc\x03\n\x11\x41syncAgentService\x12U\n\nCreateTask\x12!.flyteidl.admin.CreateTaskRequest\x1a\".flyteidl.admin.CreateTaskResponse\"\x00\x12L\n\x07GetTask\x12\x1e.flyteidl.admin.GetTaskRequest\x1a\x1f.flyteidl.admin.GetTaskResponse\"\x00\x12U\n\nDeleteTask\x12!.flyteidl.admin.DeleteTaskRequest\x1a\".flyteidl.admin.DeleteTaskResponse\"\x00\x12\x61\n\x0eGetTaskMetrics\x12%.flyteidl.admin.GetTaskMetricsRequest\x1a&.flyteidl.admin.GetTaskMetricsResponse\"\x00\x12X\n\x0bGetTaskLogs\x12\".flyteidl.admin.GetTaskLogsRequest\x1a#.flyteidl.admin.GetTaskLogsResponse\"\x00\x32\xf0\x01\n\x14\x41gentMetadataService\x12k\n\x08GetAgent\x12\x1f.flyteidl.admin.GetAgentRequest\x1a .flyteidl.admin.GetAgentResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/api/v1/agent/{name}\x12k\n\nListAgents\x12!.flyteidl.admin.ListAgentsRequest\x1a\".flyteidl.admin.ListAgentsResponse\"\x16\x82\xd3\xe4\x93\x02\x10\x12\x0e/api/v1/agentsB\xc2\x01\n\x14\x63om.flyteidl.serviceB\nAgentProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service\xa2\x02\x03\x46SX\xaa\x02\x10\x46lyteidl.Service\xca\x02\x10\x46lyteidl\\Service\xe2\x02\x1c\x46lyteidl\\Service\\GPBMetadata\xea\x02\x11\x46lyteidl::Serviceb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -29,7 +29,7 @@ _AGENTMETADATASERVICE.methods_by_name['ListAgents']._options = None _AGENTMETADATASERVICE.methods_by_name['ListAgents']._serialized_options = b'\202\323\344\223\002\020\022\016/api/v1/agents' _globals['_ASYNCAGENTSERVICE']._serialized_start=109 - _globals['_ASYNCAGENTSERVICE']._serialized_end=380 - _globals['_AGENTMETADATASERVICE']._serialized_start=383 - _globals['_AGENTMETADATASERVICE']._serialized_end=623 + _globals['_ASYNCAGENTSERVICE']._serialized_end=569 + 
_globals['_AGENTMETADATASERVICE']._serialized_start=572 + _globals['_AGENTMETADATASERVICE']._serialized_end=812 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/service/agent_pb2_grpc.py b/flyteidl/gen/pb_python/flyteidl/service/agent_pb2_grpc.py index 94f75b1682..7d04b0cd33 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/agent_pb2_grpc.py +++ b/flyteidl/gen/pb_python/flyteidl/service/agent_pb2_grpc.py @@ -30,6 +30,16 @@ def __init__(self, channel): request_serializer=flyteidl_dot_admin_dot_agent__pb2.DeleteTaskRequest.SerializeToString, response_deserializer=flyteidl_dot_admin_dot_agent__pb2.DeleteTaskResponse.FromString, ) + self.GetTaskMetrics = channel.unary_unary( + '/flyteidl.service.AsyncAgentService/GetTaskMetrics', + request_serializer=flyteidl_dot_admin_dot_agent__pb2.GetTaskMetricsRequest.SerializeToString, + response_deserializer=flyteidl_dot_admin_dot_agent__pb2.GetTaskMetricsResponse.FromString, + ) + self.GetTaskLogs = channel.unary_unary( + '/flyteidl.service.AsyncAgentService/GetTaskLogs', + request_serializer=flyteidl_dot_admin_dot_agent__pb2.GetTaskLogsRequest.SerializeToString, + response_deserializer=flyteidl_dot_admin_dot_agent__pb2.GetTaskLogsResponse.FromString, + ) class AsyncAgentServiceServicer(object): @@ -57,6 +67,24 @@ def DeleteTask(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def GetTaskMetrics(self, request, context): + """GetTaskMetrics returns one or more task execution metrics, if available. + + Errors include + * OutOfRange if metrics are not available for the specified task time range + * various other errors + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTaskLogs(self, request, context): + """GetTaskLogs returns task execution logs, if available. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def add_AsyncAgentServiceServicer_to_server(servicer, server): rpc_method_handlers = { @@ -75,6 +103,16 @@ def add_AsyncAgentServiceServicer_to_server(servicer, server): request_deserializer=flyteidl_dot_admin_dot_agent__pb2.DeleteTaskRequest.FromString, response_serializer=flyteidl_dot_admin_dot_agent__pb2.DeleteTaskResponse.SerializeToString, ), + 'GetTaskMetrics': grpc.unary_unary_rpc_method_handler( + servicer.GetTaskMetrics, + request_deserializer=flyteidl_dot_admin_dot_agent__pb2.GetTaskMetricsRequest.FromString, + response_serializer=flyteidl_dot_admin_dot_agent__pb2.GetTaskMetricsResponse.SerializeToString, + ), + 'GetTaskLogs': grpc.unary_unary_rpc_method_handler( + servicer.GetTaskLogs, + request_deserializer=flyteidl_dot_admin_dot_agent__pb2.GetTaskLogsRequest.FromString, + response_serializer=flyteidl_dot_admin_dot_agent__pb2.GetTaskLogsResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( 'flyteidl.service.AsyncAgentService', rpc_method_handlers) @@ -137,6 +175,40 @@ def DeleteTask(request, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod + def GetTaskMetrics(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/flyteidl.service.AsyncAgentService/GetTaskMetrics', + flyteidl_dot_admin_dot_agent__pb2.GetTaskMetricsRequest.SerializeToString, + flyteidl_dot_admin_dot_agent__pb2.GetTaskMetricsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetTaskLogs(request, + target, + options=(), + 
channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/flyteidl.service.AsyncAgentService/GetTaskLogs', + flyteidl_dot_admin_dot_agent__pb2.GetTaskLogsRequest.SerializeToString, + flyteidl_dot_admin_dot_agent__pb2.GetTaskLogsResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + class AgentMetadataServiceStub(object): """AgentMetadataService defines an RPC service that is also served over HTTP via grpc-gateway. diff --git a/flyteidl/gen/pb_python/flyteidl/service/external_plugin_service_pb2.py b/flyteidl/gen/pb_python/flyteidl/service/external_plugin_service_pb2.py index 4de8daabf8..db56c22adf 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/external_plugin_service_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/service/external_plugin_service_pb2.py @@ -13,10 +13,9 @@ from flyteidl.core import literals_pb2 as flyteidl_dot_core_dot_literals__pb2 from flyteidl.core import tasks_pb2 as flyteidl_dot_core_dot_tasks__pb2 -from flyteidl.core import interface_pb2 as flyteidl_dot_core_dot_interface__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.flyteidl/service/external_plugin_service.proto\x12\x10\x66lyteidl.service\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x19\x66lyteidl/core/tasks.proto\x1a\x1d\x66lyteidl/core/interface.proto\"\xa8\x01\n\x11TaskCreateRequest\x12\x31\n\x06inputs\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x06inputs\x12\x37\n\x08template\x18\x02 \x01(\x0b\x32\x1b.flyteidl.core.TaskTemplateR\x08template\x12#\n\routput_prefix\x18\x03 \x01(\tR\x0coutputPrefix:\x02\x18\x01\"/\n\x12TaskCreateResponse\x12\x15\n\x06job_id\x18\x01 \x01(\tR\x05jobId:\x02\x18\x01\"H\n\x0eTaskGetRequest\x12\x1b\n\ttask_type\x18\x01 \x01(\tR\x08taskType\x12\x15\n\x06job_id\x18\x02 
\x01(\tR\x05jobId:\x02\x18\x01\"y\n\x0fTaskGetResponse\x12-\n\x05state\x18\x01 \x01(\x0e\x32\x17.flyteidl.service.StateR\x05state\x12\x33\n\x07outputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x07outputs:\x02\x18\x01\"K\n\x11TaskDeleteRequest\x12\x1b\n\ttask_type\x18\x01 \x01(\tR\x08taskType\x12\x15\n\x06job_id\x18\x02 \x01(\tR\x05jobId:\x02\x18\x01\"\x18\n\x12TaskDeleteResponse:\x02\x18\x01*b\n\x05State\x12\x15\n\x11RETRYABLE_FAILURE\x10\x00\x12\x15\n\x11PERMANENT_FAILURE\x10\x01\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x1a\x02\x18\x01\x32\xa8\x02\n\x15\x45xternalPluginService\x12\\\n\nCreateTask\x12#.flyteidl.service.TaskCreateRequest\x1a$.flyteidl.service.TaskCreateResponse\"\x03\x88\x02\x01\x12S\n\x07GetTask\x12 .flyteidl.service.TaskGetRequest\x1a!.flyteidl.service.TaskGetResponse\"\x03\x88\x02\x01\x12\\\n\nDeleteTask\x12#.flyteidl.service.TaskDeleteRequest\x1a$.flyteidl.service.TaskDeleteResponse\"\x03\x88\x02\x01\x42\xd2\x01\n\x14\x63om.flyteidl.serviceB\x1a\x45xternalPluginServiceProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service\xa2\x02\x03\x46SX\xaa\x02\x10\x46lyteidl.Service\xca\x02\x10\x46lyteidl\\Service\xe2\x02\x1c\x46lyteidl\\Service\\GPBMetadata\xea\x02\x11\x46lyteidl::Serviceb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.flyteidl/service/external_plugin_service.proto\x12\x10\x66lyteidl.service\x1a\x1c\x66lyteidl/core/literals.proto\x1a\x19\x66lyteidl/core/tasks.proto\"\xa8\x01\n\x11TaskCreateRequest\x12\x31\n\x06inputs\x18\x01 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x06inputs\x12\x37\n\x08template\x18\x02 \x01(\x0b\x32\x1b.flyteidl.core.TaskTemplateR\x08template\x12#\n\routput_prefix\x18\x03 \x01(\tR\x0coutputPrefix:\x02\x18\x01\"/\n\x12TaskCreateResponse\x12\x15\n\x06job_id\x18\x01 \x01(\tR\x05jobId:\x02\x18\x01\"H\n\x0eTaskGetRequest\x12\x1b\n\ttask_type\x18\x01 \x01(\tR\x08taskType\x12\x15\n\x06job_id\x18\x02 
\x01(\tR\x05jobId:\x02\x18\x01\"y\n\x0fTaskGetResponse\x12-\n\x05state\x18\x01 \x01(\x0e\x32\x17.flyteidl.service.StateR\x05state\x12\x33\n\x07outputs\x18\x02 \x01(\x0b\x32\x19.flyteidl.core.LiteralMapR\x07outputs:\x02\x18\x01\"K\n\x11TaskDeleteRequest\x12\x1b\n\ttask_type\x18\x01 \x01(\tR\x08taskType\x12\x15\n\x06job_id\x18\x02 \x01(\tR\x05jobId:\x02\x18\x01\"\x18\n\x12TaskDeleteResponse:\x02\x18\x01*b\n\x05State\x12\x15\n\x11RETRYABLE_FAILURE\x10\x00\x12\x15\n\x11PERMANENT_FAILURE\x10\x01\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x1a\x02\x18\x01\x32\xa8\x02\n\x15\x45xternalPluginService\x12\\\n\nCreateTask\x12#.flyteidl.service.TaskCreateRequest\x1a$.flyteidl.service.TaskCreateResponse\"\x03\x88\x02\x01\x12S\n\x07GetTask\x12 .flyteidl.service.TaskGetRequest\x1a!.flyteidl.service.TaskGetResponse\"\x03\x88\x02\x01\x12\\\n\nDeleteTask\x12#.flyteidl.service.TaskDeleteRequest\x1a$.flyteidl.service.TaskDeleteResponse\"\x03\x88\x02\x01\x42\xd2\x01\n\x14\x63om.flyteidl.serviceB\x1a\x45xternalPluginServiceProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service\xa2\x02\x03\x46SX\xaa\x02\x10\x46lyteidl.Service\xca\x02\x10\x46lyteidl\\Service\xe2\x02\x1c\x46lyteidl\\Service\\GPBMetadata\xea\x02\x11\x46lyteidl::Serviceb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -45,20 +44,20 @@ _EXTERNALPLUGINSERVICE.methods_by_name['GetTask']._serialized_options = b'\210\002\001' _EXTERNALPLUGINSERVICE.methods_by_name['DeleteTask']._options = None _EXTERNALPLUGINSERVICE.methods_by_name['DeleteTask']._serialized_options = b'\210\002\001' - _globals['_STATE']._serialized_start=676 - _globals['_STATE']._serialized_end=774 - _globals['_TASKCREATEREQUEST']._serialized_start=157 - _globals['_TASKCREATEREQUEST']._serialized_end=325 - _globals['_TASKCREATERESPONSE']._serialized_start=327 - _globals['_TASKCREATERESPONSE']._serialized_end=374 - 
_globals['_TASKGETREQUEST']._serialized_start=376 - _globals['_TASKGETREQUEST']._serialized_end=448 - _globals['_TASKGETRESPONSE']._serialized_start=450 - _globals['_TASKGETRESPONSE']._serialized_end=571 - _globals['_TASKDELETEREQUEST']._serialized_start=573 - _globals['_TASKDELETEREQUEST']._serialized_end=648 - _globals['_TASKDELETERESPONSE']._serialized_start=650 - _globals['_TASKDELETERESPONSE']._serialized_end=674 - _globals['_EXTERNALPLUGINSERVICE']._serialized_start=777 - _globals['_EXTERNALPLUGINSERVICE']._serialized_end=1073 + _globals['_STATE']._serialized_start=645 + _globals['_STATE']._serialized_end=743 + _globals['_TASKCREATEREQUEST']._serialized_start=126 + _globals['_TASKCREATEREQUEST']._serialized_end=294 + _globals['_TASKCREATERESPONSE']._serialized_start=296 + _globals['_TASKCREATERESPONSE']._serialized_end=343 + _globals['_TASKGETREQUEST']._serialized_start=345 + _globals['_TASKGETREQUEST']._serialized_end=417 + _globals['_TASKGETRESPONSE']._serialized_start=419 + _globals['_TASKGETRESPONSE']._serialized_end=540 + _globals['_TASKDELETEREQUEST']._serialized_start=542 + _globals['_TASKDELETEREQUEST']._serialized_end=617 + _globals['_TASKDELETERESPONSE']._serialized_start=619 + _globals['_TASKDELETERESPONSE']._serialized_end=643 + _globals['_EXTERNALPLUGINSERVICE']._serialized_start=746 + _globals['_EXTERNALPLUGINSERVICE']._serialized_end=1042 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_python/flyteidl/service/external_plugin_service_pb2.pyi b/flyteidl/gen/pb_python/flyteidl/service/external_plugin_service_pb2.pyi index b5163a8bfe..c09566929f 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/external_plugin_service_pb2.pyi +++ b/flyteidl/gen/pb_python/flyteidl/service/external_plugin_service_pb2.pyi @@ -1,6 +1,5 @@ from flyteidl.core import literals_pb2 as _literals_pb2 from flyteidl.core import tasks_pb2 as _tasks_pb2 -from flyteidl.core import interface_pb2 as _interface_pb2 from google.protobuf.internal import 
enum_type_wrapper as _enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/README.md b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/README.md index 9136b44368..e243e91f80 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/README.md +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/README.md @@ -71,62 +71,120 @@ All URIs are relative to *http://localhost* Class | Method | HTTP request | Description ------------ | ------------- | ------------- | ------------- *AdminServiceApi* | [**create_execution**](docs/AdminServiceApi.md#create_execution) | **POST** /api/v1/executions | Triggers the creation of a :ref:`ref_flyteidl.admin.Execution` +*AdminServiceApi* | [**create_execution2**](docs/AdminServiceApi.md#create_execution2) | **PUT** /api/v1/executions/org/{org} | Triggers the creation of a :ref:`ref_flyteidl.admin.Execution` *AdminServiceApi* | [**create_launch_plan**](docs/AdminServiceApi.md#create_launch_plan) | **POST** /api/v1/launch_plans | Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition +*AdminServiceApi* | [**create_launch_plan2**](docs/AdminServiceApi.md#create_launch_plan2) | **POST** /api/v1/launch_plans/org/{id.org} | Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition *AdminServiceApi* | [**create_node_event**](docs/AdminServiceApi.md#create_node_event) | **POST** /api/v1/events/nodes | Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. +*AdminServiceApi* | [**create_node_event2**](docs/AdminServiceApi.md#create_node_event2) | **POST** /api/v1/events/org/{event.id.execution_id.org}/nodes | Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. 
*AdminServiceApi* | [**create_task**](docs/AdminServiceApi.md#create_task) | **POST** /api/v1/tasks | Create and upload a :ref:`ref_flyteidl.admin.Task` definition +*AdminServiceApi* | [**create_task2**](docs/AdminServiceApi.md#create_task2) | **POST** /api/v1/tasks/org/{id.org} | Create and upload a :ref:`ref_flyteidl.admin.Task` definition *AdminServiceApi* | [**create_task_event**](docs/AdminServiceApi.md#create_task_event) | **POST** /api/v1/events/tasks | Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. +*AdminServiceApi* | [**create_task_event2**](docs/AdminServiceApi.md#create_task_event2) | **POST** /api/v1/events/org/{event.parent_node_execution_id.execution_id.org}/tasks | Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. *AdminServiceApi* | [**create_workflow**](docs/AdminServiceApi.md#create_workflow) | **POST** /api/v1/workflows | Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition +*AdminServiceApi* | [**create_workflow2**](docs/AdminServiceApi.md#create_workflow2) | **POST** /api/v1/workflows/org/{id.org} | Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition *AdminServiceApi* | [**create_workflow_event**](docs/AdminServiceApi.md#create_workflow_event) | **POST** /api/v1/events/workflows | Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. +*AdminServiceApi* | [**create_workflow_event2**](docs/AdminServiceApi.md#create_workflow_event2) | **POST** /api/v1/events/org/{event.execution_id.org}/workflows | Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. *AdminServiceApi* | [**delete_project_attributes**](docs/AdminServiceApi.md#delete_project_attributes) | **DELETE** /api/v1/project_attributes/{project} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. 
+*AdminServiceApi* | [**delete_project_attributes2**](docs/AdminServiceApi.md#delete_project_attributes2) | **DELETE** /api/v1/project_domain_attributes/org/{org}/{project} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. *AdminServiceApi* | [**delete_project_domain_attributes**](docs/AdminServiceApi.md#delete_project_domain_attributes) | **DELETE** /api/v1/project_domain_attributes/{project}/{domain} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +*AdminServiceApi* | [**delete_project_domain_attributes2**](docs/AdminServiceApi.md#delete_project_domain_attributes2) | **DELETE** /api/v1/project_domain_attributes/org/{org}/{project}/{domain} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. *AdminServiceApi* | [**delete_workflow_attributes**](docs/AdminServiceApi.md#delete_workflow_attributes) | **DELETE** /api/v1/workflow_attributes/{project}/{domain}/{workflow} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. +*AdminServiceApi* | [**delete_workflow_attributes2**](docs/AdminServiceApi.md#delete_workflow_attributes2) | **DELETE** /api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow} | Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. *AdminServiceApi* | [**get_active_launch_plan**](docs/AdminServiceApi.md#get_active_launch_plan) | **GET** /api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name} | Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. +*AdminServiceApi* | [**get_active_launch_plan2**](docs/AdminServiceApi.md#get_active_launch_plan2) | **GET** /api/v1/active_launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. 
*AdminServiceApi* | [**get_description_entity**](docs/AdminServiceApi.md#get_description_entity) | **GET** /api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. +*AdminServiceApi* | [**get_description_entity2**](docs/AdminServiceApi.md#get_description_entity2) | **GET** /api/v1/description_entities/org/{id.org}/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. +*AdminServiceApi* | [**get_dynamic_node_workflow**](docs/AdminServiceApi.md#get_dynamic_node_workflow) | **GET** /api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow | Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. +*AdminServiceApi* | [**get_dynamic_node_workflow2**](docs/AdminServiceApi.md#get_dynamic_node_workflow2) | **GET** /api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow | Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. *AdminServiceApi* | [**get_execution**](docs/AdminServiceApi.md#get_execution) | **GET** /api/v1/executions/{id.project}/{id.domain}/{id.name} | Fetches a :ref:`ref_flyteidl.admin.Execution`. +*AdminServiceApi* | [**get_execution2**](docs/AdminServiceApi.md#get_execution2) | **GET** /api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetches a :ref:`ref_flyteidl.admin.Execution`. *AdminServiceApi* | [**get_execution_data**](docs/AdminServiceApi.md#get_execution_data) | **GET** /api/v1/data/executions/{id.project}/{id.domain}/{id.name} | Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. 
+*AdminServiceApi* | [**get_execution_data2**](docs/AdminServiceApi.md#get_execution_data2) | **GET** /api/v1/data/executions/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. *AdminServiceApi* | [**get_execution_metrics**](docs/AdminServiceApi.md#get_execution_metrics) | **GET** /api/v1/metrics/executions/{id.project}/{id.domain}/{id.name} | Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. +*AdminServiceApi* | [**get_execution_metrics2**](docs/AdminServiceApi.md#get_execution_metrics2) | **GET** /api/v1/metrics/executions/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. *AdminServiceApi* | [**get_launch_plan**](docs/AdminServiceApi.md#get_launch_plan) | **GET** /api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. +*AdminServiceApi* | [**get_launch_plan2**](docs/AdminServiceApi.md#get_launch_plan2) | **GET** /api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. *AdminServiceApi* | [**get_named_entity**](docs/AdminServiceApi.md#get_named_entity) | **GET** /api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name} | Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. +*AdminServiceApi* | [**get_named_entity2**](docs/AdminServiceApi.md#get_named_entity2) | **GET** /api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name} | Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. *AdminServiceApi* | [**get_node_execution**](docs/AdminServiceApi.md#get_node_execution) | **GET** /api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id} | Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. 
+*AdminServiceApi* | [**get_node_execution2**](docs/AdminServiceApi.md#get_node_execution2) | **GET** /api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id} | Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. *AdminServiceApi* | [**get_node_execution_data**](docs/AdminServiceApi.md#get_node_execution_data) | **GET** /api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id} | Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. +*AdminServiceApi* | [**get_node_execution_data2**](docs/AdminServiceApi.md#get_node_execution_data2) | **GET** /api/v1/data/org/{id.execution_id.org}/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id} | Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. *AdminServiceApi* | [**get_project_attributes**](docs/AdminServiceApi.md#get_project_attributes) | **GET** /api/v1/project_attributes/{project} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +*AdminServiceApi* | [**get_project_attributes2**](docs/AdminServiceApi.md#get_project_attributes2) | **GET** /api/v1/project_domain_attributes/org/{org}/{project} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. *AdminServiceApi* | [**get_project_domain_attributes**](docs/AdminServiceApi.md#get_project_domain_attributes) | **GET** /api/v1/project_domain_attributes/{project}/{domain} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. 
+*AdminServiceApi* | [**get_project_domain_attributes2**](docs/AdminServiceApi.md#get_project_domain_attributes2) | **GET** /api/v1/project_domain_attributes/org/{org}/{project}/{domain} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. *AdminServiceApi* | [**get_task**](docs/AdminServiceApi.md#get_task) | **GET** /api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.Task` definition. +*AdminServiceApi* | [**get_task2**](docs/AdminServiceApi.md#get_task2) | **GET** /api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.Task` definition. *AdminServiceApi* | [**get_task_execution**](docs/AdminServiceApi.md#get_task_execution) | **GET** /api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} | Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. +*AdminServiceApi* | [**get_task_execution2**](docs/AdminServiceApi.md#get_task_execution2) | **GET** /api/v1/task_executions/org/{id.node_execution_id.execution_id.org}/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} | Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. 
*AdminServiceApi* | [**get_task_execution_data**](docs/AdminServiceApi.md#get_task_execution_data) | **GET** /api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} | Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. +*AdminServiceApi* | [**get_task_execution_data2**](docs/AdminServiceApi.md#get_task_execution_data2) | **GET** /api/v1/data/org/{id.node_execution_id.execution_id.org}/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt} | Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. *AdminServiceApi* | [**get_version**](docs/AdminServiceApi.md#get_version) | **GET** /api/v1/version | *AdminServiceApi* | [**get_workflow**](docs/AdminServiceApi.md#get_workflow) | **GET** /api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. +*AdminServiceApi* | [**get_workflow2**](docs/AdminServiceApi.md#get_workflow2) | **GET** /api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version} | Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. *AdminServiceApi* | [**get_workflow_attributes**](docs/AdminServiceApi.md#get_workflow_attributes) | **GET** /api/v1/workflow_attributes/{project}/{domain}/{workflow} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. 
+*AdminServiceApi* | [**get_workflow_attributes2**](docs/AdminServiceApi.md#get_workflow_attributes2) | **GET** /api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow} | Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. *AdminServiceApi* | [**list_active_launch_plans**](docs/AdminServiceApi.md#list_active_launch_plans) | **GET** /api/v1/active_launch_plans/{project}/{domain} | List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. +*AdminServiceApi* | [**list_active_launch_plans2**](docs/AdminServiceApi.md#list_active_launch_plans2) | **GET** /api/v1/active_launch_plans/org/{org}/{project}/{domain} | List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. *AdminServiceApi* | [**list_description_entities**](docs/AdminServiceApi.md#list_description_entities) | **GET** /api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. -*AdminServiceApi* | [**list_description_entities2**](docs/AdminServiceApi.md#list_description_entities2) | **GET** /api/v1/description_entities/{resource_type}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. +*AdminServiceApi* | [**list_description_entities2**](docs/AdminServiceApi.md#list_description_entities2) | **GET** /api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. +*AdminServiceApi* | [**list_description_entities3**](docs/AdminServiceApi.md#list_description_entities3) | **GET** /api/v1/description_entities/{resource_type}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. 
+*AdminServiceApi* | [**list_description_entities4**](docs/AdminServiceApi.md#list_description_entities4) | **GET** /api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. *AdminServiceApi* | [**list_executions**](docs/AdminServiceApi.md#list_executions) | **GET** /api/v1/executions/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Execution`. +*AdminServiceApi* | [**list_executions2**](docs/AdminServiceApi.md#list_executions2) | **GET** /api/v1/executions/org/{id.org}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Execution`. *AdminServiceApi* | [**list_launch_plan_ids**](docs/AdminServiceApi.md#list_launch_plan_ids) | **GET** /api/v1/launch_plan_ids/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. +*AdminServiceApi* | [**list_launch_plan_ids2**](docs/AdminServiceApi.md#list_launch_plan_ids2) | **GET** /api/v1/launch_plan_ids/org/{org}/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. *AdminServiceApi* | [**list_launch_plans**](docs/AdminServiceApi.md#list_launch_plans) | **GET** /api/v1/launch_plans/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. -*AdminServiceApi* | [**list_launch_plans2**](docs/AdminServiceApi.md#list_launch_plans2) | **GET** /api/v1/launch_plans/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. +*AdminServiceApi* | [**list_launch_plans2**](docs/AdminServiceApi.md#list_launch_plans2) | **GET** /api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. 
+*AdminServiceApi* | [**list_launch_plans3**](docs/AdminServiceApi.md#list_launch_plans3) | **GET** /api/v1/launch_plans/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. +*AdminServiceApi* | [**list_launch_plans4**](docs/AdminServiceApi.md#list_launch_plans4) | **GET** /api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. *AdminServiceApi* | [**list_matchable_attributes**](docs/AdminServiceApi.md#list_matchable_attributes) | **GET** /api/v1/matchable_attributes | Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. +*AdminServiceApi* | [**list_matchable_attributes2**](docs/AdminServiceApi.md#list_matchable_attributes2) | **GET** /api/v1/matchable_attributes/org/{org} | Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. *AdminServiceApi* | [**list_named_entities**](docs/AdminServiceApi.md#list_named_entities) | **GET** /api/v1/named_entities/{resource_type}/{project}/{domain} | Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. +*AdminServiceApi* | [**list_named_entities2**](docs/AdminServiceApi.md#list_named_entities2) | **GET** /api/v1/named_entities/org/{org}/{resource_type}/{project}/{domain} | Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. *AdminServiceApi* | [**list_node_executions**](docs/AdminServiceApi.md#list_node_executions) | **GET** /api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name} | Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. 
+*AdminServiceApi* | [**list_node_executions2**](docs/AdminServiceApi.md#list_node_executions2) | **GET** /api/v1/node_executions/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name} | Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. *AdminServiceApi* | [**list_node_executions_for_task**](docs/AdminServiceApi.md#list_node_executions_for_task) | **GET** /api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt} | Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. +*AdminServiceApi* | [**list_node_executions_for_task2**](docs/AdminServiceApi.md#list_node_executions_for_task2) | **GET** /api/v1/children/org/{task_execution_id.node_execution_id.execution_id.org}/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt} | Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. 
*AdminServiceApi* | [**list_projects**](docs/AdminServiceApi.md#list_projects) | **GET** /api/v1/projects | Fetches a list of :ref:`ref_flyteidl.admin.Project` +*AdminServiceApi* | [**list_projects2**](docs/AdminServiceApi.md#list_projects2) | **GET** /api/v1/projects/org/{org} | Fetches a list of :ref:`ref_flyteidl.admin.Project` *AdminServiceApi* | [**list_task_executions**](docs/AdminServiceApi.md#list_task_executions) | **GET** /api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id} | Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. +*AdminServiceApi* | [**list_task_executions2**](docs/AdminServiceApi.md#list_task_executions2) | **GET** /api/v1/task_executions/org/{node_execution_id.execution_id.org}/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id} | Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. *AdminServiceApi* | [**list_task_ids**](docs/AdminServiceApi.md#list_task_ids) | **GET** /api/v1/task_ids/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. +*AdminServiceApi* | [**list_task_ids2**](docs/AdminServiceApi.md#list_task_ids2) | **GET** /api/v1/tasks/org/{org}/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. *AdminServiceApi* | [**list_tasks**](docs/AdminServiceApi.md#list_tasks) | **GET** /api/v1/tasks/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. -*AdminServiceApi* | [**list_tasks2**](docs/AdminServiceApi.md#list_tasks2) | **GET** /api/v1/tasks/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. 
+*AdminServiceApi* | [**list_tasks2**](docs/AdminServiceApi.md#list_tasks2) | **GET** /api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. +*AdminServiceApi* | [**list_tasks3**](docs/AdminServiceApi.md#list_tasks3) | **GET** /api/v1/tasks/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. +*AdminServiceApi* | [**list_tasks4**](docs/AdminServiceApi.md#list_tasks4) | **GET** /api/v1/tasks/org/{id.org}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. *AdminServiceApi* | [**list_workflow_ids**](docs/AdminServiceApi.md#list_workflow_ids) | **GET** /api/v1/workflow_ids/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. +*AdminServiceApi* | [**list_workflow_ids2**](docs/AdminServiceApi.md#list_workflow_ids2) | **GET** /api/v1/workflows/org/{org}/{project}/{domain} | Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. *AdminServiceApi* | [**list_workflows**](docs/AdminServiceApi.md#list_workflows) | **GET** /api/v1/workflows/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. -*AdminServiceApi* | [**list_workflows2**](docs/AdminServiceApi.md#list_workflows2) | **GET** /api/v1/workflows/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. +*AdminServiceApi* | [**list_workflows2**](docs/AdminServiceApi.md#list_workflows2) | **GET** /api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name} | Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. +*AdminServiceApi* | [**list_workflows3**](docs/AdminServiceApi.md#list_workflows3) | **GET** /api/v1/workflows/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. 
+*AdminServiceApi* | [**list_workflows4**](docs/AdminServiceApi.md#list_workflows4) | **GET** /api/v1/workflows/org/{id.org}/{id.project}/{id.domain} | Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. *AdminServiceApi* | [**recover_execution**](docs/AdminServiceApi.md#recover_execution) | **POST** /api/v1/executions/recover | Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. +*AdminServiceApi* | [**recover_execution2**](docs/AdminServiceApi.md#recover_execution2) | **POST** /api/v1/executions/org/{id.org}/recover | Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. *AdminServiceApi* | [**register_project**](docs/AdminServiceApi.md#register_project) | **POST** /api/v1/projects | Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. 
+*AdminServiceApi* | [**register_project2**](docs/AdminServiceApi.md#register_project2) | **POST** /api/v1/projects/org/{project.org} | Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. *AdminServiceApi* | [**relaunch_execution**](docs/AdminServiceApi.md#relaunch_execution) | **POST** /api/v1/executions/relaunch | Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` +*AdminServiceApi* | [**relaunch_execution2**](docs/AdminServiceApi.md#relaunch_execution2) | **POST** /api/v1/executions/org/{id.org}/relaunch | Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` *AdminServiceApi* | [**terminate_execution**](docs/AdminServiceApi.md#terminate_execution) | **DELETE** /api/v1/executions/{id.project}/{id.domain}/{id.name} | Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. +*AdminServiceApi* | [**terminate_execution2**](docs/AdminServiceApi.md#terminate_execution2) | **DELETE** /api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name} | Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. *AdminServiceApi* | [**update_execution**](docs/AdminServiceApi.md#update_execution) | **PUT** /api/v1/executions/{id.project}/{id.domain}/{id.name} | Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. +*AdminServiceApi* | [**update_execution2**](docs/AdminServiceApi.md#update_execution2) | **PUT** /api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name} | Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. *AdminServiceApi* | [**update_launch_plan**](docs/AdminServiceApi.md#update_launch_plan) | **PUT** /api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version} | Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. 
+*AdminServiceApi* | [**update_launch_plan2**](docs/AdminServiceApi.md#update_launch_plan2) | **PUT** /api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version} | Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. *AdminServiceApi* | [**update_named_entity**](docs/AdminServiceApi.md#update_named_entity) | **PUT** /api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name} | Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. -*AdminServiceApi* | [**update_project**](docs/AdminServiceApi.md#update_project) | **PUT** /api/v1/projects/{id} | Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. +*AdminServiceApi* | [**update_named_entity2**](docs/AdminServiceApi.md#update_named_entity2) | **PUT** /api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name} | Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. +*AdminServiceApi* | [**update_project**](docs/AdminServiceApi.md#update_project) | **PUT** /api/v1/projects/{id} | Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. +*AdminServiceApi* | [**update_project2**](docs/AdminServiceApi.md#update_project2) | **PUT** /api/v1/projects/org/{org}/{id} | Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. 
*AdminServiceApi* | [**update_project_attributes**](docs/AdminServiceApi.md#update_project_attributes) | **PUT** /api/v1/project_attributes/{attributes.project} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level +*AdminServiceApi* | [**update_project_attributes2**](docs/AdminServiceApi.md#update_project_attributes2) | **PUT** /api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level *AdminServiceApi* | [**update_project_domain_attributes**](docs/AdminServiceApi.md#update_project_domain_attributes) | **PUT** /api/v1/project_domain_attributes/{attributes.project}/{attributes.domain} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. +*AdminServiceApi* | [**update_project_domain_attributes2**](docs/AdminServiceApi.md#update_project_domain_attributes2) | **PUT** /api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. *AdminServiceApi* | [**update_workflow_attributes**](docs/AdminServiceApi.md#update_workflow_attributes) | **PUT** /api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. +*AdminServiceApi* | [**update_workflow_attributes2**](docs/AdminServiceApi.md#update_workflow_attributes2) | **PUT** /api/v1/workflow_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}/{attributes.workflow} | Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. 
## Documentation For Models @@ -143,6 +201,7 @@ Class | Method | HTTP request | Description - [AdminDescriptionEntityList](docs/AdminDescriptionEntityList.md) - [AdminDescriptionFormat](docs/AdminDescriptionFormat.md) - [AdminDomain](docs/AdminDomain.md) + - [AdminDynamicNodeWorkflowResponse](docs/AdminDynamicNodeWorkflowResponse.md) - [AdminEmailNotification](docs/AdminEmailNotification.md) - [AdminEnvs](docs/AdminEnvs.md) - [AdminExecution](docs/AdminExecution.md) @@ -351,6 +410,7 @@ Class | Method | HTTP request | Description - [CoreTaskNode](docs/CoreTaskNode.md) - [CoreTaskNodeOverrides](docs/CoreTaskNodeOverrides.md) - [CoreTaskTemplate](docs/CoreTaskTemplate.md) + - [CoreTimePartition](docs/CoreTimePartition.md) - [CoreTypeAnnotation](docs/CoreTypeAnnotation.md) - [CoreTypeStructure](docs/CoreTypeStructure.md) - [CoreTypedInterface](docs/CoreTypedInterface.md) diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/__init__.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/__init__.py index 6cbddc0b5b..f57c6ff9a0 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/__init__.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/__init__.py @@ -34,6 +34,7 @@ from flyteadmin.models.admin_description_entity_list import AdminDescriptionEntityList from flyteadmin.models.admin_description_format import AdminDescriptionFormat from flyteadmin.models.admin_domain import AdminDomain +from flyteadmin.models.admin_dynamic_node_workflow_response import AdminDynamicNodeWorkflowResponse from flyteadmin.models.admin_email_notification import AdminEmailNotification from flyteadmin.models.admin_envs import AdminEnvs from flyteadmin.models.admin_execution import AdminExecution @@ -242,6 +243,7 @@ from flyteadmin.models.core_task_node import CoreTaskNode from flyteadmin.models.core_task_node_overrides import CoreTaskNodeOverrides from flyteadmin.models.core_task_template import CoreTaskTemplate +from 
flyteadmin.models.core_time_partition import CoreTimePartition from flyteadmin.models.core_type_annotation import CoreTypeAnnotation from flyteadmin.models.core_type_structure import CoreTypeStructure from flyteadmin.models.core_typed_interface import CoreTypedInterface diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/api/admin_service_api.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/api/admin_service_api.py index 01e9838ee5..cfbd1f5ce0 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/api/admin_service_api.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/api/admin_service_api.py @@ -130,43 +130,45 @@ def create_execution_with_http_info(self, body, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def create_launch_plan(self, body, **kwargs): # noqa: E501 - """Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition # noqa: E501 + def create_execution2(self, org, body, **kwargs): # noqa: E501 + """Triggers the creation of a :ref:`ref_flyteidl.admin.Execution` # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_launch_plan(body, async_req=True) + >>> thread = api.create_execution2(org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminLaunchPlanCreateRequest body: (required) - :return: AdminLaunchPlanCreateResponse + :param str org: Optional, org key applied to the resource. (required) + :param AdminExecutionCreateRequest body: (required) + :return: AdminExecutionCreateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.create_launch_plan_with_http_info(body, **kwargs) # noqa: E501 + return self.create_execution2_with_http_info(org, body, **kwargs) # noqa: E501 else: - (data) = self.create_launch_plan_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_execution2_with_http_info(org, body, **kwargs) # noqa: E501 return data - def create_launch_plan_with_http_info(self, body, **kwargs): # noqa: E501 - """Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition # noqa: E501 + def create_execution2_with_http_info(self, org, body, **kwargs): # noqa: E501 + """Triggers the creation of a :ref:`ref_flyteidl.admin.Execution` # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_launch_plan_with_http_info(body, async_req=True) + >>> thread = api.create_execution2_with_http_info(org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminLaunchPlanCreateRequest body: (required) - :return: AdminLaunchPlanCreateResponse + :param str org: Optional, org key applied to the resource. (required) + :param AdminExecutionCreateRequest body: (required) + :return: AdminExecutionCreateResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['body'] # noqa: E501 + all_params = ['org', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -177,18 +179,24 @@ def create_launch_plan_with_http_info(self, body, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method create_launch_plan" % key + " to method create_execution2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `create_execution2`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_launch_plan`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `create_execution2`") # noqa: E501 collection_formats = {} path_params = {} + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 query_params = [] @@ -212,14 +220,14 @@ def create_launch_plan_with_http_info(self, body, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/launch_plans', 'POST', + '/api/v1/executions/org/{org}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminLaunchPlanCreateResponse', # noqa: E501 + response_type='AdminExecutionCreateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -227,38 +235,38 @@ def create_launch_plan_with_http_info(self, body, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def create_node_event(self, body, **kwargs): # noqa: E501 - """Indicates 
a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. # noqa: E501 + def create_launch_plan(self, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_node_event(body, async_req=True) + >>> thread = api.create_launch_plan(body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminNodeExecutionEventRequest body: (required) - :return: AdminNodeExecutionEventResponse + :param AdminLaunchPlanCreateRequest body: (required) + :return: AdminLaunchPlanCreateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.create_node_event_with_http_info(body, **kwargs) # noqa: E501 + return self.create_launch_plan_with_http_info(body, **kwargs) # noqa: E501 else: - (data) = self.create_node_event_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_launch_plan_with_http_info(body, **kwargs) # noqa: E501 return data - def create_node_event_with_http_info(self, body, **kwargs): # noqa: E501 - """Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. # noqa: E501 + def create_launch_plan_with_http_info(self, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_node_event_with_http_info(body, async_req=True) + >>> thread = api.create_launch_plan_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminNodeExecutionEventRequest body: (required) - :return: AdminNodeExecutionEventResponse + :param AdminLaunchPlanCreateRequest body: (required) + :return: AdminLaunchPlanCreateResponse If the method is called asynchronously, returns the request thread. """ @@ -274,14 +282,14 @@ def create_node_event_with_http_info(self, body, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method create_node_event" % key + " to method create_launch_plan" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_node_event`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `create_launch_plan`") # noqa: E501 collection_formats = {} @@ -309,14 +317,14 @@ def create_node_event_with_http_info(self, body, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/events/nodes', 'POST', + '/api/v1/launch_plans', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminNodeExecutionEventResponse', # noqa: E501 + response_type='AdminLaunchPlanCreateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -324,43 +332,45 @@ def create_node_event_with_http_info(self, body, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def create_task(self, body, **kwargs): # noqa: E501 - 
"""Create and upload a :ref:`ref_flyteidl.admin.Task` definition # noqa: E501 + def create_launch_plan2(self, id_org, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_task(body, async_req=True) + >>> thread = api.create_launch_plan2(id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param FlyteidladminTaskCreateRequest body: (required) - :return: FlyteidladminTaskCreateResponse + :param str id_org: Optional, org key applied to the resource. (required) + :param AdminLaunchPlanCreateRequest body: (required) + :return: AdminLaunchPlanCreateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.create_task_with_http_info(body, **kwargs) # noqa: E501 + return self.create_launch_plan2_with_http_info(id_org, body, **kwargs) # noqa: E501 else: - (data) = self.create_task_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_launch_plan2_with_http_info(id_org, body, **kwargs) # noqa: E501 return data - def create_task_with_http_info(self, body, **kwargs): # noqa: E501 - """Create and upload a :ref:`ref_flyteidl.admin.Task` definition # noqa: E501 + def create_launch_plan2_with_http_info(self, id_org, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_task_with_http_info(body, async_req=True) + >>> thread = api.create_launch_plan2_with_http_info(id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param FlyteidladminTaskCreateRequest body: (required) - :return: FlyteidladminTaskCreateResponse + :param str id_org: Optional, org key applied to the resource. (required) + :param AdminLaunchPlanCreateRequest body: (required) + :return: AdminLaunchPlanCreateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['body'] # noqa: E501 + all_params = ['id_org', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -371,18 +381,24 @@ def create_task_with_http_info(self, body, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method create_task" % key + " to method create_launch_plan2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `create_launch_plan2`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_task`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `create_launch_plan2`") # noqa: E501 collection_formats = {} path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 query_params = [] @@ -406,14 +422,14 @@ def create_task_with_http_info(self, body, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/tasks', 'POST', + '/api/v1/launch_plans/org/{id.org}', 'POST', 
path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='FlyteidladminTaskCreateResponse', # noqa: E501 + response_type='AdminLaunchPlanCreateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -421,38 +437,38 @@ def create_task_with_http_info(self, body, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def create_task_event(self, body, **kwargs): # noqa: E501 - """Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. # noqa: E501 + def create_node_event(self, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_task_event(body, async_req=True) + >>> thread = api.create_node_event(body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminTaskExecutionEventRequest body: (required) - :return: AdminTaskExecutionEventResponse + :param AdminNodeExecutionEventRequest body: (required) + :return: AdminNodeExecutionEventResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.create_task_event_with_http_info(body, **kwargs) # noqa: E501 + return self.create_node_event_with_http_info(body, **kwargs) # noqa: E501 else: - (data) = self.create_task_event_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_node_event_with_http_info(body, **kwargs) # noqa: E501 return data - def create_task_event_with_http_info(self, body, **kwargs): # noqa: E501 - """Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. 
# noqa: E501 + def create_node_event_with_http_info(self, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_task_event_with_http_info(body, async_req=True) + >>> thread = api.create_node_event_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminTaskExecutionEventRequest body: (required) - :return: AdminTaskExecutionEventResponse + :param AdminNodeExecutionEventRequest body: (required) + :return: AdminNodeExecutionEventResponse If the method is called asynchronously, returns the request thread. """ @@ -468,14 +484,14 @@ def create_task_event_with_http_info(self, body, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method create_task_event" % key + " to method create_node_event" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_task_event`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `create_node_event`") # noqa: E501 collection_formats = {} @@ -503,14 +519,14 @@ def create_task_event_with_http_info(self, body, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/events/tasks', 'POST', + '/api/v1/events/nodes', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminTaskExecutionEventResponse', # noqa: E501 + response_type='AdminNodeExecutionEventResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ 
-518,43 +534,45 @@ def create_task_event_with_http_info(self, body, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def create_workflow(self, body, **kwargs): # noqa: E501 - """Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition # noqa: E501 + def create_node_event2(self, event_id_execution_id_org, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_workflow(body, async_req=True) + >>> thread = api.create_node_event2(event_id_execution_id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminWorkflowCreateRequest body: (required) - :return: AdminWorkflowCreateResponse + :param str event_id_execution_id_org: Optional, org key applied to the resource. (required) + :param AdminNodeExecutionEventRequest body: (required) + :return: AdminNodeExecutionEventResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.create_workflow_with_http_info(body, **kwargs) # noqa: E501 + return self.create_node_event2_with_http_info(event_id_execution_id_org, body, **kwargs) # noqa: E501 else: - (data) = self.create_workflow_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_node_event2_with_http_info(event_id_execution_id_org, body, **kwargs) # noqa: E501 return data - def create_workflow_with_http_info(self, body, **kwargs): # noqa: E501 - """Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition # noqa: E501 + def create_node_event2_with_http_info(self, event_id_execution_id_org, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. 
# noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_workflow_with_http_info(body, async_req=True) + >>> thread = api.create_node_event2_with_http_info(event_id_execution_id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminWorkflowCreateRequest body: (required) - :return: AdminWorkflowCreateResponse + :param str event_id_execution_id_org: Optional, org key applied to the resource. (required) + :param AdminNodeExecutionEventRequest body: (required) + :return: AdminNodeExecutionEventResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['body'] # noqa: E501 + all_params = ['event_id_execution_id_org', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -565,18 +583,24 @@ def create_workflow_with_http_info(self, body, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method create_workflow" % key + " to method create_node_event2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'event_id_execution_id_org' is set + if ('event_id_execution_id_org' not in params or + params['event_id_execution_id_org'] is None): + raise ValueError("Missing the required parameter `event_id_execution_id_org` when calling `create_node_event2`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_workflow`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `create_node_event2`") # noqa: E501 collection_formats = {} path_params = {} + if 'event_id_execution_id_org' in params: + path_params['event.id.execution_id.org'] = params['event_id_execution_id_org'] 
# noqa: E501 query_params = [] @@ -600,14 +624,14 @@ def create_workflow_with_http_info(self, body, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/workflows', 'POST', + '/api/v1/events/org/{event.id.execution_id.org}/nodes', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminWorkflowCreateResponse', # noqa: E501 + response_type='AdminNodeExecutionEventResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -615,38 +639,38 @@ def create_workflow_with_http_info(self, body, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def create_workflow_event(self, body, **kwargs): # noqa: E501 - """Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. # noqa: E501 + def create_task(self, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.Task` definition # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_workflow_event(body, async_req=True) + >>> thread = api.create_task(body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminWorkflowExecutionEventRequest body: (required) - :return: AdminWorkflowExecutionEventResponse + :param FlyteidladminTaskCreateRequest body: (required) + :return: FlyteidladminTaskCreateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.create_workflow_event_with_http_info(body, **kwargs) # noqa: E501 + return self.create_task_with_http_info(body, **kwargs) # noqa: E501 else: - (data) = self.create_workflow_event_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.create_task_with_http_info(body, **kwargs) # noqa: E501 return data - def create_workflow_event_with_http_info(self, body, **kwargs): # noqa: E501 - """Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. # noqa: E501 + def create_task_with_http_info(self, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.Task` definition # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_workflow_event_with_http_info(body, async_req=True) + >>> thread = api.create_task_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminWorkflowExecutionEventRequest body: (required) - :return: AdminWorkflowExecutionEventResponse + :param FlyteidladminTaskCreateRequest body: (required) + :return: FlyteidladminTaskCreateResponse If the method is called asynchronously, returns the request thread. 
""" @@ -662,14 +686,14 @@ def create_workflow_event_with_http_info(self, body, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method create_workflow_event" % key + " to method create_task" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `create_workflow_event`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `create_task`") # noqa: E501 collection_formats = {} @@ -697,14 +721,14 @@ def create_workflow_event_with_http_info(self, body, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/events/workflows', 'POST', + '/api/v1/tasks', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminWorkflowExecutionEventResponse', # noqa: E501 + response_type='FlyteidladminTaskCreateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -712,45 +736,45 @@ def create_workflow_event_with_http_info(self, body, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def delete_project_attributes(self, project, body, **kwargs): # noqa: E501 - """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + def create_task2(self, id_org, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.Task` definition # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_project_attributes(project, body, async_req=True) + >>> thread = api.create_task2(id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Unique project id which this set of attributes references. +required (required) - :param AdminProjectAttributesDeleteRequest body: (required) - :return: AdminProjectAttributesDeleteResponse + :param str id_org: Optional, org key applied to the resource. (required) + :param FlyteidladminTaskCreateRequest body: (required) + :return: FlyteidladminTaskCreateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.delete_project_attributes_with_http_info(project, body, **kwargs) # noqa: E501 + return self.create_task2_with_http_info(id_org, body, **kwargs) # noqa: E501 else: - (data) = self.delete_project_attributes_with_http_info(project, body, **kwargs) # noqa: E501 + (data) = self.create_task2_with_http_info(id_org, body, **kwargs) # noqa: E501 return data - def delete_project_attributes_with_http_info(self, project, body, **kwargs): # noqa: E501 - """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + def create_task2_with_http_info(self, id_org, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.Task` definition # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_project_attributes_with_http_info(project, body, async_req=True) + >>> thread = api.create_task2_with_http_info(id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Unique project id which this set of attributes references. 
+required (required) - :param AdminProjectAttributesDeleteRequest body: (required) - :return: AdminProjectAttributesDeleteResponse + :param str id_org: Optional, org key applied to the resource. (required) + :param FlyteidladminTaskCreateRequest body: (required) + :return: FlyteidladminTaskCreateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['project', 'body'] # noqa: E501 + all_params = ['id_org', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -761,24 +785,24 @@ def delete_project_attributes_with_http_info(self, project, body, **kwargs): # if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method delete_project_attributes" % key + " to method create_task2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'project' is set - if ('project' not in params or - params['project'] is None): - raise ValueError("Missing the required parameter `project` when calling `delete_project_attributes`") # noqa: E501 + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `create_task2`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `delete_project_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `create_task2`") # noqa: E501 collection_formats = {} path_params = {} - if 'project' in params: - path_params['project'] = params['project'] # noqa: E501 + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 query_params = [] @@ -802,14 +826,14 @@ def delete_project_attributes_with_http_info(self, project, body, **kwargs): # auth_settings = [] # 
noqa: E501 return self.api_client.call_api( - '/api/v1/project_attributes/{project}', 'DELETE', + '/api/v1/tasks/org/{id.org}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminProjectAttributesDeleteResponse', # noqa: E501 + response_type='FlyteidladminTaskCreateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -817,47 +841,43 @@ def delete_project_attributes_with_http_info(self, project, body, **kwargs): # _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def delete_project_domain_attributes(self, project, domain, body, **kwargs): # noqa: E501 - """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + def create_task_event(self, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_project_domain_attributes(project, domain, body, async_req=True) + >>> thread = api.create_task_event(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Unique project id which this set of attributes references. +required (required) - :param str domain: Unique domain id which this set of attributes references. +required (required) - :param AdminProjectDomainAttributesDeleteRequest body: (required) - :return: AdminProjectDomainAttributesDeleteResponse + :param AdminTaskExecutionEventRequest body: (required) + :return: AdminTaskExecutionEventResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.delete_project_domain_attributes_with_http_info(project, domain, body, **kwargs) # noqa: E501 + return self.create_task_event_with_http_info(body, **kwargs) # noqa: E501 else: - (data) = self.delete_project_domain_attributes_with_http_info(project, domain, body, **kwargs) # noqa: E501 + (data) = self.create_task_event_with_http_info(body, **kwargs) # noqa: E501 return data - def delete_project_domain_attributes_with_http_info(self, project, domain, body, **kwargs): # noqa: E501 - """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + def create_task_event_with_http_info(self, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_project_domain_attributes_with_http_info(project, domain, body, async_req=True) + >>> thread = api.create_task_event_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Unique project id which this set of attributes references. +required (required) - :param str domain: Unique domain id which this set of attributes references. +required (required) - :param AdminProjectDomainAttributesDeleteRequest body: (required) - :return: AdminProjectDomainAttributesDeleteResponse + :param AdminTaskExecutionEventRequest body: (required) + :return: AdminTaskExecutionEventResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['project', 'domain', 'body'] # noqa: E501 + all_params = ['body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -868,30 +888,18 @@ def delete_project_domain_attributes_with_http_info(self, project, domain, body, if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method delete_project_domain_attributes" % key + " to method create_task_event" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'project' is set - if ('project' not in params or - params['project'] is None): - raise ValueError("Missing the required parameter `project` when calling `delete_project_domain_attributes`") # noqa: E501 - # verify the required parameter 'domain' is set - if ('domain' not in params or - params['domain'] is None): - raise ValueError("Missing the required parameter `domain` when calling `delete_project_domain_attributes`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `delete_project_domain_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `create_task_event`") # noqa: E501 collection_formats = {} path_params = {} - if 'project' in params: - path_params['project'] = params['project'] # noqa: E501 - if 'domain' in params: - path_params['domain'] = params['domain'] # noqa: E501 query_params = [] @@ -915,14 +923,14 @@ def delete_project_domain_attributes_with_http_info(self, project, domain, body, auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/project_domain_attributes/{project}/{domain}', 'DELETE', + '/api/v1/events/tasks', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminProjectDomainAttributesDeleteResponse', # 
noqa: E501 + response_type='AdminTaskExecutionEventResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -930,49 +938,45 @@ def delete_project_domain_attributes_with_http_info(self, project, domain, body, _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def delete_workflow_attributes(self, project, domain, workflow, body, **kwargs): # noqa: E501 - """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 + def create_task_event2(self, event_parent_node_execution_id_execution_id_org, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_workflow_attributes(project, domain, workflow, body, async_req=True) + >>> thread = api.create_task_event2(event_parent_node_execution_id_execution_id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Unique project id which this set of attributes references. +required (required) - :param str domain: Unique domain id which this set of attributes references. +required (required) - :param str workflow: Workflow name which this set of attributes references. +required (required) - :param AdminWorkflowAttributesDeleteRequest body: (required) - :return: AdminWorkflowAttributesDeleteResponse + :param str event_parent_node_execution_id_execution_id_org: Optional, org key applied to the resource. (required) + :param AdminTaskExecutionEventRequest body: (required) + :return: AdminTaskExecutionEventResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.delete_workflow_attributes_with_http_info(project, domain, workflow, body, **kwargs) # noqa: E501 + return self.create_task_event2_with_http_info(event_parent_node_execution_id_execution_id_org, body, **kwargs) # noqa: E501 else: - (data) = self.delete_workflow_attributes_with_http_info(project, domain, workflow, body, **kwargs) # noqa: E501 + (data) = self.create_task_event2_with_http_info(event_parent_node_execution_id_execution_id_org, body, **kwargs) # noqa: E501 return data - def delete_workflow_attributes_with_http_info(self, project, domain, workflow, body, **kwargs): # noqa: E501 - """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 + def create_task_event2_with_http_info(self, event_parent_node_execution_id_execution_id_org, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_workflow_attributes_with_http_info(project, domain, workflow, body, async_req=True) + >>> thread = api.create_task_event2_with_http_info(event_parent_node_execution_id_execution_id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Unique project id which this set of attributes references. +required (required) - :param str domain: Unique domain id which this set of attributes references. +required (required) - :param str workflow: Workflow name which this set of attributes references. +required (required) - :param AdminWorkflowAttributesDeleteRequest body: (required) - :return: AdminWorkflowAttributesDeleteResponse + :param str event_parent_node_execution_id_execution_id_org: Optional, org key applied to the resource. 
(required) + :param AdminTaskExecutionEventRequest body: (required) + :return: AdminTaskExecutionEventResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['project', 'domain', 'workflow', 'body'] # noqa: E501 + all_params = ['event_parent_node_execution_id_execution_id_org', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -983,36 +987,24 @@ def delete_workflow_attributes_with_http_info(self, project, domain, workflow, b if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method delete_workflow_attributes" % key + " to method create_task_event2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'project' is set - if ('project' not in params or - params['project'] is None): - raise ValueError("Missing the required parameter `project` when calling `delete_workflow_attributes`") # noqa: E501 - # verify the required parameter 'domain' is set - if ('domain' not in params or - params['domain'] is None): - raise ValueError("Missing the required parameter `domain` when calling `delete_workflow_attributes`") # noqa: E501 - # verify the required parameter 'workflow' is set - if ('workflow' not in params or - params['workflow'] is None): - raise ValueError("Missing the required parameter `workflow` when calling `delete_workflow_attributes`") # noqa: E501 + # verify the required parameter 'event_parent_node_execution_id_execution_id_org' is set + if ('event_parent_node_execution_id_execution_id_org' not in params or + params['event_parent_node_execution_id_execution_id_org'] is None): + raise ValueError("Missing the required parameter `event_parent_node_execution_id_execution_id_org` when calling `create_task_event2`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required 
parameter `body` when calling `delete_workflow_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `create_task_event2`") # noqa: E501 collection_formats = {} path_params = {} - if 'project' in params: - path_params['project'] = params['project'] # noqa: E501 - if 'domain' in params: - path_params['domain'] = params['domain'] # noqa: E501 - if 'workflow' in params: - path_params['workflow'] = params['workflow'] # noqa: E501 + if 'event_parent_node_execution_id_execution_id_org' in params: + path_params['event.parent_node_execution_id.execution_id.org'] = params['event_parent_node_execution_id_execution_id_org'] # noqa: E501 query_params = [] @@ -1036,14 +1028,14 @@ def delete_workflow_attributes_with_http_info(self, project, domain, workflow, b auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/workflow_attributes/{project}/{domain}/{workflow}', 'DELETE', + '/api/v1/events/org/{event.parent_node_execution_id.execution_id.org}/tasks', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminWorkflowAttributesDeleteResponse', # noqa: E501 + response_type='AdminTaskExecutionEventResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -1051,47 +1043,43 @@ def delete_workflow_attributes_with_http_info(self, project, domain, workflow, b _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_active_launch_plan(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 + def create_workflow(self, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_active_launch_plan(id_project, id_domain, id_name, async_req=True) + >>> thread = api.create_workflow(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :return: AdminLaunchPlan + :param AdminWorkflowCreateRequest body: (required) + :return: AdminWorkflowCreateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_active_launch_plan_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + return self.create_workflow_with_http_info(body, **kwargs) # noqa: E501 else: - (data) = self.get_active_launch_plan_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + (data) = self.create_workflow_with_http_info(body, **kwargs) # noqa: E501 return data - def get_active_launch_plan_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 + def create_workflow_with_http_info(self, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_active_launch_plan_with_http_info(id_project, id_domain, id_name, async_req=True) + >>> thread = api.create_workflow_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :return: AdminLaunchPlan + :param AdminWorkflowCreateRequest body: (required) + :return: AdminWorkflowCreateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['id_project', 'id_domain', 'id_name'] # noqa: E501 + all_params = ['body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -1102,32 +1090,18 @@ def get_active_launch_plan_with_http_info(self, id_project, id_domain, id_name, if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_active_launch_plan" % key + " to method create_workflow" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `get_active_launch_plan`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if ('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `get_active_launch_plan`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in 
params or - params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `get_active_launch_plan`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `create_workflow`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] @@ -1137,6 +1111,8 @@ def get_active_launch_plan_with_http_info(self, id_project, id_domain, id_name, local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -1149,14 +1125,14 @@ def get_active_launch_plan_with_http_info(self, id_project, id_domain, id_name, auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}', 'GET', + '/api/v1/workflows', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminLaunchPlan', # noqa: E501 + response_type='AdminWorkflowCreateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -1164,51 +1140,45 @@ def get_active_launch_plan_with_http_info(self, id_project, id_domain, id_name, _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_description_entity(self, id_resource_type, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 - """Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` 
object. # noqa: E501 + def create_workflow2(self, id_org, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_description_entity(id_resource_type, id_project, id_domain, id_name, id_version, async_req=True) + >>> thread = api.create_workflow2(id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. (required) - :param str id_version: Specific version of the resource. (required) - :return: AdminDescriptionEntity + :param str id_org: Optional, org key applied to the resource. (required) + :param AdminWorkflowCreateRequest body: (required) + :return: AdminWorkflowCreateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_description_entity_with_http_info(id_resource_type, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + return self.create_workflow2_with_http_info(id_org, body, **kwargs) # noqa: E501 else: - (data) = self.get_description_entity_with_http_info(id_resource_type, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + (data) = self.create_workflow2_with_http_info(id_org, body, **kwargs) # noqa: E501 return data - def get_description_entity_with_http_info(self, id_resource_type, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 - """Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. # noqa: E501 + def create_workflow2_with_http_info(self, id_org, body, **kwargs): # noqa: E501 + """Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_description_entity_with_http_info(id_resource_type, id_project, id_domain, id_name, id_version, async_req=True) + >>> thread = api.create_workflow2_with_http_info(id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. (required) - :param str id_version: Specific version of the resource. (required) - :return: AdminDescriptionEntity + :param str id_org: Optional, org key applied to the resource. 
(required) + :param AdminWorkflowCreateRequest body: (required) + :return: AdminWorkflowCreateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['id_resource_type', 'id_project', 'id_domain', 'id_name', 'id_version'] # noqa: E501 + all_params = ['id_org', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -1219,44 +1189,24 @@ def get_description_entity_with_http_info(self, id_resource_type, id_project, id if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_description_entity" % key + " to method create_workflow2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_resource_type' is set - if ('id_resource_type' not in params or - params['id_resource_type'] is None): - raise ValueError("Missing the required parameter `id_resource_type` when calling `get_description_entity`") # noqa: E501 - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `get_description_entity`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if ('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `get_description_entity`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in params or - params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `get_description_entity`") # noqa: E501 - # verify the required parameter 'id_version' is set - if ('id_version' not in params or - params['id_version'] is None): - raise ValueError("Missing the required parameter `id_version` when calling `get_description_entity`") # noqa: E501 + # verify the required parameter 'id_org' 
is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `create_workflow2`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `create_workflow2`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_resource_type' in params: - path_params['id.resource_type'] = params['id_resource_type'] # noqa: E501 - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = params['id_name'] # noqa: E501 - if 'id_version' in params: - path_params['id.version'] = params['id_version'] # noqa: E501 + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 query_params = [] @@ -1266,6 +1216,8 @@ def get_description_entity_with_http_info(self, id_resource_type, id_project, id local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -1278,14 +1230,14 @@ def get_description_entity_with_http_info(self, id_resource_type, id_project, id auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET', + '/api/v1/workflows/org/{id.org}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminDescriptionEntity', # noqa: E501 + response_type='AdminWorkflowCreateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -1293,47 
+1245,43 @@ def get_description_entity_with_http_info(self, id_resource_type, id_project, id _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_execution(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetches a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + def create_workflow_event(self, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_execution(id_project, id_domain, id_name, async_req=True) + >>> thread = api.create_workflow_event(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User or system provided value for the resource. (required) - :return: AdminExecution + :param AdminWorkflowExecutionEventRequest body: (required) + :return: AdminWorkflowExecutionEventResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_execution_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + return self.create_workflow_event_with_http_info(body, **kwargs) # noqa: E501 else: - (data) = self.get_execution_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + (data) = self.create_workflow_event_with_http_info(body, **kwargs) # noqa: E501 return data - def get_execution_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetches a :ref:`ref_flyteidl.admin.Execution`. 
# noqa: E501 + def create_workflow_event_with_http_info(self, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_execution_with_http_info(id_project, id_domain, id_name, async_req=True) + >>> thread = api.create_workflow_event_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User or system provided value for the resource. (required) - :return: AdminExecution + :param AdminWorkflowExecutionEventRequest body: (required) + :return: AdminWorkflowExecutionEventResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id_project', 'id_domain', 'id_name'] # noqa: E501 + all_params = ['body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -1344,32 +1292,18 @@ def get_execution_with_http_info(self, id_project, id_domain, id_name, **kwargs) if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_execution" % key + " to method create_workflow_event" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `get_execution`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if ('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `get_execution`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in params or - params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `get_execution`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `create_workflow_event`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] @@ -1379,6 +1313,8 @@ def get_execution_with_http_info(self, id_project, id_domain, id_name, **kwargs) local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = 
self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -1391,14 +1327,14 @@ def get_execution_with_http_info(self, id_project, id_domain, id_name, **kwargs) auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/executions/{id.project}/{id.domain}/{id.name}', 'GET', + '/api/v1/events/workflows', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminExecution', # noqa: E501 + response_type='AdminWorkflowExecutionEventResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -1406,47 +1342,45 @@ def get_execution_with_http_info(self, id_project, id_domain, id_name, **kwargs) _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_execution_data(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + def create_workflow_event2(self, event_execution_id_org, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_execution_data(id_project, id_domain, id_name, async_req=True) + >>> thread = api.create_workflow_event2(event_execution_id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User or system provided value for the resource. 
(required) - :return: AdminWorkflowExecutionGetDataResponse + :param str event_execution_id_org: Optional, org key applied to the resource. (required) + :param AdminWorkflowExecutionEventRequest body: (required) + :return: AdminWorkflowExecutionEventResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_execution_data_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + return self.create_workflow_event2_with_http_info(event_execution_id_org, body, **kwargs) # noqa: E501 else: - (data) = self.get_execution_data_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + (data) = self.create_workflow_event2_with_http_info(event_execution_id_org, body, **kwargs) # noqa: E501 return data - def get_execution_data_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + def create_workflow_event2_with_http_info(self, event_execution_id_org, body, **kwargs): # noqa: E501 + """Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_execution_data_with_http_info(id_project, id_domain, id_name, async_req=True) + >>> thread = api.create_workflow_event2_with_http_info(event_execution_id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User or system provided value for the resource. 
(required) - :return: AdminWorkflowExecutionGetDataResponse + :param str event_execution_id_org: Optional, org key applied to the resource. (required) + :param AdminWorkflowExecutionEventRequest body: (required) + :return: AdminWorkflowExecutionEventResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['id_project', 'id_domain', 'id_name'] # noqa: E501 + all_params = ['event_execution_id_org', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -1457,32 +1391,24 @@ def get_execution_data_with_http_info(self, id_project, id_domain, id_name, **kw if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_execution_data" % key + " to method create_workflow_event2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `get_execution_data`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if ('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `get_execution_data`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in params or - params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `get_execution_data`") # noqa: E501 + # verify the required parameter 'event_execution_id_org' is set + if ('event_execution_id_org' not in params or + params['event_execution_id_org'] is None): + raise ValueError("Missing the required parameter `event_execution_id_org` when calling `create_workflow_event2`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise 
ValueError("Missing the required parameter `body` when calling `create_workflow_event2`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = params['id_name'] # noqa: E501 + if 'event_execution_id_org' in params: + path_params['event.execution_id.org'] = params['event_execution_id_org'] # noqa: E501 query_params = [] @@ -1492,6 +1418,8 @@ def get_execution_data_with_http_info(self, id_project, id_domain, id_name, **kw local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -1504,14 +1432,14 @@ def get_execution_data_with_http_info(self, id_project, id_domain, id_name, **kw auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/data/executions/{id.project}/{id.domain}/{id.name}', 'GET', + '/api/v1/events/org/{event.execution_id.org}/workflows', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminWorkflowExecutionGetDataResponse', # noqa: E501 + response_type='AdminWorkflowExecutionEventResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -1519,49 +1447,45 @@ def get_execution_data_with_http_info(self, id_project, id_domain, id_name, **kw _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_execution_metrics(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. 
# noqa: E501 + def delete_project_attributes(self, project, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_execution_metrics(id_project, id_domain, id_name, async_req=True) + >>> thread = api.delete_project_attributes(project, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User or system provided value for the resource. (required) - :param int depth: depth defines the number of Flyte entity levels to traverse when breaking down execution details. - :return: AdminWorkflowExecutionGetMetricsResponse + :param str project: Unique project id which this set of attributes references. +required (required) + :param AdminProjectAttributesDeleteRequest body: (required) + :return: AdminProjectAttributesDeleteResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_execution_metrics_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + return self.delete_project_attributes_with_http_info(project, body, **kwargs) # noqa: E501 else: - (data) = self.get_execution_metrics_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + (data) = self.delete_project_attributes_with_http_info(project, body, **kwargs) # noqa: E501 return data - def get_execution_metrics_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. 
# noqa: E501 + def delete_project_attributes_with_http_info(self, project, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_execution_metrics_with_http_info(id_project, id_domain, id_name, async_req=True) + >>> thread = api.delete_project_attributes_with_http_info(project, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User or system provided value for the resource. (required) - :param int depth: depth defines the number of Flyte entity levels to traverse when breaking down execution details. - :return: AdminWorkflowExecutionGetMetricsResponse + :param str project: Unique project id which this set of attributes references. +required (required) + :param AdminProjectAttributesDeleteRequest body: (required) + :return: AdminProjectAttributesDeleteResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id_project', 'id_domain', 'id_name', 'depth'] # noqa: E501 + all_params = ['project', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -1572,36 +1496,26 @@ def get_execution_metrics_with_http_info(self, id_project, id_domain, id_name, * if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_execution_metrics" % key + " to method delete_project_attributes" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `get_execution_metrics`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if ('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `get_execution_metrics`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in params or - params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `get_execution_metrics`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `delete_project_attributes`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `delete_project_attributes`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = 
params['id_name'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 query_params = [] - if 'depth' in params: - query_params.append(('depth', params['depth'])) # noqa: E501 header_params = {} @@ -1609,6 +1523,8 @@ def get_execution_metrics_with_http_info(self, id_project, id_domain, id_name, * local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -1621,14 +1537,14 @@ def get_execution_metrics_with_http_info(self, id_project, id_domain, id_name, * auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}', 'GET', + '/api/v1/project_attributes/{project}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminWorkflowExecutionGetMetricsResponse', # noqa: E501 + response_type='AdminProjectAttributesDeleteResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -1636,51 +1552,47 @@ def get_execution_metrics_with_http_info(self, id_project, id_domain, id_name, * _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_launch_plan(self, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 - """Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. # noqa: E501 + def delete_project_attributes2(self, org, project, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_launch_plan(id_project, id_domain, id_name, id_version, async_req=True) + >>> thread = api.delete_project_attributes2(org, project, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. (required) - :param str id_version: Specific version of the resource. (required) - :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :return: AdminLaunchPlan + :param str org: Optional, org key applied to the project. (required) + :param str project: Unique project id which this set of attributes references. +required (required) + :param AdminProjectAttributesDeleteRequest body: (required) + :return: AdminProjectAttributesDeleteResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + return self.delete_project_attributes2_with_http_info(org, project, body, **kwargs) # noqa: E501 else: - (data) = self.get_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + (data) = self.delete_project_attributes2_with_http_info(org, project, body, **kwargs) # noqa: E501 return data - def get_launch_plan_with_http_info(self, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 - """Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. # noqa: E501 + def delete_project_attributes2_with_http_info(self, org, project, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, async_req=True) + >>> thread = api.delete_project_attributes2_with_http_info(org, project, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. (required) - :param str id_version: Specific version of the resource. (required) - :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. 
Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :return: AdminLaunchPlan + :param str org: Optional, org key applied to the project. (required) + :param str project: Unique project id which this set of attributes references. +required (required) + :param AdminProjectAttributesDeleteRequest body: (required) + :return: AdminProjectAttributesDeleteResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['id_project', 'id_domain', 'id_name', 'id_version', 'id_resource_type'] # noqa: E501 + all_params = ['org', 'project', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -1691,42 +1603,32 @@ def get_launch_plan_with_http_info(self, id_project, id_domain, id_name, id_vers if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_launch_plan" % key + " to method delete_project_attributes2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `get_launch_plan`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if ('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `get_launch_plan`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in params or - params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `get_launch_plan`") # noqa: E501 - # verify the required parameter 'id_version' is set - if ('id_version' not in params or - params['id_version'] is None): - raise ValueError("Missing 
the required parameter `id_version` when calling `get_launch_plan`") # noqa: E501 + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `delete_project_attributes2`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `delete_project_attributes2`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `delete_project_attributes2`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = params['id_name'] # noqa: E501 - if 'id_version' in params: - path_params['id.version'] = params['id_version'] # noqa: E501 + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 query_params = [] - if 'id_resource_type' in params: - query_params.append(('id.resource_type', params['id_resource_type'])) # noqa: E501 header_params = {} @@ -1734,6 +1636,8 @@ def get_launch_plan_with_http_info(self, id_project, id_domain, id_name, id_vers local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -1746,14 +1650,14 @@ def get_launch_plan_with_http_info(self, id_project, id_domain, id_name, id_vers auth_settings = [] # noqa: E501 return self.api_client.call_api( - 
'/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET', + '/api/v1/project_domain_attributes/org/{org}/{project}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminLaunchPlan', # noqa: E501 + response_type='AdminProjectAttributesDeleteResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -1761,49 +1665,47 @@ def get_launch_plan_with_http_info(self, id_project, id_domain, id_name, id_vers _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_named_entity(self, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 + def delete_project_domain_attributes(self, project, domain, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_named_entity(resource_type, id_project, id_domain, id_name, async_req=True) + >>> thread = api.delete_project_domain_attributes(project, domain, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. +required (required) - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. 
+optional - in certain contexts - like 'List API', 'Launch plans' (required) - :return: AdminNamedEntity + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param AdminProjectDomainAttributesDeleteRequest body: (required) + :return: AdminProjectDomainAttributesDeleteResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 + return self.delete_project_domain_attributes_with_http_info(project, domain, body, **kwargs) # noqa: E501 else: - (data) = self.get_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 + (data) = self.delete_project_domain_attributes_with_http_info(project, domain, body, **kwargs) # noqa: E501 return data - def get_named_entity_with_http_info(self, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 + def delete_project_domain_attributes_with_http_info(self, project, domain, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, async_req=True) + >>> thread = api.delete_project_domain_attributes_with_http_info(project, domain, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. 
+required (required) - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :return: AdminNamedEntity + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param AdminProjectDomainAttributesDeleteRequest body: (required) + :return: AdminProjectDomainAttributesDeleteResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['resource_type', 'id_project', 'id_domain', 'id_name'] # noqa: E501 + all_params = ['project', 'domain', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -1814,38 +1716,30 @@ def get_named_entity_with_http_info(self, resource_type, id_project, id_domain, if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_named_entity" % key + " to method delete_project_domain_attributes" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'resource_type' is set - if ('resource_type' not in params or - params['resource_type'] is None): - raise ValueError("Missing the required parameter `resource_type` when calling `get_named_entity`") # noqa: E501 - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `get_named_entity`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if 
('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `get_named_entity`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in params or - params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `get_named_entity`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `delete_project_domain_attributes`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `delete_project_domain_attributes`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `delete_project_domain_attributes`") # noqa: E501 collection_formats = {} path_params = {} - if 'resource_type' in params: - path_params['resource_type'] = params['resource_type'] # noqa: E501 - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = params['id_name'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 query_params = [] @@ -1855,6 +1749,8 @@ def get_named_entity_with_http_info(self, resource_type, id_project, id_domain, local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: 
E501 @@ -1867,14 +1763,14 @@ def get_named_entity_with_http_info(self, resource_type, id_project, id_domain, auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}', 'GET', + '/api/v1/project_domain_attributes/{project}/{domain}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminNamedEntity', # noqa: E501 + response_type='AdminProjectDomainAttributesDeleteResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -1882,49 +1778,49 @@ def get_named_entity_with_http_info(self, resource_type, id_project, id_domain, _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_node_execution(self, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 - """Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + def delete_project_domain_attributes2(self, org, project, domain, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_node_execution(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> thread = api.delete_project_domain_attributes2(org, project, domain, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_execution_id_project: Name of the project the resource belongs to. (required) - :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. 
(required) - :param str id_execution_id_name: User or system provided value for the resource. (required) - :param str id_node_id: (required) - :return: FlyteidladminNodeExecution + :param str org: Optional, org key applied to the attributes. (required) + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param AdminProjectDomainAttributesDeleteRequest body: (required) + :return: AdminProjectDomainAttributesDeleteResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_node_execution_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + return self.delete_project_domain_attributes2_with_http_info(org, project, domain, body, **kwargs) # noqa: E501 else: - (data) = self.get_node_execution_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + (data) = self.delete_project_domain_attributes2_with_http_info(org, project, domain, body, **kwargs) # noqa: E501 return data - def get_node_execution_with_http_info(self, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 - """Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + def delete_project_domain_attributes2_with_http_info(self, org, project, domain, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_node_execution_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> thread = api.delete_project_domain_attributes2_with_http_info(org, project, domain, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_execution_id_project: Name of the project the resource belongs to. (required) - :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_execution_id_name: User or system provided value for the resource. (required) - :param str id_node_id: (required) - :return: FlyteidladminNodeExecution + :param str org: Optional, org key applied to the attributes. (required) + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param AdminProjectDomainAttributesDeleteRequest body: (required) + :return: AdminProjectDomainAttributesDeleteResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id_execution_id_project', 'id_execution_id_domain', 'id_execution_id_name', 'id_node_id'] # noqa: E501 + all_params = ['org', 'project', 'domain', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -1935,38 +1831,36 @@ def get_node_execution_with_http_info(self, id_execution_id_project, id_executio if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_node_execution" % key + " to method delete_project_domain_attributes2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_execution_id_project' is set - if ('id_execution_id_project' not in params or - params['id_execution_id_project'] is None): - raise ValueError("Missing the required parameter `id_execution_id_project` when calling `get_node_execution`") # noqa: E501 - # verify the required parameter 'id_execution_id_domain' is set - if ('id_execution_id_domain' not in params or - params['id_execution_id_domain'] is None): - raise ValueError("Missing the required parameter `id_execution_id_domain` when calling `get_node_execution`") # noqa: E501 - # verify the required parameter 'id_execution_id_name' is set - if ('id_execution_id_name' not in params or - params['id_execution_id_name'] is None): - raise ValueError("Missing the required parameter `id_execution_id_name` when calling `get_node_execution`") # noqa: E501 - # verify the required parameter 'id_node_id' is set - if ('id_node_id' not in params or - params['id_node_id'] is None): - raise ValueError("Missing the required parameter `id_node_id` when calling `get_node_execution`") # noqa: E501 + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `delete_project_domain_attributes2`") # noqa: E501 + # verify the required parameter 'project' is set + if 
('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `delete_project_domain_attributes2`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `delete_project_domain_attributes2`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `delete_project_domain_attributes2`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_execution_id_project' in params: - path_params['id.execution_id.project'] = params['id_execution_id_project'] # noqa: E501 - if 'id_execution_id_domain' in params: - path_params['id.execution_id.domain'] = params['id_execution_id_domain'] # noqa: E501 - if 'id_execution_id_name' in params: - path_params['id.execution_id.name'] = params['id_execution_id_name'] # noqa: E501 - if 'id_node_id' in params: - path_params['id.node_id'] = params['id_node_id'] # noqa: E501 + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 query_params = [] @@ -1976,6 +1870,8 @@ def get_node_execution_with_http_info(self, id_execution_id_project, id_executio local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -1988,14 +1884,14 @@ def get_node_execution_with_http_info(self, id_execution_id_project, id_executio auth_settings = [] # noqa: E501 return self.api_client.call_api( - 
'/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}', 'GET', + '/api/v1/project_domain_attributes/org/{org}/{project}/{domain}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='FlyteidladminNodeExecution', # noqa: E501 + response_type='AdminProjectDomainAttributesDeleteResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -2003,49 +1899,49 @@ def get_node_execution_with_http_info(self, id_execution_id_project, id_executio _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_node_execution_data(self, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 - """Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + def delete_workflow_attributes(self, project, domain, workflow, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_node_execution_data(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> thread = api.delete_workflow_attributes(project, domain, workflow, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_execution_id_project: Name of the project the resource belongs to. (required) - :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_execution_id_name: User or system provided value for the resource. 
(required) - :param str id_node_id: (required) - :return: AdminNodeExecutionGetDataResponse + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param str workflow: Workflow name which this set of attributes references. +required (required) + :param AdminWorkflowAttributesDeleteRequest body: (required) + :return: AdminWorkflowAttributesDeleteResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_node_execution_data_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + return self.delete_workflow_attributes_with_http_info(project, domain, workflow, body, **kwargs) # noqa: E501 else: - (data) = self.get_node_execution_data_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + (data) = self.delete_workflow_attributes_with_http_info(project, domain, workflow, body, **kwargs) # noqa: E501 return data - def get_node_execution_data_with_http_info(self, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 - """Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + def delete_workflow_attributes_with_http_info(self, project, domain, workflow, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_node_execution_data_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> thread = api.delete_workflow_attributes_with_http_info(project, domain, workflow, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_execution_id_project: Name of the project the resource belongs to. (required) - :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_execution_id_name: User or system provided value for the resource. (required) - :param str id_node_id: (required) - :return: AdminNodeExecutionGetDataResponse + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param str workflow: Workflow name which this set of attributes references. +required (required) + :param AdminWorkflowAttributesDeleteRequest body: (required) + :return: AdminWorkflowAttributesDeleteResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id_execution_id_project', 'id_execution_id_domain', 'id_execution_id_name', 'id_node_id'] # noqa: E501 + all_params = ['project', 'domain', 'workflow', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -2056,38 +1952,36 @@ def get_node_execution_data_with_http_info(self, id_execution_id_project, id_exe if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_node_execution_data" % key + " to method delete_workflow_attributes" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_execution_id_project' is set - if ('id_execution_id_project' not in params or - params['id_execution_id_project'] is None): - raise ValueError("Missing the required parameter `id_execution_id_project` when calling `get_node_execution_data`") # noqa: E501 - # verify the required parameter 'id_execution_id_domain' is set - if ('id_execution_id_domain' not in params or - params['id_execution_id_domain'] is None): - raise ValueError("Missing the required parameter `id_execution_id_domain` when calling `get_node_execution_data`") # noqa: E501 - # verify the required parameter 'id_execution_id_name' is set - if ('id_execution_id_name' not in params or - params['id_execution_id_name'] is None): - raise ValueError("Missing the required parameter `id_execution_id_name` when calling `get_node_execution_data`") # noqa: E501 - # verify the required parameter 'id_node_id' is set - if ('id_node_id' not in params or - params['id_node_id'] is None): - raise ValueError("Missing the required parameter `id_node_id` when calling `get_node_execution_data`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `delete_workflow_attributes`") # noqa: E501 + # verify the required 
parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `delete_workflow_attributes`") # noqa: E501 + # verify the required parameter 'workflow' is set + if ('workflow' not in params or + params['workflow'] is None): + raise ValueError("Missing the required parameter `workflow` when calling `delete_workflow_attributes`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `delete_workflow_attributes`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_execution_id_project' in params: - path_params['id.execution_id.project'] = params['id_execution_id_project'] # noqa: E501 - if 'id_execution_id_domain' in params: - path_params['id.execution_id.domain'] = params['id_execution_id_domain'] # noqa: E501 - if 'id_execution_id_name' in params: - path_params['id.execution_id.name'] = params['id_execution_id_name'] # noqa: E501 - if 'id_node_id' in params: - path_params['id.node_id'] = params['id_node_id'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + if 'workflow' in params: + path_params['workflow'] = params['workflow'] # noqa: E501 query_params = [] @@ -2097,6 +1991,8 @@ def get_node_execution_data_with_http_info(self, id_execution_id_project, id_exe local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -2109,14 +2005,14 @@ def get_node_execution_data_with_http_info(self, id_execution_id_project, id_exe auth_settings = [] # noqa: E501 return self.api_client.call_api( - 
'/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}', 'GET', + '/api/v1/workflow_attributes/{project}/{domain}/{workflow}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminNodeExecutionGetDataResponse', # noqa: E501 + response_type='AdminWorkflowAttributesDeleteResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -2124,45 +2020,51 @@ def get_node_execution_data_with_http_info(self, id_execution_id_project, id_exe _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_project_attributes(self, project, **kwargs): # noqa: E501 - """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + def delete_workflow_attributes2(self, org, project, domain, workflow, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_project_attributes(project, async_req=True) + >>> thread = api.delete_workflow_attributes2(org, project, domain, workflow, body, async_req=True) >>> result = thread.get() :param async_req bool + :param str org: Optional, org key applied to the attributes. (required) :param str project: Unique project id which this set of attributes references. +required (required) - :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. 
- EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. - :return: AdminProjectAttributesGetResponse + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param str workflow: Workflow name which this set of attributes references. +required (required) + :param AdminWorkflowAttributesDeleteRequest body: (required) + :return: AdminWorkflowAttributesDeleteResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_project_attributes_with_http_info(project, **kwargs) # noqa: E501 + return self.delete_workflow_attributes2_with_http_info(org, project, domain, workflow, body, **kwargs) # noqa: E501 else: - (data) = self.get_project_attributes_with_http_info(project, **kwargs) # noqa: E501 + (data) = self.delete_workflow_attributes2_with_http_info(org, project, domain, workflow, body, **kwargs) # noqa: E501 return data - def get_project_attributes_with_http_info(self, project, **kwargs): # noqa: E501 - """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + def delete_workflow_attributes2_with_http_info(self, org, project, domain, workflow, body, **kwargs): # noqa: E501 + """Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. 
# noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_project_attributes_with_http_info(project, async_req=True) + >>> thread = api.delete_workflow_attributes2_with_http_info(org, project, domain, workflow, body, async_req=True) >>> result = thread.get() :param async_req bool + :param str org: Optional, org key applied to the attributes. (required) :param str project: Unique project id which this set of attributes references. +required (required) - :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. - :return: AdminProjectAttributesGetResponse + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param str workflow: Workflow name which this set of attributes references. +required (required) + :param AdminWorkflowAttributesDeleteRequest body: (required) + :return: AdminWorkflowAttributesDeleteResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['project', 'resource_type'] # noqa: E501 + all_params = ['org', 'project', 'domain', 'workflow', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -2173,24 +2075,44 @@ def get_project_attributes_with_http_info(self, project, **kwargs): # noqa: E50 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_project_attributes" % key + " to method delete_workflow_attributes2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `delete_workflow_attributes2`") # noqa: E501 # verify the required parameter 'project' is set if ('project' not in params or params['project'] is None): - raise ValueError("Missing the required parameter `project` when calling `get_project_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `project` when calling `delete_workflow_attributes2`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `delete_workflow_attributes2`") # noqa: E501 + # verify the required parameter 'workflow' is set + if ('workflow' not in params or + params['workflow'] is None): + raise ValueError("Missing the required parameter `workflow` when calling `delete_workflow_attributes2`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `delete_workflow_attributes2`") # noqa: E501 collection_formats = {} path_params = {} + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 if 'project' in params: path_params['project'] = params['project'] 
# noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + if 'workflow' in params: + path_params['workflow'] = params['workflow'] # noqa: E501 query_params = [] - if 'resource_type' in params: - query_params.append(('resource_type', params['resource_type'])) # noqa: E501 header_params = {} @@ -2198,6 +2120,8 @@ def get_project_attributes_with_http_info(self, project, **kwargs): # noqa: E50 local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -2210,14 +2134,14 @@ def get_project_attributes_with_http_info(self, project, **kwargs): # noqa: E50 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/project_attributes/{project}', 'GET', + '/api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminProjectAttributesGetResponse', # noqa: E501 + response_type='AdminWorkflowAttributesDeleteResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -2225,47 +2149,49 @@ def get_project_attributes_with_http_info(self, project, **kwargs): # noqa: E50 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_project_domain_attributes(self, project, domain, **kwargs): # noqa: E501 - """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + def get_active_launch_plan(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_project_domain_attributes(project, domain, async_req=True) + >>> thread = api.get_active_launch_plan(id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Unique project id which this set of attributes references. +required (required) - :param str domain: Unique domain id which this set of attributes references. +required (required) - :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. - :return: AdminProjectDomainAttributesGetResponse + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. + :return: AdminLaunchPlan If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_project_domain_attributes_with_http_info(project, domain, **kwargs) # noqa: E501 + return self.get_active_launch_plan_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 else: - (data) = self.get_project_domain_attributes_with_http_info(project, domain, **kwargs) # noqa: E501 + (data) = self.get_active_launch_plan_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 return data - def get_project_domain_attributes_with_http_info(self, project, domain, **kwargs): # noqa: E501 - """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + def get_active_launch_plan_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_project_domain_attributes_with_http_info(project, domain, async_req=True) + >>> thread = api.get_active_launch_plan_with_http_info(id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Unique project id which this set of attributes references. +required (required) - :param str domain: Unique domain id which this set of attributes references. +required (required) - :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. 
- EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. - :return: AdminProjectDomainAttributesGetResponse + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. + :return: AdminLaunchPlan If the method is called asynchronously, returns the request thread. 
""" - all_params = ['project', 'domain', 'resource_type'] # noqa: E501 + all_params = ['id_project', 'id_domain', 'id_name', 'id_org'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -2276,30 +2202,36 @@ def get_project_domain_attributes_with_http_info(self, project, domain, **kwargs if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_project_domain_attributes" % key + " to method get_active_launch_plan" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'project' is set - if ('project' not in params or - params['project'] is None): - raise ValueError("Missing the required parameter `project` when calling `get_project_domain_attributes`") # noqa: E501 - # verify the required parameter 'domain' is set - if ('domain' not in params or - params['domain'] is None): - raise ValueError("Missing the required parameter `domain` when calling `get_project_domain_attributes`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `get_active_launch_plan`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `get_active_launch_plan`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_active_launch_plan`") # noqa: E501 collection_formats = {} path_params = {} - if 'project' in params: - path_params['project'] = params['project'] # noqa: E501 - if 'domain' in params: - path_params['domain'] = params['domain'] # noqa: E501 + if 'id_project' in params: + 
path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] - if 'resource_type' in params: - query_params.append(('resource_type', params['resource_type'])) # noqa: E501 + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 header_params = {} @@ -2319,14 +2251,14 @@ def get_project_domain_attributes_with_http_info(self, project, domain, **kwargs auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/project_domain_attributes/{project}/{domain}', 'GET', + '/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminProjectDomainAttributesGetResponse', # noqa: E501 + response_type='AdminLaunchPlan', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -2334,51 +2266,49 @@ def get_project_domain_attributes_with_http_info(self, project, domain, **kwargs _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_task(self, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 - """Fetch a :ref:`ref_flyteidl.admin.Task` definition. # noqa: E501 + def get_active_launch_plan2(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_task(id_project, id_domain, id_name, id_version, async_req=True) + >>> thread = api.get_active_launch_plan2(id_org, id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. (required) - :param str id_version: Specific version of the resource. (required) - :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :return: AdminTask + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :return: AdminLaunchPlan If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_task_with_http_info(id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + return self.get_active_launch_plan2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 else: - (data) = self.get_task_with_http_info(id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + (data) = self.get_active_launch_plan2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 return data - def get_task_with_http_info(self, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 - """Fetch a :ref:`ref_flyteidl.admin.Task` definition. # noqa: E501 + def get_active_launch_plan2_with_http_info(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_task_with_http_info(id_project, id_domain, id_name, id_version, async_req=True) + >>> thread = api.get_active_launch_plan2_with_http_info(id_org, id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. (required) - :param str id_version: Specific version of the resource. (required) - :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. 
A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :return: AdminTask + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :return: AdminLaunchPlan If the method is called asynchronously, returns the request thread. """ - all_params = ['id_project', 'id_domain', 'id_name', 'id_version', 'id_resource_type'] # noqa: E501 + all_params = ['id_org', 'id_project', 'id_domain', 'id_name'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -2389,42 +2319,40 @@ def get_task_with_http_info(self, id_project, id_domain, id_name, id_version, ** if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_task" % key + " to method get_active_launch_plan2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `get_active_launch_plan2`") # noqa: E501 # verify the required parameter 'id_project' is set if ('id_project' not in params or params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `get_task`") # noqa: E501 + raise ValueError("Missing the required parameter `id_project` when calling `get_active_launch_plan2`") # noqa: E501 # verify the required parameter 'id_domain' is set if ('id_domain' not in params or params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `get_task`") # noqa: 
E501 + raise ValueError("Missing the required parameter `id_domain` when calling `get_active_launch_plan2`") # noqa: E501 # verify the required parameter 'id_name' is set if ('id_name' not in params or params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `get_task`") # noqa: E501 - # verify the required parameter 'id_version' is set - if ('id_version' not in params or - params['id_version'] is None): - raise ValueError("Missing the required parameter `id_version` when calling `get_task`") # noqa: E501 + raise ValueError("Missing the required parameter `id_name` when calling `get_active_launch_plan2`") # noqa: E501 collection_formats = {} path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 if 'id_project' in params: path_params['id.project'] = params['id_project'] # noqa: E501 if 'id_domain' in params: path_params['id.domain'] = params['id_domain'] # noqa: E501 if 'id_name' in params: path_params['id.name'] = params['id_name'] # noqa: E501 - if 'id_version' in params: - path_params['id.version'] = params['id_version'] # noqa: E501 query_params = [] - if 'id_resource_type' in params: - query_params.append(('id.resource_type', params['id_resource_type'])) # noqa: E501 header_params = {} @@ -2444,14 +2372,14 @@ def get_task_with_http_info(self, id_project, id_domain, id_name, id_version, ** auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET', + '/api/v1/active_launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminTask', # noqa: E501 + response_type='AdminLaunchPlan', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -2459,61 +2387,53 @@ def 
get_task_with_http_info(self, id_project, id_domain, id_name, id_version, ** _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_task_execution(self, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs): # noqa: E501 - """Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + def get_description_entity(self, id_resource_type, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_task_execution(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True) + >>> thread = api.get_description_entity(id_resource_type, id_project, id_domain, id_name, id_version, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) - :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) - :param str id_node_execution_id_node_id: (required) - :param str id_task_id_project: Name of the project the resource belongs to. (required) - :param str id_task_id_domain: Name of the domain the resource belongs to. 
A domain can be considered as a subset within a specific project. (required) - :param str id_task_id_name: User provided value for the resource. (required) - :param str id_task_id_version: Specific version of the resource. (required) - :param int id_retry_attempt: (required) - :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :return: FlyteidladminTaskExecution + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :param str id_org: Optional, org key applied to the resource. + :return: AdminDescriptionEntity If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_task_execution_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs) # noqa: E501 + return self.get_description_entity_with_http_info(id_resource_type, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 else: - (data) = self.get_task_execution_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs) # noqa: E501 + (data) = self.get_description_entity_with_http_info(id_resource_type, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 return data - def get_task_execution_with_http_info(self, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs): # noqa: E501 - """Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + def get_description_entity_with_http_info(self, id_resource_type, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_task_execution_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True) + >>> thread = api.get_description_entity_with_http_info(id_resource_type, id_project, id_domain, id_name, id_version, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) - :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) - :param str id_node_execution_id_node_id: (required) - :param str id_task_id_project: Name of the project the resource belongs to. (required) - :param str id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_task_id_name: User provided value for the resource. (required) - :param str id_task_id_version: Specific version of the resource. (required) - :param int id_retry_attempt: (required) - :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :return: FlyteidladminTaskExecution + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :param str id_org: Optional, org key applied to the resource. + :return: AdminDescriptionEntity If the method is called asynchronously, returns the request thread. """ - all_params = ['id_node_execution_id_execution_id_project', 'id_node_execution_id_execution_id_domain', 'id_node_execution_id_execution_id_name', 'id_node_execution_id_node_id', 'id_task_id_project', 'id_task_id_domain', 'id_task_id_name', 'id_task_id_version', 'id_retry_attempt', 'id_task_id_resource_type'] # noqa: E501 + all_params = ['id_resource_type', 'id_project', 'id_domain', 'id_name', 'id_version', 'id_org'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -2524,72 +2444,48 @@ def get_task_execution_with_http_info(self, id_node_execution_id_execution_id_pr if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_task_execution" % key + " to method get_description_entity" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_node_execution_id_execution_id_project' is set - if ('id_node_execution_id_execution_id_project' not in params or - params['id_node_execution_id_execution_id_project'] is None): - raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_project` when 
calling `get_task_execution`") # noqa: E501 - # verify the required parameter 'id_node_execution_id_execution_id_domain' is set - if ('id_node_execution_id_execution_id_domain' not in params or - params['id_node_execution_id_execution_id_domain'] is None): - raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_domain` when calling `get_task_execution`") # noqa: E501 - # verify the required parameter 'id_node_execution_id_execution_id_name' is set - if ('id_node_execution_id_execution_id_name' not in params or - params['id_node_execution_id_execution_id_name'] is None): - raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_name` when calling `get_task_execution`") # noqa: E501 - # verify the required parameter 'id_node_execution_id_node_id' is set - if ('id_node_execution_id_node_id' not in params or - params['id_node_execution_id_node_id'] is None): - raise ValueError("Missing the required parameter `id_node_execution_id_node_id` when calling `get_task_execution`") # noqa: E501 - # verify the required parameter 'id_task_id_project' is set - if ('id_task_id_project' not in params or - params['id_task_id_project'] is None): - raise ValueError("Missing the required parameter `id_task_id_project` when calling `get_task_execution`") # noqa: E501 - # verify the required parameter 'id_task_id_domain' is set - if ('id_task_id_domain' not in params or - params['id_task_id_domain'] is None): - raise ValueError("Missing the required parameter `id_task_id_domain` when calling `get_task_execution`") # noqa: E501 - # verify the required parameter 'id_task_id_name' is set - if ('id_task_id_name' not in params or - params['id_task_id_name'] is None): - raise ValueError("Missing the required parameter `id_task_id_name` when calling `get_task_execution`") # noqa: E501 - # verify the required parameter 'id_task_id_version' is set - if ('id_task_id_version' not in params or - params['id_task_id_version'] is None): - raise 
ValueError("Missing the required parameter `id_task_id_version` when calling `get_task_execution`") # noqa: E501 - # verify the required parameter 'id_retry_attempt' is set - if ('id_retry_attempt' not in params or - params['id_retry_attempt'] is None): - raise ValueError("Missing the required parameter `id_retry_attempt` when calling `get_task_execution`") # noqa: E501 + # verify the required parameter 'id_resource_type' is set + if ('id_resource_type' not in params or + params['id_resource_type'] is None): + raise ValueError("Missing the required parameter `id_resource_type` when calling `get_description_entity`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `get_description_entity`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `get_description_entity`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_description_entity`") # noqa: E501 + # verify the required parameter 'id_version' is set + if ('id_version' not in params or + params['id_version'] is None): + raise ValueError("Missing the required parameter `id_version` when calling `get_description_entity`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_node_execution_id_execution_id_project' in params: - path_params['id.node_execution_id.execution_id.project'] = params['id_node_execution_id_execution_id_project'] # noqa: E501 - if 'id_node_execution_id_execution_id_domain' in params: - path_params['id.node_execution_id.execution_id.domain'] = params['id_node_execution_id_execution_id_domain'] # noqa: E501 - if 
'id_node_execution_id_execution_id_name' in params: - path_params['id.node_execution_id.execution_id.name'] = params['id_node_execution_id_execution_id_name'] # noqa: E501 - if 'id_node_execution_id_node_id' in params: - path_params['id.node_execution_id.node_id'] = params['id_node_execution_id_node_id'] # noqa: E501 - if 'id_task_id_project' in params: - path_params['id.task_id.project'] = params['id_task_id_project'] # noqa: E501 - if 'id_task_id_domain' in params: - path_params['id.task_id.domain'] = params['id_task_id_domain'] # noqa: E501 - if 'id_task_id_name' in params: - path_params['id.task_id.name'] = params['id_task_id_name'] # noqa: E501 - if 'id_task_id_version' in params: - path_params['id.task_id.version'] = params['id_task_id_version'] # noqa: E501 - if 'id_retry_attempt' in params: - path_params['id.retry_attempt'] = params['id_retry_attempt'] # noqa: E501 + if 'id_resource_type' in params: + path_params['id.resource_type'] = params['id_resource_type'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + if 'id_version' in params: + path_params['id.version'] = params['id_version'] # noqa: E501 query_params = [] - if 'id_task_id_resource_type' in params: - query_params.append(('id.task_id.resource_type', params['id_task_id_resource_type'])) # noqa: E501 + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 header_params = {} @@ -2609,14 +2505,14 @@ def get_task_execution_with_http_info(self, id_node_execution_id_execution_id_pr auth_settings = [] # noqa: E501 return self.api_client.call_api( - 
'/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}', 'GET', + '/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='FlyteidladminTaskExecution', # noqa: E501 + response_type='AdminDescriptionEntity', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -2624,61 +2520,53 @@ def get_task_execution_with_http_info(self, id_node_execution_id_execution_id_pr _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_task_execution_data(self, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs): # noqa: E501 - """Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + def get_description_entity2(self, id_org, id_resource_type, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_task_execution_data(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True) + >>> thread = api.get_description_entity2(id_org, id_resource_type, id_project, id_domain, id_name, id_version, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) - :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) - :param str id_node_execution_id_node_id: (required) - :param str id_task_id_project: Name of the project the resource belongs to. (required) - :param str id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_task_id_name: User provided value for the resource. (required) - :param str id_task_id_version: Specific version of the resource. (required) - :param int id_retry_attempt: (required) - :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. 
The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :return: AdminTaskExecutionGetDataResponse + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :return: AdminDescriptionEntity If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_task_execution_data_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs) # noqa: E501 + return self.get_description_entity2_with_http_info(id_org, id_resource_type, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 else: - (data) = self.get_task_execution_data_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs) # noqa: E501 + (data) = self.get_description_entity2_with_http_info(id_org, id_resource_type, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 return data - def get_task_execution_data_with_http_info(self, id_node_execution_id_execution_id_project, 
id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs): # noqa: E501 - """Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + def get_description_entity2_with_http_info(self, id_org, id_resource_type, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_task_execution_data_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True) + >>> thread = api.get_description_entity2_with_http_info(id_org, id_resource_type, id_project, id_domain, id_name, id_version, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) - :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) - :param str id_node_execution_id_node_id: (required) - :param str id_task_id_project: Name of the project the resource belongs to. (required) - :param str id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_task_id_name: User provided value for the resource. 
(required) - :param str id_task_id_version: Specific version of the resource. (required) - :param int id_retry_attempt: (required) - :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :return: AdminTaskExecutionGetDataResponse + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :return: AdminDescriptionEntity If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id_node_execution_id_execution_id_project', 'id_node_execution_id_execution_id_domain', 'id_node_execution_id_execution_id_name', 'id_node_execution_id_node_id', 'id_task_id_project', 'id_task_id_domain', 'id_task_id_name', 'id_task_id_version', 'id_retry_attempt', 'id_task_id_resource_type'] # noqa: E501 + all_params = ['id_org', 'id_resource_type', 'id_project', 'id_domain', 'id_name', 'id_version'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -2689,72 +2577,52 @@ def get_task_execution_data_with_http_info(self, id_node_execution_id_execution_ if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_task_execution_data" % key + " to method get_description_entity2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_node_execution_id_execution_id_project' is set - if ('id_node_execution_id_execution_id_project' not in params or - params['id_node_execution_id_execution_id_project'] is None): - raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_project` when calling `get_task_execution_data`") # noqa: E501 - # verify the required parameter 'id_node_execution_id_execution_id_domain' is set - if ('id_node_execution_id_execution_id_domain' not in params or - params['id_node_execution_id_execution_id_domain'] is None): - raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_domain` when calling `get_task_execution_data`") # noqa: E501 - # verify the required parameter 'id_node_execution_id_execution_id_name' is set - if ('id_node_execution_id_execution_id_name' not in params or - params['id_node_execution_id_execution_id_name'] is None): - raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_name` when calling `get_task_execution_data`") # noqa: E501 - # verify the required parameter 
'id_node_execution_id_node_id' is set - if ('id_node_execution_id_node_id' not in params or - params['id_node_execution_id_node_id'] is None): - raise ValueError("Missing the required parameter `id_node_execution_id_node_id` when calling `get_task_execution_data`") # noqa: E501 - # verify the required parameter 'id_task_id_project' is set - if ('id_task_id_project' not in params or - params['id_task_id_project'] is None): - raise ValueError("Missing the required parameter `id_task_id_project` when calling `get_task_execution_data`") # noqa: E501 - # verify the required parameter 'id_task_id_domain' is set - if ('id_task_id_domain' not in params or - params['id_task_id_domain'] is None): - raise ValueError("Missing the required parameter `id_task_id_domain` when calling `get_task_execution_data`") # noqa: E501 - # verify the required parameter 'id_task_id_name' is set - if ('id_task_id_name' not in params or - params['id_task_id_name'] is None): - raise ValueError("Missing the required parameter `id_task_id_name` when calling `get_task_execution_data`") # noqa: E501 - # verify the required parameter 'id_task_id_version' is set - if ('id_task_id_version' not in params or - params['id_task_id_version'] is None): - raise ValueError("Missing the required parameter `id_task_id_version` when calling `get_task_execution_data`") # noqa: E501 - # verify the required parameter 'id_retry_attempt' is set - if ('id_retry_attempt' not in params or - params['id_retry_attempt'] is None): - raise ValueError("Missing the required parameter `id_retry_attempt` when calling `get_task_execution_data`") # noqa: E501 + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `get_description_entity2`") # noqa: E501 + # verify the required parameter 'id_resource_type' is set + if ('id_resource_type' not in params or + params['id_resource_type'] is None): + raise 
ValueError("Missing the required parameter `id_resource_type` when calling `get_description_entity2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `get_description_entity2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `get_description_entity2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_description_entity2`") # noqa: E501 + # verify the required parameter 'id_version' is set + if ('id_version' not in params or + params['id_version'] is None): + raise ValueError("Missing the required parameter `id_version` when calling `get_description_entity2`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_node_execution_id_execution_id_project' in params: - path_params['id.node_execution_id.execution_id.project'] = params['id_node_execution_id_execution_id_project'] # noqa: E501 - if 'id_node_execution_id_execution_id_domain' in params: - path_params['id.node_execution_id.execution_id.domain'] = params['id_node_execution_id_execution_id_domain'] # noqa: E501 - if 'id_node_execution_id_execution_id_name' in params: - path_params['id.node_execution_id.execution_id.name'] = params['id_node_execution_id_execution_id_name'] # noqa: E501 - if 'id_node_execution_id_node_id' in params: - path_params['id.node_execution_id.node_id'] = params['id_node_execution_id_node_id'] # noqa: E501 - if 'id_task_id_project' in params: - path_params['id.task_id.project'] = params['id_task_id_project'] # noqa: E501 - if 'id_task_id_domain' in params: - path_params['id.task_id.domain'] = 
params['id_task_id_domain'] # noqa: E501 - if 'id_task_id_name' in params: - path_params['id.task_id.name'] = params['id_task_id_name'] # noqa: E501 - if 'id_task_id_version' in params: - path_params['id.task_id.version'] = params['id_task_id_version'] # noqa: E501 - if 'id_retry_attempt' in params: - path_params['id.retry_attempt'] = params['id_retry_attempt'] # noqa: E501 + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'id_resource_type' in params: + path_params['id.resource_type'] = params['id_resource_type'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + if 'id_version' in params: + path_params['id.version'] = params['id_version'] # noqa: E501 query_params = [] - if 'id_task_id_resource_type' in params: - query_params.append(('id.task_id.resource_type', params['id_task_id_resource_type'])) # noqa: E501 header_params = {} @@ -2774,14 +2642,14 @@ def get_task_execution_data_with_http_info(self, id_node_execution_id_execution_ auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}', 'GET', + '/api/v1/description_entities/org/{id.org}/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminTaskExecutionGetDataResponse', # noqa: E501 + response_type='AdminDescriptionEntity', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), 
_return_http_data_only=params.get('_return_http_data_only'), @@ -2789,41 +2657,51 @@ def get_task_execution_data_with_http_info(self, id_node_execution_id_execution_ _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_version(self, **kwargs): # noqa: E501 - """get_version # noqa: E501 + def get_dynamic_node_workflow(self, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_version(async_req=True) + >>> thread = api.get_dynamic_node_workflow(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) >>> result = thread.get() :param async_req bool - :return: AdminGetVersionResponse + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :param str id_execution_id_org: Optional, org key applied to the resource. + :return: AdminDynamicNodeWorkflowResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_version_with_http_info(**kwargs) # noqa: E501 + return self.get_dynamic_node_workflow_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 else: - (data) = self.get_version_with_http_info(**kwargs) # noqa: E501 + (data) = self.get_dynamic_node_workflow_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 return data - def get_version_with_http_info(self, **kwargs): # noqa: E501 - """get_version # noqa: E501 + def get_dynamic_node_workflow_with_http_info(self, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_version_with_http_info(async_req=True) + >>> thread = api.get_dynamic_node_workflow_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) >>> result = thread.get() :param async_req bool - :return: AdminGetVersionResponse + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :param str id_execution_id_org: Optional, org key applied to the resource. + :return: AdminDynamicNodeWorkflowResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = [] # noqa: E501 + all_params = ['id_execution_id_project', 'id_execution_id_domain', 'id_execution_id_name', 'id_node_id', 'id_execution_id_org'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -2834,16 +2712,42 @@ def get_version_with_http_info(self, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_version" % key + " to method get_dynamic_node_workflow" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'id_execution_id_project' is set + if ('id_execution_id_project' not in params or + params['id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `id_execution_id_project` when calling `get_dynamic_node_workflow`") # noqa: E501 + # verify the required parameter 'id_execution_id_domain' is set + if ('id_execution_id_domain' not in params or + params['id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `id_execution_id_domain` when calling `get_dynamic_node_workflow`") # noqa: E501 + # verify the required parameter 'id_execution_id_name' is set + if ('id_execution_id_name' not in params or + params['id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `id_execution_id_name` when calling `get_dynamic_node_workflow`") # noqa: E501 + # verify the required parameter 'id_node_id' is set + if ('id_node_id' not in params or + params['id_node_id'] is None): + raise ValueError("Missing the required parameter `id_node_id` when calling `get_dynamic_node_workflow`") # noqa: E501 collection_formats = {} path_params = {} + if 'id_execution_id_project' in params: + path_params['id.execution_id.project'] = params['id_execution_id_project'] # noqa: E501 + if 'id_execution_id_domain' in params: + path_params['id.execution_id.domain'] = params['id_execution_id_domain'] # noqa: E501 + 
if 'id_execution_id_name' in params: + path_params['id.execution_id.name'] = params['id_execution_id_name'] # noqa: E501 + if 'id_node_id' in params: + path_params['id.node_id'] = params['id_node_id'] # noqa: E501 query_params = [] + if 'id_execution_id_org' in params: + query_params.append(('id.execution_id.org', params['id_execution_id_org'])) # noqa: E501 header_params = {} @@ -2863,14 +2767,14 @@ def get_version_with_http_info(self, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/version', 'GET', + '/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminGetVersionResponse', # noqa: E501 + response_type='AdminDynamicNodeWorkflowResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -2878,51 +2782,51 @@ def get_version_with_http_info(self, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_workflow(self, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 - """Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. # noqa: E501 + def get_dynamic_node_workflow2(self, id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_workflow(id_project, id_domain, id_name, id_version, async_req=True) + >>> thread = api.get_dynamic_node_workflow2(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. (required) - :param str id_version: Specific version of the resource. (required) - :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :return: AdminWorkflow + :param str id_execution_id_org: Optional, org key applied to the resource. (required) + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :return: AdminDynamicNodeWorkflowResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_workflow_with_http_info(id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + return self.get_dynamic_node_workflow2_with_http_info(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 else: - (data) = self.get_workflow_with_http_info(id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + (data) = self.get_dynamic_node_workflow2_with_http_info(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 return data - def get_workflow_with_http_info(self, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 - """Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. # noqa: E501 + def get_dynamic_node_workflow2_with_http_info(self, id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_workflow_with_http_info(id_project, id_domain, id_name, id_version, async_req=True) + >>> thread = api.get_dynamic_node_workflow2_with_http_info(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. (required) - :param str id_version: Specific version of the resource. 
(required) - :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :return: AdminWorkflow + :param str id_execution_id_org: Optional, org key applied to the resource. (required) + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :return: AdminDynamicNodeWorkflowResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id_project', 'id_domain', 'id_name', 'id_version', 'id_resource_type'] # noqa: E501 + all_params = ['id_execution_id_org', 'id_execution_id_project', 'id_execution_id_domain', 'id_execution_id_name', 'id_node_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -2933,42 +2837,46 @@ def get_workflow_with_http_info(self, id_project, id_domain, id_name, id_version if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_workflow" % key + " to method get_dynamic_node_workflow2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `get_workflow`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if ('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `get_workflow`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in params or - params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `get_workflow`") # noqa: E501 - # verify the required parameter 'id_version' is set - if ('id_version' not in params or - params['id_version'] is None): - raise ValueError("Missing the required parameter `id_version` when calling `get_workflow`") # noqa: E501 + # verify the required parameter 'id_execution_id_org' is set + if ('id_execution_id_org' not in params or + params['id_execution_id_org'] is None): + raise ValueError("Missing the required parameter `id_execution_id_org` when calling `get_dynamic_node_workflow2`") # noqa: E501 + # verify the required parameter 'id_execution_id_project' is set + if ('id_execution_id_project' not in params or + 
params['id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `id_execution_id_project` when calling `get_dynamic_node_workflow2`") # noqa: E501 + # verify the required parameter 'id_execution_id_domain' is set + if ('id_execution_id_domain' not in params or + params['id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `id_execution_id_domain` when calling `get_dynamic_node_workflow2`") # noqa: E501 + # verify the required parameter 'id_execution_id_name' is set + if ('id_execution_id_name' not in params or + params['id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `id_execution_id_name` when calling `get_dynamic_node_workflow2`") # noqa: E501 + # verify the required parameter 'id_node_id' is set + if ('id_node_id' not in params or + params['id_node_id'] is None): + raise ValueError("Missing the required parameter `id_node_id` when calling `get_dynamic_node_workflow2`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = params['id_name'] # noqa: E501 - if 'id_version' in params: - path_params['id.version'] = params['id_version'] # noqa: E501 + if 'id_execution_id_org' in params: + path_params['id.execution_id.org'] = params['id_execution_id_org'] # noqa: E501 + if 'id_execution_id_project' in params: + path_params['id.execution_id.project'] = params['id_execution_id_project'] # noqa: E501 + if 'id_execution_id_domain' in params: + path_params['id.execution_id.domain'] = params['id_execution_id_domain'] # noqa: E501 + if 'id_execution_id_name' in params: + path_params['id.execution_id.name'] = params['id_execution_id_name'] # noqa: E501 + if 'id_node_id' in params: + path_params['id.node_id'] = params['id_node_id'] # noqa: E501 
query_params = [] - if 'id_resource_type' in params: - query_params.append(('id.resource_type', params['id_resource_type'])) # noqa: E501 header_params = {} @@ -2988,14 +2896,14 @@ def get_workflow_with_http_info(self, id_project, id_domain, id_name, id_version auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET', + '/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminWorkflow', # noqa: E501 + response_type='AdminDynamicNodeWorkflowResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -3003,49 +2911,49 @@ def get_workflow_with_http_info(self, id_project, id_domain, id_name, id_version _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def get_workflow_attributes(self, project, domain, workflow, **kwargs): # noqa: E501 - """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 + def get_execution(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_workflow_attributes(project, domain, workflow, async_req=True) + >>> thread = api.get_execution(id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Unique project id which this set of attributes references. +required (required) - :param str domain: Unique domain id which this set of attributes references. 
+required (required) - :param str workflow: Workflow name which this set of attributes references. +required (required) - :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. - :return: AdminWorkflowAttributesGetResponse + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User or system provided value for the resource. (required) + :param str id_org: Optional, org key applied to the resource. + :return: AdminExecution If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.get_workflow_attributes_with_http_info(project, domain, workflow, **kwargs) # noqa: E501 + return self.get_execution_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 else: - (data) = self.get_workflow_attributes_with_http_info(project, domain, workflow, **kwargs) # noqa: E501 + (data) = self.get_execution_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 return data - def get_workflow_attributes_with_http_info(self, project, domain, workflow, **kwargs): # noqa: E501 - """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 + def get_execution_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_workflow_attributes_with_http_info(project, domain, workflow, async_req=True) + >>> thread = api.get_execution_with_http_info(id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Unique project id which this set of attributes references. +required (required) - :param str domain: Unique domain id which this set of attributes references. +required (required) - :param str workflow: Workflow name which this set of attributes references. +required (required) - :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. 
- EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. - :return: AdminWorkflowAttributesGetResponse + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User or system provided value for the resource. (required) + :param str id_org: Optional, org key applied to the resource. + :return: AdminExecution If the method is called asynchronously, returns the request thread. 
""" - all_params = ['project', 'domain', 'workflow', 'resource_type'] # noqa: E501 + all_params = ['id_project', 'id_domain', 'id_name', 'id_org'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -3056,36 +2964,36 @@ def get_workflow_attributes_with_http_info(self, project, domain, workflow, **kw if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method get_workflow_attributes" % key + " to method get_execution" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'project' is set - if ('project' not in params or - params['project'] is None): - raise ValueError("Missing the required parameter `project` when calling `get_workflow_attributes`") # noqa: E501 - # verify the required parameter 'domain' is set - if ('domain' not in params or - params['domain'] is None): - raise ValueError("Missing the required parameter `domain` when calling `get_workflow_attributes`") # noqa: E501 - # verify the required parameter 'workflow' is set - if ('workflow' not in params or - params['workflow'] is None): - raise ValueError("Missing the required parameter `workflow` when calling `get_workflow_attributes`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `get_execution`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `get_execution`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_execution`") # noqa: E501 collection_formats = {} path_params = {} - if 
'project' in params: - path_params['project'] = params['project'] # noqa: E501 - if 'domain' in params: - path_params['domain'] = params['domain'] # noqa: E501 - if 'workflow' in params: - path_params['workflow'] = params['workflow'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] - if 'resource_type' in params: - query_params.append(('resource_type', params['resource_type'])) # noqa: E501 + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 header_params = {} @@ -3105,14 +3013,14 @@ def get_workflow_attributes_with_http_info(self, project, domain, workflow, **kw auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/workflow_attributes/{project}/{domain}/{workflow}', 'GET', + '/api/v1/executions/{id.project}/{id.domain}/{id.name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminWorkflowAttributesGetResponse', # noqa: E501 + response_type='AdminExecution', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -3120,53 +3028,49 @@ def get_workflow_attributes_with_http_info(self, project, domain, workflow, **kw _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_active_launch_plans(self, project, domain, **kwargs): # noqa: E501 - """List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 + def get_execution2(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_active_launch_plans(project, domain, async_req=True) + >>> thread = api.get_execution2(id_org, id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Name of the project that contains the identifiers. +required. (required) - :param str domain: Name of the domain the identifiers belongs to within the project. +required. (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminLaunchPlanList + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User or system provided value for the resource. (required) + :return: AdminExecution If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_active_launch_plans_with_http_info(project, domain, **kwargs) # noqa: E501 + return self.get_execution2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 else: - (data) = self.list_active_launch_plans_with_http_info(project, domain, **kwargs) # noqa: E501 + (data) = self.get_execution2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 return data - def list_active_launch_plans_with_http_info(self, project, domain, **kwargs): # noqa: E501 - """List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 + def get_execution2_with_http_info(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_active_launch_plans_with_http_info(project, domain, async_req=True) + >>> thread = api.get_execution2_with_http_info(id_org, id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Name of the project that contains the identifiers. +required. (required) - :param str domain: Name of the domain the identifiers belongs to within the project. +required. (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminLaunchPlanList + :param str id_org: Optional, org key applied to the resource. 
(required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User or system provided value for the resource. (required) + :return: AdminExecution If the method is called asynchronously, returns the request thread. """ - all_params = ['project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['id_org', 'id_project', 'id_domain', 'id_name'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -3177,36 +3081,40 @@ def list_active_launch_plans_with_http_info(self, project, domain, **kwargs): # if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_active_launch_plans" % key + " to method get_execution2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'project' is set - if ('project' not in params or - params['project'] is None): - raise ValueError("Missing the required parameter `project` when calling `list_active_launch_plans`") # noqa: E501 - # verify the required parameter 'domain' is set - if ('domain' not in params or - params['domain'] is None): - raise ValueError("Missing the required parameter `domain` when calling `list_active_launch_plans`") # noqa: E501 + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `get_execution2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `get_execution2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' 
not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `get_execution2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_execution2`") # noqa: E501 collection_formats = {} path_params = {} - if 'project' in params: - path_params['project'] = params['project'] # noqa: E501 - if 'domain' in params: - path_params['domain'] = params['domain'] # noqa: E501 + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 header_params = {} @@ -3226,14 +3134,14 @@ def list_active_launch_plans_with_http_info(self, project, domain, **kwargs): # auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/active_launch_plans/{project}/{domain}', 'GET', + '/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminLaunchPlanList', # noqa: E501 + response_type='AdminExecution', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), 
_return_http_data_only=params.get('_return_http_data_only'), @@ -3241,59 +3149,49 @@ def list_active_launch_plans_with_http_info(self, project, domain, **kwargs): # _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_description_entities(self, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + def get_execution_data(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_description_entities(resource_type, id_project, id_domain, id_name, async_req=True) + >>> thread = api.get_execution_data(id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. 
- :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminDescriptionEntityList + :param str id_name: User or system provided value for the resource. (required) + :param str id_org: Optional, org key applied to the resource. + :return: AdminWorkflowExecutionGetDataResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_description_entities_with_http_info(resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 + return self.get_execution_data_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 else: - (data) = self.list_description_entities_with_http_info(resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 + (data) = self.get_execution_data_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 return data - def list_description_entities_with_http_info(self, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + def get_execution_data_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_description_entities_with_http_info(resource_type, id_project, id_domain, id_name, async_req=True) + >>> thread = api.get_execution_data_with_http_info(id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. 
(required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminDescriptionEntityList + :param str id_name: User or system provided value for the resource. (required) + :param str id_org: Optional, org key applied to the resource. + :return: AdminWorkflowExecutionGetDataResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['resource_type', 'id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['id_project', 'id_domain', 'id_name', 'id_org'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -3304,32 +3202,26 @@ def list_description_entities_with_http_info(self, resource_type, id_project, id if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_description_entities" % key + " to method get_execution_data" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'resource_type' is set - if ('resource_type' not in params or - params['resource_type'] is None): - raise ValueError("Missing the required parameter `resource_type` when calling `list_description_entities`") # noqa: E501 # verify the required parameter 'id_project' is set if ('id_project' not in params or params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `list_description_entities`") # noqa: E501 + raise ValueError("Missing the required parameter `id_project` when calling `get_execution_data`") # noqa: E501 # verify the required parameter 'id_domain' is set if ('id_domain' not in params or params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `list_description_entities`") # noqa: E501 + raise ValueError("Missing the required parameter `id_domain` when calling `get_execution_data`") # noqa: E501 # verify the required parameter 'id_name' is set if ('id_name' not in params or params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `list_description_entities`") # noqa: E501 + raise ValueError("Missing the required parameter `id_name` when calling `get_execution_data`") # noqa: E501 collection_formats = {} path_params = {} - if 
'resource_type' in params: - path_params['resource_type'] = params['resource_type'] # noqa: E501 if 'id_project' in params: path_params['id.project'] = params['id_project'] # noqa: E501 if 'id_domain' in params: @@ -3338,16 +3230,8 @@ def list_description_entities_with_http_info(self, resource_type, id_project, id path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 header_params = {} @@ -3367,14 +3251,14 @@ def list_description_entities_with_http_info(self, resource_type, id_project, id auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}', 'GET', + '/api/v1/data/executions/{id.project}/{id.domain}/{id.name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminDescriptionEntityList', # noqa: E501 + response_type='AdminWorkflowExecutionGetDataResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -3382,59 +3266,49 @@ def list_description_entities_with_http_info(self, resource_type, id_project, id _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_description_entities2(self, resource_type, id_project, id_domain, **kwargs): # noqa: E501 - """Fetch a 
list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + def get_execution_data2(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_description_entities2(resource_type, id_project, id_domain, async_req=True) + >>> thread = api.get_execution_data2(id_org, id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_org: Optional, org key applied to the resource. (required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminDescriptionEntityList + :param str id_name: User or system provided value for the resource. 
(required) + :return: AdminWorkflowExecutionGetDataResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_description_entities2_with_http_info(resource_type, id_project, id_domain, **kwargs) # noqa: E501 + return self.get_execution_data2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 else: - (data) = self.list_description_entities2_with_http_info(resource_type, id_project, id_domain, **kwargs) # noqa: E501 + (data) = self.get_execution_data2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 return data - def list_description_entities2_with_http_info(self, resource_type, id_project, id_domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + def get_execution_data2_with_http_info(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_description_entities2_with_http_info(resource_type, id_project, id_domain, async_req=True) + >>> thread = api.get_execution_data2_with_http_info(id_org, id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_org: Optional, org key applied to the resource. (required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. 
The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminDescriptionEntityList + :param str id_name: User or system provided value for the resource. (required) + :return: AdminWorkflowExecutionGetDataResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['resource_type', 'id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['id_org', 'id_project', 'id_domain', 'id_name'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -3445,46 +3319,40 @@ def list_description_entities2_with_http_info(self, resource_type, id_project, i if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_description_entities2" % key + " to method get_execution_data2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'resource_type' is set - if ('resource_type' not in params or - params['resource_type'] is None): - raise ValueError("Missing the required parameter `resource_type` when calling `list_description_entities2`") # noqa: E501 + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + 
params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `get_execution_data2`") # noqa: E501 # verify the required parameter 'id_project' is set if ('id_project' not in params or params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `list_description_entities2`") # noqa: E501 + raise ValueError("Missing the required parameter `id_project` when calling `get_execution_data2`") # noqa: E501 # verify the required parameter 'id_domain' is set if ('id_domain' not in params or params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `list_description_entities2`") # noqa: E501 + raise ValueError("Missing the required parameter `id_domain` when calling `get_execution_data2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_execution_data2`") # noqa: E501 collection_formats = {} path_params = {} - if 'resource_type' in params: - path_params['resource_type'] = params['resource_type'] # noqa: E501 + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 if 'id_project' in params: path_params['id.project'] = params['id_project'] # noqa: E501 if 'id_domain' in params: path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] - if 'id_name' in params: - query_params.append(('id.name', params['id_name'])) # noqa: E501 - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', 
params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 header_params = {} @@ -3504,14 +3372,14 @@ def list_description_entities2_with_http_info(self, resource_type, id_project, i auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}', 'GET', + '/api/v1/data/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminDescriptionEntityList', # noqa: E501 + response_type='AdminWorkflowExecutionGetDataResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -3519,57 +3387,51 @@ def list_description_entities2_with_http_info(self, resource_type, id_project, i _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_executions(self, id_project, id_domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + def get_execution_metrics(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_executions(id_project, id_domain, async_req=True) + >>> thread = api.get_execution_metrics(id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. 
(required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminExecutionList + :param str id_name: User or system provided value for the resource. (required) + :param str id_org: Optional, org key applied to the resource. + :param int depth: depth defines the number of Flyte entity levels to traverse when breaking down execution details. + :return: AdminWorkflowExecutionGetMetricsResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_executions_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + return self.get_execution_metrics_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 else: - (data) = self.list_executions_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + (data) = self.get_execution_metrics_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 return data - def list_executions_with_http_info(self, id_project, id_domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.Execution`. 
# noqa: E501 + def get_execution_metrics_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_executions_with_http_info(id_project, id_domain, async_req=True) + >>> thread = api.get_execution_metrics_with_http_info(id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminExecutionList + :param str id_name: User or system provided value for the resource. (required) + :param str id_org: Optional, org key applied to the resource. + :param int depth: depth defines the number of Flyte entity levels to traverse when breaking down execution details. 
+ :return: AdminWorkflowExecutionGetMetricsResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['id_project', 'id_domain', 'id_name', 'id_org', 'depth'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -3580,18 +3442,22 @@ def list_executions_with_http_info(self, id_project, id_domain, **kwargs): # no if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_executions" % key + " to method get_execution_metrics" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'id_project' is set if ('id_project' not in params or params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `list_executions`") # noqa: E501 + raise ValueError("Missing the required parameter `id_project` when calling `get_execution_metrics`") # noqa: E501 # verify the required parameter 'id_domain' is set if ('id_domain' not in params or params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `list_executions`") # noqa: E501 + raise ValueError("Missing the required parameter `id_domain` when calling `get_execution_metrics`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_execution_metrics`") # noqa: E501 collection_formats = {} @@ -3600,20 +3466,14 @@ def list_executions_with_http_info(self, id_project, id_domain, **kwargs): # no path_params['id.project'] = params['id_project'] # noqa: E501 if 'id_domain' in params: path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] 
= params['id_name'] # noqa: E501 query_params = [] - if 'id_name' in params: - query_params.append(('id.name', params['id_name'])) # noqa: E501 - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 + if 'depth' in params: + query_params.append(('depth', params['depth'])) # noqa: E501 header_params = {} @@ -3633,14 +3493,14 @@ def list_executions_with_http_info(self, id_project, id_domain, **kwargs): # no auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/executions/{id.project}/{id.domain}', 'GET', + '/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminExecutionList', # noqa: E501 + response_type='AdminWorkflowExecutionGetMetricsResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -3648,55 +3508,51 @@ def list_executions_with_http_info(self, id_project, id_domain, **kwargs): # no _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_launch_plan_ids(self, project, domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. 
# noqa: E501 + def get_execution_metrics2(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_launch_plan_ids(project, domain, async_req=True) + >>> thread = api.get_execution_metrics2(id_org, id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Name of the project that contains the identifiers. +required (required) - :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :param str filters: Indicates a list of filters passed as string. +optional. - :return: AdminNamedEntityIdentifierList + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User or system provided value for the resource. (required) + :param int depth: depth defines the number of Flyte entity levels to traverse when breaking down execution details. + :return: AdminWorkflowExecutionGetMetricsResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_launch_plan_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + return self.get_execution_metrics2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 else: - (data) = self.list_launch_plan_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + (data) = self.get_execution_metrics2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 return data - def list_launch_plan_ids_with_http_info(self, project, domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. # noqa: E501 + def get_execution_metrics2_with_http_info(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_launch_plan_ids_with_http_info(project, domain, async_req=True) + >>> thread = api.get_execution_metrics2_with_http_info(id_org, id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Name of the project that contains the identifiers. +required (required) - :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
- :param str filters: Indicates a list of filters passed as string. +optional. - :return: AdminNamedEntityIdentifierList + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User or system provided value for the resource. (required) + :param int depth: depth defines the number of Flyte entity levels to traverse when breaking down execution details. + :return: AdminWorkflowExecutionGetMetricsResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters'] # noqa: E501 + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'depth'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -3707,38 +3563,42 @@ def list_launch_plan_ids_with_http_info(self, project, domain, **kwargs): # noq if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_launch_plan_ids" % key + " to method get_execution_metrics2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'project' is set - if ('project' not in params or - params['project'] is None): - raise ValueError("Missing the required parameter `project` when calling `list_launch_plan_ids`") # noqa: E501 - # verify the required parameter 'domain' is set - if ('domain' not in params or - params['domain'] is None): - raise ValueError("Missing the required parameter `domain` when calling `list_launch_plan_ids`") # noqa: E501 + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when 
calling `get_execution_metrics2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `get_execution_metrics2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `get_execution_metrics2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_execution_metrics2`") # noqa: E501 collection_formats = {} path_params = {} - if 'project' in params: - path_params['project'] = params['project'] # noqa: E501 - if 'domain' in params: - path_params['domain'] = params['domain'] # noqa: E501 + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 + if 'depth' in params: + query_params.append(('depth', params['depth'])) # noqa: E501 header_params = {} @@ -3758,14 +3618,14 @@ def list_launch_plan_ids_with_http_info(self, 
project, domain, **kwargs): # noq auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/launch_plan_ids/{project}/{domain}', 'GET', + '/api/v1/metrics/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminNamedEntityIdentifierList', # noqa: E501 + response_type='AdminWorkflowExecutionGetMetricsResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -3773,57 +3633,53 @@ def list_launch_plan_ids_with_http_info(self, project, domain, **kwargs): # noq _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_launch_plans(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + def get_launch_plan(self, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_launch_plans(id_project, id_domain, id_name, async_req=True) + >>> thread = api.get_launch_plan(id_project, id_domain, id_name, id_version, async_req=True) >>> result = thread.get() :param async_req bool :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :param int limit: Indicates the number of resources to be returned. 
+required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminLaunchPlanList + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str id_org: Optional, org key applied to the resource. + :return: AdminLaunchPlan If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_launch_plans_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + return self.get_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 else: - (data) = self.list_launch_plans_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + (data) = self.get_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 return data - def list_launch_plans_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + def get_launch_plan_with_http_info(self, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_launch_plans_with_http_info(id_project, id_domain, id_name, async_req=True) + >>> thread = api.get_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, async_req=True) >>> result = thread.get() :param async_req bool :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. 
- :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminLaunchPlanList + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str id_org: Optional, org key applied to the resource. + :return: AdminLaunchPlan If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['id_project', 'id_domain', 'id_name', 'id_version', 'id_resource_type', 'id_org'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -3834,22 +3690,26 @@ def list_launch_plans_with_http_info(self, id_project, id_domain, id_name, **kwa if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_launch_plans" % key + " to method get_launch_plan" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'id_project' is set if ('id_project' not in params or params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `list_launch_plans`") # noqa: E501 + raise ValueError("Missing the required parameter `id_project` when calling `get_launch_plan`") # noqa: E501 # verify the required parameter 'id_domain' is set if ('id_domain' not in params or params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `list_launch_plans`") # noqa: E501 + raise ValueError("Missing the required parameter `id_domain` when calling `get_launch_plan`") # noqa: E501 # verify the required parameter 'id_name' is set if ('id_name' not in params or params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `list_launch_plans`") # noqa: E501 + raise ValueError("Missing the required parameter `id_name` when calling `get_launch_plan`") # noqa: E501 + # verify the required parameter 'id_version' is set + if ('id_version' not in params or + params['id_version'] is None): + raise ValueError("Missing the required parameter `id_version` when calling `get_launch_plan`") # noqa: E501 collection_formats = {} @@ -3860,18 +3720,14 @@ def list_launch_plans_with_http_info(self, id_project, 
id_domain, id_name, **kwa path_params['id.domain'] = params['id_domain'] # noqa: E501 if 'id_name' in params: path_params['id.name'] = params['id_name'] # noqa: E501 + if 'id_version' in params: + path_params['id.version'] = params['id_version'] # noqa: E501 query_params = [] - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'id_resource_type' in params: + query_params.append(('id.resource_type', params['id_resource_type'])) # noqa: E501 + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 header_params = {} @@ -3891,14 +3747,14 @@ def list_launch_plans_with_http_info(self, id_project, id_domain, id_name, **kwa auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}', 'GET', + '/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminLaunchPlanList', # noqa: E501 + response_type='AdminLaunchPlan', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -3906,57 +3762,53 @@ def list_launch_plans_with_http_info(self, id_project, id_domain, id_name, **kwa _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_launch_plans2(self, id_project, id_domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. 
# noqa: E501 + def get_launch_plan2(self, id_org, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_launch_plans2(id_project, id_domain, async_req=True) + >>> thread = api.get_launch_plan2(id_org, id_project, id_domain, id_name, id_version, async_req=True) >>> result = thread.get() :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminLaunchPlanList + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. 
- DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :return: AdminLaunchPlan If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_launch_plans2_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + return self.get_launch_plan2_with_http_info(id_org, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 else: - (data) = self.list_launch_plans2_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + (data) = self.get_launch_plan2_with_http_info(id_org, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 return data - def list_launch_plans2_with_http_info(self, id_project, id_domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + def get_launch_plan2_with_http_info(self, id_org, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_launch_plans2_with_http_info(id_project, id_domain, async_req=True) + >>> thread = api.get_launch_plan2_with_http_info(id_org, id_project, id_domain, id_name, id_version, async_req=True) >>> result = thread.get() :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. 
A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminLaunchPlanList + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :return: AdminLaunchPlan If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'id_version', 'id_resource_type'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -3967,40 +3819,48 @@ def list_launch_plans2_with_http_info(self, id_project, id_domain, **kwargs): # if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_launch_plans2" % key + " to method get_launch_plan2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `get_launch_plan2`") # noqa: E501 # verify the required parameter 'id_project' is set if ('id_project' not in params or params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `list_launch_plans2`") # noqa: E501 + raise ValueError("Missing the required parameter `id_project` when calling `get_launch_plan2`") # noqa: E501 # verify the required parameter 'id_domain' is set if ('id_domain' not in params or params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `list_launch_plans2`") # noqa: E501 + raise ValueError("Missing the required parameter `id_domain` when calling `get_launch_plan2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_launch_plan2`") # noqa: E501 + # verify the required parameter 'id_version' is set + if ('id_version' not in params or + params['id_version'] is None): + raise ValueError("Missing the required parameter `id_version` when calling 
`get_launch_plan2`") # noqa: E501 collection_formats = {} path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 if 'id_project' in params: path_params['id.project'] = params['id_project'] # noqa: E501 if 'id_domain' in params: path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + if 'id_version' in params: + path_params['id.version'] = params['id_version'] # noqa: E501 query_params = [] - if 'id_name' in params: - query_params.append(('id.name', params['id_name'])) # noqa: E501 - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'id_resource_type' in params: + query_params.append(('id.resource_type', params['id_resource_type'])) # noqa: E501 header_params = {} @@ -4020,14 +3880,14 @@ def list_launch_plans2_with_http_info(self, id_project, id_domain, **kwargs): # auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/launch_plans/{id.project}/{id.domain}', 'GET', + '/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminLaunchPlanList', # noqa: E501 + response_type='AdminLaunchPlan', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -4035,43 +3895,51 @@ def list_launch_plans2_with_http_info(self, id_project, id_domain, 
**kwargs): # _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_matchable_attributes(self, **kwargs): # noqa: E501 - """Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. # noqa: E501 + def get_named_entity(self, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_matchable_attributes(async_req=True) + >>> thread = api.get_named_entity(resource_type, id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. - :return: AdminListMatchableAttributesResponse + :param str resource_type: Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. +required (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. 
(required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. + :return: AdminNamedEntity If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_matchable_attributes_with_http_info(**kwargs) # noqa: E501 + return self.get_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 else: - (data) = self.list_matchable_attributes_with_http_info(**kwargs) # noqa: E501 + (data) = self.get_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 return data - def list_matchable_attributes_with_http_info(self, **kwargs): # noqa: E501 - """Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. # noqa: E501 + def get_named_entity_with_http_info(self, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_matchable_attributes_with_http_info(async_req=True) + >>> thread = api.get_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. 
- EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. - :return: AdminListMatchableAttributesResponse + :param str resource_type: Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. +required (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. + :return: AdminNamedEntity If the method is called asynchronously, returns the request thread. 
""" - all_params = ['resource_type'] # noqa: E501 + all_params = ['resource_type', 'id_project', 'id_domain', 'id_name', 'id_org'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -4082,18 +3950,4693 @@ def list_matchable_attributes_with_http_info(self, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_matchable_attributes" % key + " to method get_named_entity" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'resource_type' is set + if ('resource_type' not in params or + params['resource_type'] is None): + raise ValueError("Missing the required parameter `resource_type` when calling `get_named_entity`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `get_named_entity`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `get_named_entity`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_named_entity`") # noqa: E501 collection_formats = {} path_params = {} + if 'resource_type' in params: + path_params['resource_type'] = params['resource_type'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] - if 'resource_type' in params: - 
query_params.append(('resource_type', params['resource_type'])) # noqa: E501 + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNamedEntity', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_named_entity2(self, id_org, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_named_entity2(id_org, resource_type, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str resource_type: Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. +required (required) + :param str id_project: Name of the project the resource belongs to. 
(required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :return: AdminNamedEntity + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_named_entity2_with_http_info(id_org, resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 + else: + (data) = self.get_named_entity2_with_http_info(id_org, resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 + return data + + def get_named_entity2_with_http_info(self, id_org, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_named_entity2_with_http_info(id_org, resource_type, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str resource_type: Resource type of the metadata to get. One of Task, Workflow or LaunchPlan. +required (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. 
+optional - in certain contexts - like 'List API', 'Launch plans' (required) + :return: AdminNamedEntity + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_org', 'resource_type', 'id_project', 'id_domain', 'id_name'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_named_entity2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `get_named_entity2`") # noqa: E501 + # verify the required parameter 'resource_type' is set + if ('resource_type' not in params or + params['resource_type'] is None): + raise ValueError("Missing the required parameter `resource_type` when calling `get_named_entity2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `get_named_entity2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `get_named_entity2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_named_entity2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'resource_type' in 
params: + path_params['resource_type'] = params['resource_type'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNamedEntity', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_node_execution(self, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_node_execution(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :param str id_execution_id_org: Optional, org key applied to the resource. + :return: FlyteidladminNodeExecution + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_node_execution_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + else: + (data) = self.get_node_execution_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + return data + + def get_node_execution_with_http_info(self, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_node_execution_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. 
A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :param str id_execution_id_org: Optional, org key applied to the resource. + :return: FlyteidladminNodeExecution + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_execution_id_project', 'id_execution_id_domain', 'id_execution_id_name', 'id_node_id', 'id_execution_id_org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_node_execution" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_execution_id_project' is set + if ('id_execution_id_project' not in params or + params['id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `id_execution_id_project` when calling `get_node_execution`") # noqa: E501 + # verify the required parameter 'id_execution_id_domain' is set + if ('id_execution_id_domain' not in params or + params['id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `id_execution_id_domain` when calling `get_node_execution`") # noqa: E501 + # verify the required parameter 'id_execution_id_name' is set + if ('id_execution_id_name' not in params or + params['id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `id_execution_id_name` when calling `get_node_execution`") # noqa: E501 + # verify the required parameter 'id_node_id' is set + if ('id_node_id' not in params or + params['id_node_id'] is None): + raise ValueError("Missing the required parameter `id_node_id` when 
calling `get_node_execution`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_execution_id_project' in params: + path_params['id.execution_id.project'] = params['id_execution_id_project'] # noqa: E501 + if 'id_execution_id_domain' in params: + path_params['id.execution_id.domain'] = params['id_execution_id_domain'] # noqa: E501 + if 'id_execution_id_name' in params: + path_params['id.execution_id.name'] = params['id_execution_id_name'] # noqa: E501 + if 'id_node_id' in params: + path_params['id.node_id'] = params['id_node_id'] # noqa: E501 + + query_params = [] + if 'id_execution_id_org' in params: + query_params.append(('id.execution_id.org', params['id_execution_id_org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='FlyteidladminNodeExecution', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_node_execution2(self, id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. 
# noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_node_execution2(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_execution_id_org: Optional, org key applied to the resource. (required) + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :return: FlyteidladminNodeExecution + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_node_execution2_with_http_info(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + else: + (data) = self.get_node_execution2_with_http_info(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + return data + + def get_node_execution2_with_http_info(self, id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_node_execution2_with_http_info(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_execution_id_org: Optional, org key applied to the resource. (required) + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :return: FlyteidladminNodeExecution + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_execution_id_org', 'id_execution_id_project', 'id_execution_id_domain', 'id_execution_id_name', 'id_node_id'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_node_execution2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_execution_id_org' is set + if ('id_execution_id_org' not in params or + params['id_execution_id_org'] is None): + raise ValueError("Missing the required parameter `id_execution_id_org` when calling `get_node_execution2`") # noqa: E501 + # verify the required parameter 'id_execution_id_project' is set + if ('id_execution_id_project' not in params or + params['id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `id_execution_id_project` when calling `get_node_execution2`") # noqa: E501 
+ # verify the required parameter 'id_execution_id_domain' is set + if ('id_execution_id_domain' not in params or + params['id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `id_execution_id_domain` when calling `get_node_execution2`") # noqa: E501 + # verify the required parameter 'id_execution_id_name' is set + if ('id_execution_id_name' not in params or + params['id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `id_execution_id_name` when calling `get_node_execution2`") # noqa: E501 + # verify the required parameter 'id_node_id' is set + if ('id_node_id' not in params or + params['id_node_id'] is None): + raise ValueError("Missing the required parameter `id_node_id` when calling `get_node_execution2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_execution_id_org' in params: + path_params['id.execution_id.org'] = params['id_execution_id_org'] # noqa: E501 + if 'id_execution_id_project' in params: + path_params['id.execution_id.project'] = params['id_execution_id_project'] # noqa: E501 + if 'id_execution_id_domain' in params: + path_params['id.execution_id.domain'] = params['id_execution_id_domain'] # noqa: E501 + if 'id_execution_id_name' in params: + path_params['id.execution_id.name'] = params['id_execution_id_name'] # noqa: E501 + if 'id_node_id' in params: + path_params['id.node_id'] = params['id_node_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + 
'/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='FlyteidladminNodeExecution', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_node_execution_data(self, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_node_execution_data(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :param str id_execution_id_org: Optional, org key applied to the resource. + :return: AdminNodeExecutionGetDataResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_node_execution_data_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + else: + (data) = self.get_node_execution_data_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + return data + + def get_node_execution_data_with_http_info(self, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_node_execution_data_with_http_info(id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :param str id_execution_id_org: Optional, org key applied to the resource. + :return: AdminNodeExecutionGetDataResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id_execution_id_project', 'id_execution_id_domain', 'id_execution_id_name', 'id_node_id', 'id_execution_id_org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_node_execution_data" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_execution_id_project' is set + if ('id_execution_id_project' not in params or + params['id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `id_execution_id_project` when calling `get_node_execution_data`") # noqa: E501 + # verify the required parameter 'id_execution_id_domain' is set + if ('id_execution_id_domain' not in params or + params['id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `id_execution_id_domain` when calling `get_node_execution_data`") # noqa: E501 + # verify the required parameter 'id_execution_id_name' is set + if ('id_execution_id_name' not in params or + params['id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `id_execution_id_name` when calling `get_node_execution_data`") # noqa: E501 + # verify the required parameter 'id_node_id' is set + if ('id_node_id' not in params or + params['id_node_id'] is None): + raise ValueError("Missing the required parameter `id_node_id` when calling `get_node_execution_data`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_execution_id_project' in params: + path_params['id.execution_id.project'] = params['id_execution_id_project'] # noqa: E501 + if 'id_execution_id_domain' in params: + path_params['id.execution_id.domain'] = params['id_execution_id_domain'] # noqa: E501 + if 
'id_execution_id_name' in params: + path_params['id.execution_id.name'] = params['id_execution_id_name'] # noqa: E501 + if 'id_node_id' in params: + path_params['id.node_id'] = params['id_node_id'] # noqa: E501 + + query_params = [] + if 'id_execution_id_org' in params: + query_params.append(('id.execution_id.org', params['id_execution_id_org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNodeExecutionGetDataResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_node_execution_data2(self, id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_node_execution_data2(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_execution_id_org: Optional, org key applied to the resource. (required) + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :return: AdminNodeExecutionGetDataResponse + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_node_execution_data2_with_http_info(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + else: + (data) = self.get_node_execution_data2_with_http_info(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs) # noqa: E501 + return data + + def get_node_execution_data2_with_http_info(self, id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_node_execution_data2_with_http_info(id_execution_id_org, id_execution_id_project, id_execution_id_domain, id_execution_id_name, id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_execution_id_org: Optional, org key applied to the resource. (required) + :param str id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_id: (required) + :return: AdminNodeExecutionGetDataResponse + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_execution_id_org', 'id_execution_id_project', 'id_execution_id_domain', 'id_execution_id_name', 'id_node_id'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_node_execution_data2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_execution_id_org' is set + if ('id_execution_id_org' not in params or + params['id_execution_id_org'] is None): + raise ValueError("Missing the required parameter `id_execution_id_org` when calling `get_node_execution_data2`") # noqa: E501 + # verify the required parameter 'id_execution_id_project' is set + if ('id_execution_id_project' not in params or + params['id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `id_execution_id_project` when calling 
`get_node_execution_data2`") # noqa: E501 + # verify the required parameter 'id_execution_id_domain' is set + if ('id_execution_id_domain' not in params or + params['id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `id_execution_id_domain` when calling `get_node_execution_data2`") # noqa: E501 + # verify the required parameter 'id_execution_id_name' is set + if ('id_execution_id_name' not in params or + params['id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `id_execution_id_name` when calling `get_node_execution_data2`") # noqa: E501 + # verify the required parameter 'id_node_id' is set + if ('id_node_id' not in params or + params['id_node_id'] is None): + raise ValueError("Missing the required parameter `id_node_id` when calling `get_node_execution_data2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_execution_id_org' in params: + path_params['id.execution_id.org'] = params['id_execution_id_org'] # noqa: E501 + if 'id_execution_id_project' in params: + path_params['id.execution_id.project'] = params['id_execution_id_project'] # noqa: E501 + if 'id_execution_id_domain' in params: + path_params['id.execution_id.domain'] = params['id_execution_id_domain'] # noqa: E501 + if 'id_execution_id_name' in params: + path_params['id.execution_id.name'] = params['id_execution_id_name'] # noqa: E501 + if 'id_node_id' in params: + path_params['id.node_id'] = params['id_node_id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return 
self.api_client.call_api( + '/api/v1/data/org/{id.execution_id.org}/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNodeExecutionGetDataResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_project_attributes(self, project, **kwargs): # noqa: E501 + """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_project_attributes(project, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Unique project id which this set of attributes references. +required (required) + :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. 
- CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :param str org: Optional, org key applied to the project. + :return: AdminProjectAttributesGetResponse + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_project_attributes_with_http_info(project, **kwargs) # noqa: E501 + else: + (data) = self.get_project_attributes_with_http_info(project, **kwargs) # noqa: E501 + return data + + def get_project_attributes_with_http_info(self, project, **kwargs): # noqa: E501 + """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_project_attributes_with_http_info(project, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Unique project id which this set of attributes references. +required (required) + :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. 
+ :param str org: Optional, org key applied to the project. + :return: AdminProjectAttributesGetResponse + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['project', 'resource_type', 'org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_project_attributes" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `get_project_attributes`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + + query_params = [] + if 'resource_type' in params: + query_params.append(('resource_type', params['resource_type'])) # noqa: E501 + if 'org' in params: + query_params.append(('org', params['org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/project_attributes/{project}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminProjectAttributesGetResponse', # noqa: E501 + auth_settings=auth_settings, + 
async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_project_attributes2(self, org, project, **kwargs): # noqa: E501 + """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_project_attributes2(org, project, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the project. (required) + :param str project: Unique project id which this set of attributes references. +required (required) + :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :return: AdminProjectAttributesGetResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_project_attributes2_with_http_info(org, project, **kwargs) # noqa: E501 + else: + (data) = self.get_project_attributes2_with_http_info(org, project, **kwargs) # noqa: E501 + return data + + def get_project_attributes2_with_http_info(self, org, project, **kwargs): # noqa: E501 + """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_project_attributes2_with_http_info(org, project, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the project. (required) + :param str project: Unique project id which this set of attributes references. +required (required) + :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :return: AdminProjectAttributesGetResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['org', 'project', 'resource_type'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_project_attributes2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `get_project_attributes2`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `get_project_attributes2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + + query_params = [] + if 'resource_type' in params: + query_params.append(('resource_type', params['resource_type'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/project_domain_attributes/org/{org}/{project}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminProjectAttributesGetResponse', # noqa: E501 + 
auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_project_domain_attributes(self, project, domain, **kwargs): # noqa: E501 + """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_project_domain_attributes(project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :param str org: Optional, org key applied to the attributes. + :return: AdminProjectDomainAttributesGetResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_project_domain_attributes_with_http_info(project, domain, **kwargs) # noqa: E501 + else: + (data) = self.get_project_domain_attributes_with_http_info(project, domain, **kwargs) # noqa: E501 + return data + + def get_project_domain_attributes_with_http_info(self, project, domain, **kwargs): # noqa: E501 + """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_project_domain_attributes_with_http_info(project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :param str org: Optional, org key applied to the attributes. 
+ :return: AdminProjectDomainAttributesGetResponse + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['project', 'domain', 'resource_type', 'org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_project_domain_attributes" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `get_project_domain_attributes`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `get_project_domain_attributes`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + + query_params = [] + if 'resource_type' in params: + query_params.append(('resource_type', params['resource_type'])) # noqa: E501 + if 'org' in params: + query_params.append(('org', params['org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( 
            '/api/v1/project_domain_attributes/{project}/{domain}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AdminProjectDomainAttributesGetResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    # NOTE(review): appears to be swagger-codegen generated client code —
    # prefer regenerating from the OpenAPI spec over hand-editing.
    def get_project_domain_attributes2(self, org, project, domain, **kwargs):  # noqa: E501
        """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_project_domain_attributes2(org, project, domain, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str org: Optional, org key applied to the attributes. (required)
        :param str project: Unique project id which this set of attributes references. +required (required)
        :param str domain: Unique domain id which this set of attributes references. +required (required)
        :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.
        :return: AdminProjectDomainAttributesGetResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_project_domain_attributes2_with_http_info(org, project, domain, **kwargs)  # noqa: E501
        else:
            (data) = self.get_project_domain_attributes2_with_http_info(org, project, domain, **kwargs)  # noqa: E501
            return data

    def get_project_domain_attributes2_with_http_info(self, org, project, domain, **kwargs):  # noqa: E501
        """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_project_domain_attributes2_with_http_info(org, project, domain, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str org: Optional, org key applied to the attributes. (required)
        :param str project: Unique project id which this set of attributes references. +required (required)
        :param str domain: Unique domain id which this set of attributes references. +required (required)
        :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run.
        :return: AdminProjectDomainAttributesGetResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['org', 'project', 'domain', 'resource_type']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_project_domain_attributes2" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'org' is set
        if ('org' not in params or
                params['org'] is None):
            raise ValueError("Missing the required parameter `org` when calling `get_project_domain_attributes2`")  # noqa: E501
        # verify the required parameter 'project' is set
        if ('project' not in params or
                params['project'] is None):
            raise ValueError("Missing the required parameter `project` when calling `get_project_domain_attributes2`")  # noqa: E501
        # verify the required parameter 'domain' is set
        if ('domain' not in params or
                params['domain'] is None):
            raise ValueError("Missing the required parameter `domain` when calling `get_project_domain_attributes2`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'org' in params:
            path_params['org'] = params['org']  # noqa: E501
        if 'project' in params:
            path_params['project'] = params['project']  # noqa: E501
        if 'domain' in params:
            path_params['domain'] = params['domain']  # noqa: E501

        query_params = []
        if 'resource_type' in params:
            query_params.append(('resource_type', params['resource_type']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/v1/project_domain_attributes/org/{org}/{project}/{domain}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AdminProjectDomainAttributesGetResponse',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_task(self, id_project, id_domain, id_name, id_version, **kwargs):  # noqa: E501
        """Fetch a :ref:`ref_flyteidl.admin.Task` definition.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_task(id_project, id_domain, id_name, id_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id_project: Name of the project the resource belongs to. (required)
        :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required)
        :param str id_name: User provided value for the resource. (required)
        :param str id_version: Specific version of the resource. (required)
        :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects
        :param str id_org: Optional, org key applied to the resource.
        :return: AdminTask
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_task_with_http_info(id_project, id_domain, id_name, id_version, **kwargs)  # noqa: E501
        else:
            (data) = self.get_task_with_http_info(id_project, id_domain, id_name, id_version, **kwargs)  # noqa: E501
            return data

    def get_task_with_http_info(self, id_project, id_domain, id_name, id_version, **kwargs):  # noqa: E501
        """Fetch a :ref:`ref_flyteidl.admin.Task` definition.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_task_with_http_info(id_project, id_domain, id_name, id_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id_project: Name of the project the resource belongs to. (required)
        :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required)
        :param str id_name: User provided value for the resource. (required)
        :param str id_version: Specific version of the resource. (required)
        :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects
        :param str id_org: Optional, org key applied to the resource.
        :return: AdminTask
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['id_project', 'id_domain', 'id_name', 'id_version', 'id_resource_type', 'id_org']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_task" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id_project' is set
        if ('id_project' not in params or
                params['id_project'] is None):
            raise ValueError("Missing the required parameter `id_project` when calling `get_task`")  # noqa: E501
        # verify the required parameter 'id_domain' is set
        if ('id_domain' not in params or
                params['id_domain'] is None):
            raise ValueError("Missing the required parameter `id_domain` when calling `get_task`")  # noqa: E501
        # verify the required parameter 'id_name' is set
        if ('id_name' not in params or
                params['id_name'] is None):
            raise ValueError("Missing the required parameter `id_name` when calling `get_task`")  # noqa: E501
        # verify the required parameter 'id_version' is set
        if ('id_version' not in params or
                params['id_version'] is None):
            raise ValueError("Missing the required parameter `id_version` when calling `get_task`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id_project' in params:
            path_params['id.project'] = params['id_project']  # noqa: E501
        if 'id_domain' in params:
            path_params['id.domain'] = params['id_domain']  # noqa: E501
        if 'id_name' in params:
            path_params['id.name'] = params['id_name']  # noqa: E501
        if 'id_version' in params:
            path_params['id.version'] = params['id_version']  # noqa: E501

        query_params = []
        if 'id_resource_type' in params:
            query_params.append(('id.resource_type', params['id_resource_type']))  # noqa: E501
        if 'id_org' in params:
            query_params.append(('id.org', params['id_org']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AdminTask',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_task2(self, id_org, id_project, id_domain, id_name, id_version, **kwargs):  # noqa: E501
        """Fetch a :ref:`ref_flyteidl.admin.Task` definition.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_task2(id_org, id_project, id_domain, id_name, id_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id_org: Optional, org key applied to the resource. (required)
        :param str id_project: Name of the project the resource belongs to. (required)
        :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required)
        :param str id_name: User provided value for the resource. (required)
        :param str id_version: Specific version of the resource. (required)
        :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects
        :return: AdminTask
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_task2_with_http_info(id_org, id_project, id_domain, id_name, id_version, **kwargs)  # noqa: E501
        else:
            (data) = self.get_task2_with_http_info(id_org, id_project, id_domain, id_name, id_version, **kwargs)  # noqa: E501
            return data

    def get_task2_with_http_info(self, id_org, id_project, id_domain, id_name, id_version, **kwargs):  # noqa: E501
        """Fetch a :ref:`ref_flyteidl.admin.Task` definition.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_task2_with_http_info(id_org, id_project, id_domain, id_name, id_version, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id_org: Optional, org key applied to the resource. (required)
        :param str id_project: Name of the project the resource belongs to. (required)
        :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required)
        :param str id_name: User provided value for the resource. (required)
        :param str id_version: Specific version of the resource. (required)
        :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects
        :return: AdminTask
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'id_version', 'id_resource_type']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_task2" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id_org' is set
        if ('id_org' not in params or
                params['id_org'] is None):
            raise ValueError("Missing the required parameter `id_org` when calling `get_task2`")  # noqa: E501
        # verify the required parameter 'id_project' is set
        if ('id_project' not in params or
                params['id_project'] is None):
            raise ValueError("Missing the required parameter `id_project` when calling `get_task2`")  # noqa: E501
        # verify the required parameter 'id_domain' is set
        if ('id_domain' not in params or
                params['id_domain'] is None):
            raise ValueError("Missing the required parameter `id_domain` when calling `get_task2`")  # noqa: E501
        # verify the required parameter 'id_name' is set
        if ('id_name' not in params or
                params['id_name'] is None):
            raise ValueError("Missing the required parameter `id_name` when calling `get_task2`")  # noqa: E501
        # verify the required parameter 'id_version' is set
        if ('id_version' not in params or
                params['id_version'] is None):
            raise ValueError("Missing the required parameter `id_version` when calling `get_task2`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id_org' in params:
            path_params['id.org'] = params['id_org']  # noqa: E501
        if 'id_project' in params:
            path_params['id.project'] = params['id_project']  # noqa: E501
        if 'id_domain' in params:
            path_params['id.domain'] = params['id_domain']  # noqa: E501
        if 'id_name' in params:
            path_params['id.name'] = params['id_name']  # noqa: E501
        if 'id_version' in params:
            path_params['id.version'] = params['id_version']  # noqa: E501

        query_params = []
        if 'id_resource_type' in params:
            query_params.append(('id.resource_type', params['id_resource_type']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='AdminTask',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
    # NOTE(review): swagger-codegen style generated client method — the very long
    # flattened parameter names mirror nested proto field paths; regenerate rather
    # than hand-edit.
    def get_task_execution(self, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs):  # noqa: E501
        """Fetches a :ref:`ref_flyteidl.admin.TaskExecution`.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_task_execution(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required)
        :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required)
        :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required)
        :param str id_node_execution_id_node_id: (required)
        :param str id_task_id_project: Name of the project the resource belongs to. (required)
        :param str id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required)
        :param str id_task_id_name: User provided value for the resource. (required)
        :param str id_task_id_version: Specific version of the resource. (required)
        :param int id_retry_attempt: (required)
        :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects
        :param str id_task_id_org: Optional, org key applied to the resource.
        :param str id_node_execution_id_execution_id_org: Optional, org key applied to the resource.
        :return: FlyteidladminTaskExecution
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_task_execution_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs)  # noqa: E501
        else:
            (data) = self.get_task_execution_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs)  # noqa: E501
            return data

    def get_task_execution_with_http_info(self, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs):  # noqa: E501
        """Fetches a :ref:`ref_flyteidl.admin.TaskExecution`.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_task_execution_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required)
        :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required)
        :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required)
        :param str id_node_execution_id_node_id: (required)
        :param str id_task_id_project: Name of the project the resource belongs to. (required)
        :param str id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required)
        :param str id_task_id_name: User provided value for the resource. (required)
        :param str id_task_id_version: Specific version of the resource. (required)
        :param int id_retry_attempt: (required)
        :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects
        :param str id_task_id_org: Optional, org key applied to the resource.
        :param str id_node_execution_id_execution_id_org: Optional, org key applied to the resource.
        :return: FlyteidladminTaskExecution
                 If the method is called asynchronously,
                 returns the request thread.
        """

        all_params = ['id_node_execution_id_execution_id_project', 'id_node_execution_id_execution_id_domain', 'id_node_execution_id_execution_id_name', 'id_node_execution_id_node_id', 'id_task_id_project', 'id_task_id_domain', 'id_task_id_name', 'id_task_id_version', 'id_retry_attempt', 'id_task_id_resource_type', 'id_task_id_org', 'id_node_execution_id_execution_id_org']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_task_execution" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id_node_execution_id_execution_id_project' is set
        if ('id_node_execution_id_execution_id_project' not in params or
                params['id_node_execution_id_execution_id_project'] is None):
            raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_project` when calling `get_task_execution`")  # noqa: E501
        # verify the required parameter 'id_node_execution_id_execution_id_domain' is set
        if ('id_node_execution_id_execution_id_domain' not in params or
                params['id_node_execution_id_execution_id_domain'] is None):
            raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_domain` when calling `get_task_execution`")  # noqa: E501
        # verify the required parameter 'id_node_execution_id_execution_id_name' is set
        if ('id_node_execution_id_execution_id_name' not in params or
                params['id_node_execution_id_execution_id_name'] is None):
            raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_name` when calling `get_task_execution`")  # noqa: E501
        # verify the required parameter 'id_node_execution_id_node_id' is set
        if ('id_node_execution_id_node_id' not in params or
                params['id_node_execution_id_node_id'] is None):
            raise ValueError("Missing the required parameter `id_node_execution_id_node_id` when calling `get_task_execution`")  # noqa: E501
        # verify the required parameter 'id_task_id_project' is set
        if ('id_task_id_project' not in params or
                params['id_task_id_project'] is None):
            raise ValueError("Missing the required parameter `id_task_id_project` when calling `get_task_execution`")  # noqa: E501
        # verify the required parameter 'id_task_id_domain' is set
        if ('id_task_id_domain' not in params or
                params['id_task_id_domain'] is None):
            raise ValueError("Missing the required parameter `id_task_id_domain` when calling `get_task_execution`")  # noqa: E501
        # verify the required parameter 'id_task_id_name' is set
        if ('id_task_id_name' not in params or
                params['id_task_id_name'] is None):
            raise ValueError("Missing the required parameter `id_task_id_name` when calling `get_task_execution`")  # noqa: E501
        # verify the required parameter 'id_task_id_version' is set
        if ('id_task_id_version' not in params or
                params['id_task_id_version'] is None):
            raise ValueError("Missing the required parameter `id_task_id_version` when calling `get_task_execution`")  # noqa: E501
        # verify the required parameter 'id_retry_attempt' is set
        if ('id_retry_attempt' not in params or
                params['id_retry_attempt'] is None):
            raise ValueError("Missing the required parameter `id_retry_attempt` when calling `get_task_execution`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'id_node_execution_id_execution_id_project' in params:
            path_params['id.node_execution_id.execution_id.project'] = params['id_node_execution_id_execution_id_project']  # noqa: E501
        if 'id_node_execution_id_execution_id_domain' in params:
            path_params['id.node_execution_id.execution_id.domain'] = params['id_node_execution_id_execution_id_domain']  # noqa: E501
        if 'id_node_execution_id_execution_id_name' in params:
            path_params['id.node_execution_id.execution_id.name'] = params['id_node_execution_id_execution_id_name']  # noqa: E501
        if 'id_node_execution_id_node_id' in params:
            path_params['id.node_execution_id.node_id'] = params['id_node_execution_id_node_id']  # noqa: E501
        if 'id_task_id_project' in params:
            path_params['id.task_id.project'] = params['id_task_id_project']  # noqa: E501
        if 'id_task_id_domain' in params:
            path_params['id.task_id.domain'] = params['id_task_id_domain']  # noqa: E501
        if 'id_task_id_name' in params:
            path_params['id.task_id.name'] = params['id_task_id_name']  # noqa: E501
        if 'id_task_id_version' in params:
            path_params['id.task_id.version'] = params['id_task_id_version']  # noqa: E501
        if 'id_retry_attempt' in params:
            path_params['id.retry_attempt'] = params['id_retry_attempt']  # noqa: E501

        query_params = []
        if 'id_task_id_resource_type' in params:
            query_params.append(('id.task_id.resource_type', params['id_task_id_resource_type']))  # noqa: E501
        if 'id_task_id_org' in params:
            query_params.append(('id.task_id.org', params['id_task_id_org']))  # noqa: E501
        if 'id_node_execution_id_execution_id_org' in params:
            query_params.append(('id.node_execution_id.execution_id.org', params['id_node_execution_id_execution_id_org']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='FlyteidladminTaskExecution',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    # NOTE(review): org-scoped variant of get_task_execution; generated code —
    # regenerate from the spec rather than hand-editing.
    def get_task_execution2(self, id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs):  # noqa: E501
        """Fetches a :ref:`ref_flyteidl.admin.TaskExecution`.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_task_execution2(id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id_node_execution_id_execution_id_org: Optional, org key applied to the resource. (required)
        :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required)
        :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required)
        :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required)
        :param str id_node_execution_id_node_id: (required)
        :param str id_task_id_project: Name of the project the resource belongs to. (required)
        :param str id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required)
        :param str id_task_id_name: User provided value for the resource. (required)
        :param str id_task_id_version: Specific version of the resource. (required)
        :param int id_retry_attempt: (required)
        :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects
        :param str id_task_id_org: Optional, org key applied to the resource.
        :return: FlyteidladminTaskExecution
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            return self.get_task_execution2_with_http_info(id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs)  # noqa: E501
        else:
            (data) = self.get_task_execution2_with_http_info(id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs)  # noqa: E501
            return data

    def get_task_execution2_with_http_info(self, id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs):  # noqa: E501
        """Fetches a :ref:`ref_flyteidl.admin.TaskExecution`.  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_task_execution2_with_http_info(id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str id_node_execution_id_execution_id_org: Optional, org key applied to the resource. (required)
        :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to.
(required) + :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_execution_id_node_id: (required) + :param str id_task_id_project: Name of the project the resource belongs to. (required) + :param str id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_task_id_name: User provided value for the resource. (required) + :param str id_task_id_version: Specific version of the resource. (required) + :param int id_retry_attempt: (required) + :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str id_task_id_org: Optional, org key applied to the resource. + :return: FlyteidladminTaskExecution + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id_node_execution_id_execution_id_org', 'id_node_execution_id_execution_id_project', 'id_node_execution_id_execution_id_domain', 'id_node_execution_id_execution_id_name', 'id_node_execution_id_node_id', 'id_task_id_project', 'id_task_id_domain', 'id_task_id_name', 'id_task_id_version', 'id_retry_attempt', 'id_task_id_resource_type', 'id_task_id_org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_task_execution2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_node_execution_id_execution_id_org' is set + if ('id_node_execution_id_execution_id_org' not in params or + params['id_node_execution_id_execution_id_org'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_org` when calling `get_task_execution2`") # noqa: E501 + # verify the required parameter 'id_node_execution_id_execution_id_project' is set + if ('id_node_execution_id_execution_id_project' not in params or + params['id_node_execution_id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_project` when calling `get_task_execution2`") # noqa: E501 + # verify the required parameter 'id_node_execution_id_execution_id_domain' is set + if ('id_node_execution_id_execution_id_domain' not in params or + params['id_node_execution_id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_domain` when calling `get_task_execution2`") # noqa: E501 + # verify the required parameter 'id_node_execution_id_execution_id_name' is set + if ('id_node_execution_id_execution_id_name' not 
in params or + params['id_node_execution_id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_name` when calling `get_task_execution2`") # noqa: E501 + # verify the required parameter 'id_node_execution_id_node_id' is set + if ('id_node_execution_id_node_id' not in params or + params['id_node_execution_id_node_id'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_node_id` when calling `get_task_execution2`") # noqa: E501 + # verify the required parameter 'id_task_id_project' is set + if ('id_task_id_project' not in params or + params['id_task_id_project'] is None): + raise ValueError("Missing the required parameter `id_task_id_project` when calling `get_task_execution2`") # noqa: E501 + # verify the required parameter 'id_task_id_domain' is set + if ('id_task_id_domain' not in params or + params['id_task_id_domain'] is None): + raise ValueError("Missing the required parameter `id_task_id_domain` when calling `get_task_execution2`") # noqa: E501 + # verify the required parameter 'id_task_id_name' is set + if ('id_task_id_name' not in params or + params['id_task_id_name'] is None): + raise ValueError("Missing the required parameter `id_task_id_name` when calling `get_task_execution2`") # noqa: E501 + # verify the required parameter 'id_task_id_version' is set + if ('id_task_id_version' not in params or + params['id_task_id_version'] is None): + raise ValueError("Missing the required parameter `id_task_id_version` when calling `get_task_execution2`") # noqa: E501 + # verify the required parameter 'id_retry_attempt' is set + if ('id_retry_attempt' not in params or + params['id_retry_attempt'] is None): + raise ValueError("Missing the required parameter `id_retry_attempt` when calling `get_task_execution2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_node_execution_id_execution_id_org' in params: + 
path_params['id.node_execution_id.execution_id.org'] = params['id_node_execution_id_execution_id_org'] # noqa: E501 + if 'id_node_execution_id_execution_id_project' in params: + path_params['id.node_execution_id.execution_id.project'] = params['id_node_execution_id_execution_id_project'] # noqa: E501 + if 'id_node_execution_id_execution_id_domain' in params: + path_params['id.node_execution_id.execution_id.domain'] = params['id_node_execution_id_execution_id_domain'] # noqa: E501 + if 'id_node_execution_id_execution_id_name' in params: + path_params['id.node_execution_id.execution_id.name'] = params['id_node_execution_id_execution_id_name'] # noqa: E501 + if 'id_node_execution_id_node_id' in params: + path_params['id.node_execution_id.node_id'] = params['id_node_execution_id_node_id'] # noqa: E501 + if 'id_task_id_project' in params: + path_params['id.task_id.project'] = params['id_task_id_project'] # noqa: E501 + if 'id_task_id_domain' in params: + path_params['id.task_id.domain'] = params['id_task_id_domain'] # noqa: E501 + if 'id_task_id_name' in params: + path_params['id.task_id.name'] = params['id_task_id_name'] # noqa: E501 + if 'id_task_id_version' in params: + path_params['id.task_id.version'] = params['id_task_id_version'] # noqa: E501 + if 'id_retry_attempt' in params: + path_params['id.retry_attempt'] = params['id_retry_attempt'] # noqa: E501 + + query_params = [] + if 'id_task_id_resource_type' in params: + query_params.append(('id.task_id.resource_type', params['id_task_id_resource_type'])) # noqa: E501 + if 'id_task_id_org' in params: + query_params.append(('id.task_id.org', params['id_task_id_org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + 
['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/task_executions/org/{id.node_execution_id.execution_id.org}/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='FlyteidladminTaskExecution', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_task_execution_data(self, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_task_execution_data(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to. 
(required) + :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_execution_id_node_id: (required) + :param str id_task_id_project: Name of the project the resource belongs to. (required) + :param str id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_task_id_name: User provided value for the resource. (required) + :param str id_task_id_version: Specific version of the resource. (required) + :param int id_retry_attempt: (required) + :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str id_task_id_org: Optional, org key applied to the resource. + :param str id_node_execution_id_execution_id_org: Optional, org key applied to the resource. + :return: AdminTaskExecutionGetDataResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_task_execution_data_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs) # noqa: E501 + else: + (data) = self.get_task_execution_data_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs) # noqa: E501 + return data + + def get_task_execution_data_with_http_info(self, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_task_execution_data_with_http_info(id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. 
(required) + :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_execution_id_node_id: (required) + :param str id_task_id_project: Name of the project the resource belongs to. (required) + :param str id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_task_id_name: User provided value for the resource. (required) + :param str id_task_id_version: Specific version of the resource. (required) + :param int id_retry_attempt: (required) + :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str id_task_id_org: Optional, org key applied to the resource. + :param str id_node_execution_id_execution_id_org: Optional, org key applied to the resource. + :return: AdminTaskExecutionGetDataResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id_node_execution_id_execution_id_project', 'id_node_execution_id_execution_id_domain', 'id_node_execution_id_execution_id_name', 'id_node_execution_id_node_id', 'id_task_id_project', 'id_task_id_domain', 'id_task_id_name', 'id_task_id_version', 'id_retry_attempt', 'id_task_id_resource_type', 'id_task_id_org', 'id_node_execution_id_execution_id_org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_task_execution_data" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_node_execution_id_execution_id_project' is set + if ('id_node_execution_id_execution_id_project' not in params or + params['id_node_execution_id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_project` when calling `get_task_execution_data`") # noqa: E501 + # verify the required parameter 'id_node_execution_id_execution_id_domain' is set + if ('id_node_execution_id_execution_id_domain' not in params or + params['id_node_execution_id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_domain` when calling `get_task_execution_data`") # noqa: E501 + # verify the required parameter 'id_node_execution_id_execution_id_name' is set + if ('id_node_execution_id_execution_id_name' not in params or + params['id_node_execution_id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_name` when calling `get_task_execution_data`") # noqa: E501 + # verify the required parameter 'id_node_execution_id_node_id' is set + if ('id_node_execution_id_node_id' not 
in params or + params['id_node_execution_id_node_id'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_node_id` when calling `get_task_execution_data`") # noqa: E501 + # verify the required parameter 'id_task_id_project' is set + if ('id_task_id_project' not in params or + params['id_task_id_project'] is None): + raise ValueError("Missing the required parameter `id_task_id_project` when calling `get_task_execution_data`") # noqa: E501 + # verify the required parameter 'id_task_id_domain' is set + if ('id_task_id_domain' not in params or + params['id_task_id_domain'] is None): + raise ValueError("Missing the required parameter `id_task_id_domain` when calling `get_task_execution_data`") # noqa: E501 + # verify the required parameter 'id_task_id_name' is set + if ('id_task_id_name' not in params or + params['id_task_id_name'] is None): + raise ValueError("Missing the required parameter `id_task_id_name` when calling `get_task_execution_data`") # noqa: E501 + # verify the required parameter 'id_task_id_version' is set + if ('id_task_id_version' not in params or + params['id_task_id_version'] is None): + raise ValueError("Missing the required parameter `id_task_id_version` when calling `get_task_execution_data`") # noqa: E501 + # verify the required parameter 'id_retry_attempt' is set + if ('id_retry_attempt' not in params or + params['id_retry_attempt'] is None): + raise ValueError("Missing the required parameter `id_retry_attempt` when calling `get_task_execution_data`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_node_execution_id_execution_id_project' in params: + path_params['id.node_execution_id.execution_id.project'] = params['id_node_execution_id_execution_id_project'] # noqa: E501 + if 'id_node_execution_id_execution_id_domain' in params: + path_params['id.node_execution_id.execution_id.domain'] = params['id_node_execution_id_execution_id_domain'] # noqa: E501 + if 
'id_node_execution_id_execution_id_name' in params: + path_params['id.node_execution_id.execution_id.name'] = params['id_node_execution_id_execution_id_name'] # noqa: E501 + if 'id_node_execution_id_node_id' in params: + path_params['id.node_execution_id.node_id'] = params['id_node_execution_id_node_id'] # noqa: E501 + if 'id_task_id_project' in params: + path_params['id.task_id.project'] = params['id_task_id_project'] # noqa: E501 + if 'id_task_id_domain' in params: + path_params['id.task_id.domain'] = params['id_task_id_domain'] # noqa: E501 + if 'id_task_id_name' in params: + path_params['id.task_id.name'] = params['id_task_id_name'] # noqa: E501 + if 'id_task_id_version' in params: + path_params['id.task_id.version'] = params['id_task_id_version'] # noqa: E501 + if 'id_retry_attempt' in params: + path_params['id.retry_attempt'] = params['id_retry_attempt'] # noqa: E501 + + query_params = [] + if 'id_task_id_resource_type' in params: + query_params.append(('id.task_id.resource_type', params['id_task_id_resource_type'])) # noqa: E501 + if 'id_task_id_org' in params: + query_params.append(('id.task_id.org', params['id_task_id_org'])) # noqa: E501 + if 'id_node_execution_id_execution_id_org' in params: + query_params.append(('id.node_execution_id.execution_id.org', params['id_node_execution_id_execution_id_org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + 
'/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminTaskExecutionGetDataResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_task_execution_data2(self, id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_task_execution_data2(id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_node_execution_id_execution_id_org: Optional, org key applied to the resource. (required) + :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to. 
(required) + :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_execution_id_node_id: (required) + :param str id_task_id_project: Name of the project the resource belongs to. (required) + :param str id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_task_id_name: User provided value for the resource. (required) + :param str id_task_id_version: Specific version of the resource. (required) + :param int id_retry_attempt: (required) + :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str id_task_id_org: Optional, org key applied to the resource. + :return: AdminTaskExecutionGetDataResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_task_execution_data2_with_http_info(id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs) # noqa: E501 + else: + (data) = self.get_task_execution_data2_with_http_info(id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs) # noqa: E501 + return data + + def get_task_execution_data2_with_http_info(self, id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, **kwargs): # noqa: E501 + """Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_task_execution_data2_with_http_info(id_node_execution_id_execution_id_org, id_node_execution_id_execution_id_project, id_node_execution_id_execution_id_domain, id_node_execution_id_execution_id_name, id_node_execution_id_node_id, id_task_id_project, id_task_id_domain, id_task_id_name, id_task_id_version, id_retry_attempt, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_node_execution_id_execution_id_org: Optional, org key applied to the resource. 
(required) + :param str id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) + :param str id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str id_node_execution_id_node_id: (required) + :param str id_task_id_project: Name of the project the resource belongs to. (required) + :param str id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_task_id_name: User provided value for the resource. (required) + :param str id_task_id_version: Specific version of the resource. (required) + :param int id_retry_attempt: (required) + :param str id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str id_task_id_org: Optional, org key applied to the resource. + :return: AdminTaskExecutionGetDataResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id_node_execution_id_execution_id_org', 'id_node_execution_id_execution_id_project', 'id_node_execution_id_execution_id_domain', 'id_node_execution_id_execution_id_name', 'id_node_execution_id_node_id', 'id_task_id_project', 'id_task_id_domain', 'id_task_id_name', 'id_task_id_version', 'id_retry_attempt', 'id_task_id_resource_type', 'id_task_id_org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_task_execution_data2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_node_execution_id_execution_id_org' is set + if ('id_node_execution_id_execution_id_org' not in params or + params['id_node_execution_id_execution_id_org'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_org` when calling `get_task_execution_data2`") # noqa: E501 + # verify the required parameter 'id_node_execution_id_execution_id_project' is set + if ('id_node_execution_id_execution_id_project' not in params or + params['id_node_execution_id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_project` when calling `get_task_execution_data2`") # noqa: E501 + # verify the required parameter 'id_node_execution_id_execution_id_domain' is set + if ('id_node_execution_id_execution_id_domain' not in params or + params['id_node_execution_id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_domain` when calling `get_task_execution_data2`") # noqa: E501 + # verify the required parameter 'id_node_execution_id_execution_id_name' is set + if 
('id_node_execution_id_execution_id_name' not in params or + params['id_node_execution_id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_execution_id_name` when calling `get_task_execution_data2`") # noqa: E501 + # verify the required parameter 'id_node_execution_id_node_id' is set + if ('id_node_execution_id_node_id' not in params or + params['id_node_execution_id_node_id'] is None): + raise ValueError("Missing the required parameter `id_node_execution_id_node_id` when calling `get_task_execution_data2`") # noqa: E501 + # verify the required parameter 'id_task_id_project' is set + if ('id_task_id_project' not in params or + params['id_task_id_project'] is None): + raise ValueError("Missing the required parameter `id_task_id_project` when calling `get_task_execution_data2`") # noqa: E501 + # verify the required parameter 'id_task_id_domain' is set + if ('id_task_id_domain' not in params or + params['id_task_id_domain'] is None): + raise ValueError("Missing the required parameter `id_task_id_domain` when calling `get_task_execution_data2`") # noqa: E501 + # verify the required parameter 'id_task_id_name' is set + if ('id_task_id_name' not in params or + params['id_task_id_name'] is None): + raise ValueError("Missing the required parameter `id_task_id_name` when calling `get_task_execution_data2`") # noqa: E501 + # verify the required parameter 'id_task_id_version' is set + if ('id_task_id_version' not in params or + params['id_task_id_version'] is None): + raise ValueError("Missing the required parameter `id_task_id_version` when calling `get_task_execution_data2`") # noqa: E501 + # verify the required parameter 'id_retry_attempt' is set + if ('id_retry_attempt' not in params or + params['id_retry_attempt'] is None): + raise ValueError("Missing the required parameter `id_retry_attempt` when calling `get_task_execution_data2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 
'id_node_execution_id_execution_id_org' in params: + path_params['id.node_execution_id.execution_id.org'] = params['id_node_execution_id_execution_id_org'] # noqa: E501 + if 'id_node_execution_id_execution_id_project' in params: + path_params['id.node_execution_id.execution_id.project'] = params['id_node_execution_id_execution_id_project'] # noqa: E501 + if 'id_node_execution_id_execution_id_domain' in params: + path_params['id.node_execution_id.execution_id.domain'] = params['id_node_execution_id_execution_id_domain'] # noqa: E501 + if 'id_node_execution_id_execution_id_name' in params: + path_params['id.node_execution_id.execution_id.name'] = params['id_node_execution_id_execution_id_name'] # noqa: E501 + if 'id_node_execution_id_node_id' in params: + path_params['id.node_execution_id.node_id'] = params['id_node_execution_id_node_id'] # noqa: E501 + if 'id_task_id_project' in params: + path_params['id.task_id.project'] = params['id_task_id_project'] # noqa: E501 + if 'id_task_id_domain' in params: + path_params['id.task_id.domain'] = params['id_task_id_domain'] # noqa: E501 + if 'id_task_id_name' in params: + path_params['id.task_id.name'] = params['id_task_id_name'] # noqa: E501 + if 'id_task_id_version' in params: + path_params['id.task_id.version'] = params['id_task_id_version'] # noqa: E501 + if 'id_retry_attempt' in params: + path_params['id.retry_attempt'] = params['id_retry_attempt'] # noqa: E501 + + query_params = [] + if 'id_task_id_resource_type' in params: + query_params.append(('id.task_id.resource_type', params['id_task_id_resource_type'])) # noqa: E501 + if 'id_task_id_org' in params: + query_params.append(('id.task_id.org', params['id_task_id_org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = 
self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/data/org/{id.node_execution_id.execution_id.org}/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminTaskExecutionGetDataResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_version(self, **kwargs): # noqa: E501 + """get_version # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_version(async_req=True) + >>> result = thread.get() + + :param async_req bool + :return: AdminGetVersionResponse + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_version_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.get_version_with_http_info(**kwargs) # noqa: E501 + return data + + def get_version_with_http_info(self, **kwargs): # noqa: E501 + """get_version # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_version_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :return: AdminGetVersionResponse + If the method is called asynchronously, + returns the request thread. + """ + + all_params = [] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_version" % key + ) + params[key] = val + del params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/version', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminGetVersionResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_workflow(self, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_workflow(id_project, id_domain, id_name, id_version, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str id_org: Optional, org key applied to the resource. + :return: AdminWorkflow + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_workflow_with_http_info(id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + else: + (data) = self.get_workflow_with_http_info(id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + return data + + def get_workflow_with_http_info(self, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_workflow_with_http_info(id_project, id_domain, id_name, id_version, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str id_org: Optional, org key applied to the resource. + :return: AdminWorkflow + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id_project', 'id_domain', 'id_name', 'id_version', 'id_resource_type', 'id_org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_workflow" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `get_workflow`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `get_workflow`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_workflow`") # noqa: E501 + # verify the required parameter 'id_version' is set + if ('id_version' not in params or + params['id_version'] is None): + raise ValueError("Missing the required parameter `id_version` when calling `get_workflow`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + if 'id_version' in params: + path_params['id.version'] = params['id_version'] # noqa: E501 + + query_params = [] + if 'id_resource_type' in params: + query_params.append(('id.resource_type', params['id_resource_type'])) # noqa: E501 + if 'id_org' in 
params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminWorkflow', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_workflow2(self, id_org, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_workflow2(id_org, id_project, id_domain, id_name, id_version, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. 
(required) + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :return: AdminWorkflow + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_workflow2_with_http_info(id_org, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + else: + (data) = self.get_workflow2_with_http_info(id_org, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 + return data + + def get_workflow2_with_http_info(self, id_org, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_workflow2_with_http_info(id_org, id_project, id_domain, id_name, id_version, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :param str id_resource_type: Identifies the specific type of resource that this identifier corresponds to. 
- DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :return: AdminWorkflow + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'id_version', 'id_resource_type'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_workflow2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `get_workflow2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `get_workflow2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `get_workflow2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `get_workflow2`") # noqa: E501 + # verify the required parameter 'id_version' is set + if ('id_version' not in params or + params['id_version'] is None): 
+ raise ValueError("Missing the required parameter `id_version` when calling `get_workflow2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + if 'id_version' in params: + path_params['id.version'] = params['id_version'] # noqa: E501 + + query_params = [] + if 'id_resource_type' in params: + query_params.append(('id.resource_type', params['id_resource_type'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminWorkflow', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_workflow_attributes(self, project, domain, workflow, **kwargs): # noqa: E501 + """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. 
# noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_workflow_attributes(project, domain, workflow, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param str workflow: Workflow name which this set of attributes references. +required (required) + :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :param str org: Optional, org key applied to the attributes. + :return: AdminWorkflowAttributesGetResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_workflow_attributes_with_http_info(project, domain, workflow, **kwargs) # noqa: E501 + else: + (data) = self.get_workflow_attributes_with_http_info(project, domain, workflow, **kwargs) # noqa: E501 + return data + + def get_workflow_attributes_with_http_info(self, project, domain, workflow, **kwargs): # noqa: E501 + """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_workflow_attributes_with_http_info(project, domain, workflow, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param str workflow: Workflow name which this set of attributes references. +required (required) + :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. 
+ :param str org: Optional, org key applied to the attributes. + :return: AdminWorkflowAttributesGetResponse + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['project', 'domain', 'workflow', 'resource_type', 'org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_workflow_attributes" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `get_workflow_attributes`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `get_workflow_attributes`") # noqa: E501 + # verify the required parameter 'workflow' is set + if ('workflow' not in params or + params['workflow'] is None): + raise ValueError("Missing the required parameter `workflow` when calling `get_workflow_attributes`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + if 'workflow' in params: + path_params['workflow'] = params['workflow'] # noqa: E501 + + query_params = [] + if 'resource_type' in params: + query_params.append(('resource_type', params['resource_type'])) # noqa: E501 + if 'org' in params: + query_params.append(('org', params['org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header 
`Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/workflow_attributes/{project}/{domain}/{workflow}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminWorkflowAttributesGetResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def get_workflow_attributes2(self, org, project, domain, workflow, **kwargs): # noqa: E501 + """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_workflow_attributes2(org, project, domain, workflow, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the attributes. (required) + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param str workflow: Workflow name which this set of attributes references. +required (required) + :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. 
- CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :return: AdminWorkflowAttributesGetResponse + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.get_workflow_attributes2_with_http_info(org, project, domain, workflow, **kwargs) # noqa: E501 + else: + (data) = self.get_workflow_attributes2_with_http_info(org, project, domain, workflow, **kwargs) # noqa: E501 + return data + + def get_workflow_attributes2_with_http_info(self, org, project, domain, workflow, **kwargs): # noqa: E501 + """Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.get_workflow_attributes2_with_http_info(org, project, domain, workflow, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the attributes. (required) + :param str project: Unique project id which this set of attributes references. +required (required) + :param str domain: Unique domain id which this set of attributes references. +required (required) + :param str workflow: Workflow name which this set of attributes references. 
+required (required) + :param str resource_type: Which type of matchable attributes to return. +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :return: AdminWorkflowAttributesGetResponse + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['org', 'project', 'domain', 'workflow', 'resource_type'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method get_workflow_attributes2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `get_workflow_attributes2`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `get_workflow_attributes2`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in 
params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `get_workflow_attributes2`") # noqa: E501 + # verify the required parameter 'workflow' is set + if ('workflow' not in params or + params['workflow'] is None): + raise ValueError("Missing the required parameter `workflow` when calling `get_workflow_attributes2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + if 'workflow' in params: + path_params['workflow'] = params['workflow'] # noqa: E501 + + query_params = [] + if 'resource_type' in params: + query_params.append(('resource_type', params['resource_type'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminWorkflowAttributesGetResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_active_launch_plans(self, project, domain, **kwargs): # noqa: E501 
+ """List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_active_launch_plans(project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Name of the project that contains the identifiers. +required. (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required. (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str org: Optional, org key applied to the resource. + :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_active_launch_plans_with_http_info(project, domain, **kwargs) # noqa: E501 + else: + (data) = self.list_active_launch_plans_with_http_info(project, domain, **kwargs) # noqa: E501 + return data + + def list_active_launch_plans_with_http_info(self, project, domain, **kwargs): # noqa: E501 + """List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_active_launch_plans_with_http_info(project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Name of the project that contains the identifiers. +required. (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required. (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str org: Optional, org key applied to the resource. + :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_active_launch_plans" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `list_active_launch_plans`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `list_active_launch_plans`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'org' in params: + query_params.append(('org', params['org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = 
self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/active_launch_plans/{project}/{domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminLaunchPlanList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_active_launch_plans2(self, org, project, domain, **kwargs): # noqa: E501 + """List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_active_launch_plans2(org, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the resource. (required) + :param str project: Name of the project that contains the identifiers. +required. (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required. (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
+ :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_active_launch_plans2_with_http_info(org, project, domain, **kwargs) # noqa: E501 + else: + (data) = self.list_active_launch_plans2_with_http_info(org, project, domain, **kwargs) # noqa: E501 + return data + + def list_active_launch_plans2_with_http_info(self, org, project, domain, **kwargs): # noqa: E501 + """List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_active_launch_plans2_with_http_info(org, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the resource. (required) + :param str project: Name of the project that contains the identifiers. +required. (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required. (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['org', 'project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_active_launch_plans2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `list_active_launch_plans2`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `list_active_launch_plans2`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `list_active_launch_plans2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + 
body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/active_launch_plans/org/{org}/{project}/{domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminLaunchPlanList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_description_entities(self, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_description_entities(resource_type, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. 
+optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminDescriptionEntityList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_description_entities_with_http_info(resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 + else: + (data) = self.list_description_entities_with_http_info(resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 + return data + + def list_description_entities_with_http_info(self, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_description_entities_with_http_info(resource_type, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. 
A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminDescriptionEntityList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['resource_type', 'id_project', 'id_domain', 'id_name', 'id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_description_entities" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'resource_type' is set + if ('resource_type' not in params or + params['resource_type'] is None): + raise ValueError("Missing the required parameter `resource_type` when calling `list_description_entities`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_description_entities`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_description_entities`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `list_description_entities`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'resource_type' in params: + path_params['resource_type'] = params['resource_type'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + + query_params = [] + if 
'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminDescriptionEntityList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_description_entities2(self, id_org, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_description_entities2(id_org, resource_type, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminDescriptionEntityList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_description_entities2_with_http_info(id_org, resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 + else: + (data) = self.list_description_entities2_with_http_info(id_org, resource_type, id_project, id_domain, id_name, **kwargs) # noqa: E501 + return data + + def list_description_entities2_with_http_info(self, id_org, resource_type, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_description_entities2_with_http_info(id_org, resource_type, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. 
+ :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminDescriptionEntityList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_org', 'resource_type', 'id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_description_entities2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `list_description_entities2`") # noqa: E501 + # verify the required parameter 'resource_type' is set + if ('resource_type' not in params or + params['resource_type'] is None): + raise ValueError("Missing the required parameter `resource_type` when calling `list_description_entities2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_description_entities2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_description_entities2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter 
`id_name` when calling `list_description_entities2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'resource_type' in params: + path_params['resource_type'] = params['resource_type'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminDescriptionEntityList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + 
_preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_description_entities3(self, resource_type, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_description_entities3(resource_type, id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminDescriptionEntityList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_description_entities3_with_http_info(resource_type, id_project, id_domain, **kwargs) # noqa: E501 + else: + (data) = self.list_description_entities3_with_http_info(resource_type, id_project, id_domain, **kwargs) # noqa: E501 + return data + + def list_description_entities3_with_http_info(self, resource_type, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_description_entities3_with_http_info(resource_type, id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. 
- DESCENDING: By default, fields are sorted in descending order. + :return: AdminDescriptionEntityList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['resource_type', 'id_project', 'id_domain', 'id_name', 'id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_description_entities3" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'resource_type' is set + if ('resource_type' not in params or + params['resource_type'] is None): + raise ValueError("Missing the required parameter `resource_type` when calling `list_description_entities3`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_description_entities3`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_description_entities3`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'resource_type' in params: + path_params['resource_type'] = params['resource_type'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + + query_params = [] + if 'id_name' in params: + query_params.append(('id.name', params['id_name'])) # noqa: E501 + if 'id_org' in params: + 
query_params.append(('id.org', params['id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminDescriptionEntityList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_description_entities4(self, id_org, resource_type, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_description_entities4(id_org, resource_type, id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminDescriptionEntityList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_description_entities4_with_http_info(id_org, resource_type, id_project, id_domain, **kwargs) # noqa: E501 + else: + (data) = self.list_description_entities4_with_http_info(id_org, resource_type, id_project, id_domain, **kwargs) # noqa: E501 + return data + + def list_description_entities4_with_http_info(self, id_org, resource_type, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_description_entities4_with_http_info(id_org, resource_type, id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str resource_type: Identifies the specific type of resource that this identifier corresponds to. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. 
+ :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminDescriptionEntityList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_org', 'resource_type', 'id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_description_entities4" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `list_description_entities4`") # noqa: E501 + # verify the required parameter 'resource_type' is set + if ('resource_type' not in params or + params['resource_type'] is None): + raise ValueError("Missing the required parameter `resource_type` when calling `list_description_entities4`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_description_entities4`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_description_entities4`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'resource_type' in params: + 
path_params['resource_type'] = params['resource_type'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + + query_params = [] + if 'id_name' in params: + query_params.append(('id.name', params['id_name'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminDescriptionEntityList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_executions(self, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of 
:ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_executions(id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminExecutionList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_executions_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + else: + (data) = self.list_executions_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + return data + + def list_executions_with_http_info(self, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Execution`. 
# noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_executions_with_http_info(id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminExecutionList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id_project', 'id_domain', 'id_name', 'id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_executions" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_executions`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_executions`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + + query_params = [] + if 'id_name' in params: + query_params.append(('id.name', params['id_name'])) # noqa: E501 + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + 
local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/executions/{id.project}/{id.domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminExecutionList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_executions2(self, id_org, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_executions2(id_org, id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param int limit: Indicates the number of resources to be returned. +required. 
+ :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminExecutionList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_executions2_with_http_info(id_org, id_project, id_domain, **kwargs) # noqa: E501 + else: + (data) = self.list_executions2_with_http_info(id_org, id_project, id_domain, **kwargs) # noqa: E501 + return data + + def list_executions2_with_http_info(self, id_org, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_executions2_with_http_info(id_org, id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param int limit: Indicates the number of resources to be returned. +required. 
+ :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminExecutionList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_executions2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `list_executions2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_executions2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_executions2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 
'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + + query_params = [] + if 'id_name' in params: + query_params.append(('id.name', params['id_name'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/executions/org/{id.org}/{id.project}/{id.domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminExecutionList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_launch_plan_ids(self, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. 
# noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plan_ids(project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :param str org: Optional, org key applied to the resource. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_launch_plan_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + else: + (data) = self.list_launch_plan_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + return data + + def list_launch_plan_ids_with_http_info(self, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plan_ids_with_http_info(project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :param str org: Optional, org key applied to the resource. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters', 'org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_launch_plan_ids" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `list_launch_plan_ids`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `list_launch_plan_ids`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'org' in params: + query_params.append(('org', params['org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + 
['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/launch_plan_ids/{project}/{domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNamedEntityIdentifierList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_launch_plan_ids2(self, org, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plan_ids2(org, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the resource. (required) + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. 
- DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_launch_plan_ids2_with_http_info(org, project, domain, **kwargs) # noqa: E501 + else: + (data) = self.list_launch_plan_ids2_with_http_info(org, project, domain, **kwargs) # noqa: E501 + return data + + def list_launch_plan_ids2_with_http_info(self, org, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plan_ids2_with_http_info(org, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the resource. (required) + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['org', 'project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_launch_plan_ids2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `list_launch_plan_ids2`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `list_launch_plan_ids2`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `list_launch_plan_ids2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # 
noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/launch_plan_ids/org/{org}/{project}/{domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNamedEntityIdentifierList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_launch_plans(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plans(id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. 
+ :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_launch_plans_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + else: + (data) = self.list_launch_plans_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + return data + + def list_launch_plans_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plans_with_http_info(id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. 
+ :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_project', 'id_domain', 'id_name', 'id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_launch_plans" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_launch_plans`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_launch_plans`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `list_launch_plans`") # noqa: E501 + + collection_formats = {} + + path_params = 
{} + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + + query_params = [] + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminLaunchPlanList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_launch_plans2(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of 
:ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plans2(id_org, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_launch_plans2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 + else: + (data) = self.list_launch_plans2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 + return data + + def list_launch_plans2_with_http_info(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plans2_with_http_info(id_org, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
+ :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_launch_plans2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `list_launch_plans2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_launch_plans2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_launch_plans2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `list_launch_plans2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + + query_params = [] + if 
'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminLaunchPlanList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_launch_plans3(self, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plans3(id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. 
(required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_launch_plans3_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + else: + (data) = self.list_launch_plans3_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + return data + + def list_launch_plans3_with_http_info(self, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plans3_with_http_info(id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. 
(required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id_project', 'id_domain', 'id_name', 'id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_launch_plans3" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_launch_plans3`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_launch_plans3`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + + query_params = [] + if 'id_name' in params: + query_params.append(('id.name', params['id_name'])) # noqa: E501 + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params 
= [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/launch_plans/{id.project}/{id.domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminLaunchPlanList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_launch_plans4(self, id_org, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plans4(id_org, id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param int limit: Indicates the number of resources to be returned. +required. 
+ :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_launch_plans4_with_http_info(id_org, id_project, id_domain, **kwargs) # noqa: E501 + else: + (data) = self.list_launch_plans4_with_http_info(id_org, id_project, id_domain, **kwargs) # noqa: E501 + return data + + def list_launch_plans4_with_http_info(self, id_org, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_launch_plans4_with_http_info(id_org, id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param int limit: Indicates the number of resources to be returned. +required. 
+ :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminLaunchPlanList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_launch_plans4" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `list_launch_plans4`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_launch_plans4`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_launch_plans4`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: 
E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + + query_params = [] + if 'id_name' in params: + query_params.append(('id.name', params['id_name'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminLaunchPlanList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_matchable_attributes(self, **kwargs): # noqa: E501 + """Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. 
# noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_matchable_attributes(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str resource_type: +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :param str org: Optional, org filter applied to list project requests. + :return: AdminListMatchableAttributesResponse + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_matchable_attributes_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.list_matchable_attributes_with_http_info(**kwargs) # noqa: E501 + return data + + def list_matchable_attributes_with_http_info(self, **kwargs): # noqa: E501 + """Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_matchable_attributes_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str resource_type: +required. 
- TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :param str org: Optional, org filter applied to list project requests. + :return: AdminListMatchableAttributesResponse + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['resource_type', 'org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_matchable_attributes" % key + ) + params[key] = val + del params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'resource_type' in params: + query_params.append(('resource_type', params['resource_type'])) # noqa: E501 + if 'org' in params: + query_params.append(('org', params['org'])) # noqa: E501 header_params = {} @@ -4120,7 +8663,3069 @@ def list_matchable_attributes_with_http_info(self, **kwargs): # noqa: E501 body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminListMatchableAttributesResponse', # noqa: E501 + response_type='AdminListMatchableAttributesResponse', # 
noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_matchable_attributes2(self, org, **kwargs): # noqa: E501 + """Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_matchable_attributes2(org, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org filter applied to list project requests. (required) + :param str resource_type: +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :return: AdminListMatchableAttributesResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_matchable_attributes2_with_http_info(org, **kwargs) # noqa: E501 + else: + (data) = self.list_matchable_attributes2_with_http_info(org, **kwargs) # noqa: E501 + return data + + def list_matchable_attributes2_with_http_info(self, org, **kwargs): # noqa: E501 + """Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_matchable_attributes2_with_http_info(org, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org filter applied to list project requests. (required) + :param str resource_type: +required. - TASK_RESOURCE: Applies to customizable task resource requests and limits. - CLUSTER_RESOURCE: Applies to configuring templated kubernetes cluster resources. - EXECUTION_QUEUE: Configures task and dynamic task execution queue assignment. - EXECUTION_CLUSTER_LABEL: Configures the K8s cluster label to be used for execution to be run - QUALITY_OF_SERVICE_SPECIFICATION: Configures default quality of service when undefined in an execution spec. - PLUGIN_OVERRIDE: Selects configurable plugin implementation behavior for a given task type. - WORKFLOW_EXECUTION_CONFIG: Adds defaults for customizable workflow-execution specifications and overrides. - CLUSTER_ASSIGNMENT: Controls how to select an available cluster on which this execution should run. + :return: AdminListMatchableAttributesResponse + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['org', 'resource_type'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_matchable_attributes2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `list_matchable_attributes2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + + query_params = [] + if 'resource_type' in params: + query_params.append(('resource_type', params['resource_type'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/matchable_attributes/org/{org}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminListMatchableAttributesResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_named_entities(self, resource_type, 
project, domain, **kwargs): # noqa: E501 + """Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_named_entities(resource_type, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str resource_type: Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required (required) + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. (required) + :param int limit: Indicates the number of resources to be returned. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :param str org: Optional, org key applied to the resource. + :return: AdminNamedEntityList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_named_entities_with_http_info(resource_type, project, domain, **kwargs) # noqa: E501 + else: + (data) = self.list_named_entities_with_http_info(resource_type, project, domain, **kwargs) # noqa: E501 + return data + + def list_named_entities_with_http_info(self, resource_type, project, domain, **kwargs): # noqa: E501 + """Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_named_entities_with_http_info(resource_type, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str resource_type: Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required (required) + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. (required) + :param int limit: Indicates the number of resources to be returned. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :param str org: Optional, org key applied to the resource. + :return: AdminNamedEntityList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['resource_type', 'project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters', 'org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_named_entities" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'resource_type' is set + if ('resource_type' not in params or + params['resource_type'] is None): + raise ValueError("Missing the required parameter `resource_type` when calling `list_named_entities`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `list_named_entities`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `list_named_entities`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'resource_type' in params: + path_params['resource_type'] = params['resource_type'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 
'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'org' in params: + query_params.append(('org', params['org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/named_entities/{resource_type}/{project}/{domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNamedEntityList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_named_entities2(self, org, resource_type, project, domain, **kwargs): # noqa: E501 + """Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_named_entities2(org, resource_type, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the resource. (required) + :param str resource_type: Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required (required) + :param str project: Name of the project that contains the identifiers. 
+required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. (required) + :param int limit: Indicates the number of resources to be returned. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :return: AdminNamedEntityList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_named_entities2_with_http_info(org, resource_type, project, domain, **kwargs) # noqa: E501 + else: + (data) = self.list_named_entities2_with_http_info(org, resource_type, project, domain, **kwargs) # noqa: E501 + return data + + def list_named_entities2_with_http_info(self, org, resource_type, project, domain, **kwargs): # noqa: E501 + """Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_named_entities2_with_http_info(org, resource_type, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the resource. (required) + :param str resource_type: Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required (required) + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. 
(required) + :param int limit: Indicates the number of resources to be returned. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :return: AdminNamedEntityList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['org', 'resource_type', 'project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_named_entities2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `list_named_entities2`") # noqa: E501 + # verify the required parameter 'resource_type' is set + if ('resource_type' not in params or + params['resource_type'] is None): + raise ValueError("Missing the required parameter `resource_type` when calling `list_named_entities2`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `list_named_entities2`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + 
params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `list_named_entities2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + if 'resource_type' in params: + path_params['resource_type'] = params['resource_type'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/named_entities/org/{org}/{resource_type}/{project}/{domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNamedEntityList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + 
_request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_node_executions(self, workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_node_executions(workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str workflow_execution_id_project: Name of the project the resource belongs to. (required) + :param str workflow_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str workflow_execution_id_name: User or system provided value for the resource. (required) + :param str workflow_execution_id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str unique_parent_id: Unique identifier of the parent node in the execution +optional. + :return: AdminNodeExecutionList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_node_executions_with_http_info(workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs) # noqa: E501 + else: + (data) = self.list_node_executions_with_http_info(workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs) # noqa: E501 + return data + + def list_node_executions_with_http_info(self, workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_node_executions_with_http_info(workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str workflow_execution_id_project: Name of the project the resource belongs to. (required) + :param str workflow_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str workflow_execution_id_name: User or system provided value for the resource. (required) + :param str workflow_execution_id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
+ :param str unique_parent_id: Unique identifier of the parent node in the execution +optional. + :return: AdminNodeExecutionList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['workflow_execution_id_project', 'workflow_execution_id_domain', 'workflow_execution_id_name', 'workflow_execution_id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction', 'unique_parent_id'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_node_executions" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'workflow_execution_id_project' is set + if ('workflow_execution_id_project' not in params or + params['workflow_execution_id_project'] is None): + raise ValueError("Missing the required parameter `workflow_execution_id_project` when calling `list_node_executions`") # noqa: E501 + # verify the required parameter 'workflow_execution_id_domain' is set + if ('workflow_execution_id_domain' not in params or + params['workflow_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `workflow_execution_id_domain` when calling `list_node_executions`") # noqa: E501 + # verify the required parameter 'workflow_execution_id_name' is set + if ('workflow_execution_id_name' not in params or + params['workflow_execution_id_name'] is None): + raise ValueError("Missing the required parameter `workflow_execution_id_name` when calling `list_node_executions`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'workflow_execution_id_project' in params: + path_params['workflow_execution_id.project'] = params['workflow_execution_id_project'] # noqa: E501 + if 
'workflow_execution_id_domain' in params: + path_params['workflow_execution_id.domain'] = params['workflow_execution_id_domain'] # noqa: E501 + if 'workflow_execution_id_name' in params: + path_params['workflow_execution_id.name'] = params['workflow_execution_id_name'] # noqa: E501 + + query_params = [] + if 'workflow_execution_id_org' in params: + query_params.append(('workflow_execution_id.org', params['workflow_execution_id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'unique_parent_id' in params: + query_params.append(('unique_parent_id', params['unique_parent_id'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNodeExecutionList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + 
_preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_node_executions2(self, workflow_execution_id_org, workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_node_executions2(workflow_execution_id_org, workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str workflow_execution_id_org: Optional, org key applied to the resource. (required) + :param str workflow_execution_id_project: Name of the project the resource belongs to. (required) + :param str workflow_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str workflow_execution_id_name: User or system provided value for the resource. (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str unique_parent_id: Unique identifier of the parent node in the execution +optional. + :return: AdminNodeExecutionList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_node_executions2_with_http_info(workflow_execution_id_org, workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs) # noqa: E501 + else: + (data) = self.list_node_executions2_with_http_info(workflow_execution_id_org, workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs) # noqa: E501 + return data + + def list_node_executions2_with_http_info(self, workflow_execution_id_org, workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_node_executions2_with_http_info(workflow_execution_id_org, workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str workflow_execution_id_org: Optional, org key applied to the resource. (required) + :param str workflow_execution_id_project: Name of the project the resource belongs to. (required) + :param str workflow_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str workflow_execution_id_name: User or system provided value for the resource. (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. 
- DESCENDING: By default, fields are sorted in descending order. + :param str unique_parent_id: Unique identifier of the parent node in the execution +optional. + :return: AdminNodeExecutionList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['workflow_execution_id_org', 'workflow_execution_id_project', 'workflow_execution_id_domain', 'workflow_execution_id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction', 'unique_parent_id'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_node_executions2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'workflow_execution_id_org' is set + if ('workflow_execution_id_org' not in params or + params['workflow_execution_id_org'] is None): + raise ValueError("Missing the required parameter `workflow_execution_id_org` when calling `list_node_executions2`") # noqa: E501 + # verify the required parameter 'workflow_execution_id_project' is set + if ('workflow_execution_id_project' not in params or + params['workflow_execution_id_project'] is None): + raise ValueError("Missing the required parameter `workflow_execution_id_project` when calling `list_node_executions2`") # noqa: E501 + # verify the required parameter 'workflow_execution_id_domain' is set + if ('workflow_execution_id_domain' not in params or + params['workflow_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `workflow_execution_id_domain` when calling `list_node_executions2`") # noqa: E501 + # verify the required parameter 'workflow_execution_id_name' is set + if ('workflow_execution_id_name' not in params or + 
params['workflow_execution_id_name'] is None): + raise ValueError("Missing the required parameter `workflow_execution_id_name` when calling `list_node_executions2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'workflow_execution_id_org' in params: + path_params['workflow_execution_id.org'] = params['workflow_execution_id_org'] # noqa: E501 + if 'workflow_execution_id_project' in params: + path_params['workflow_execution_id.project'] = params['workflow_execution_id_project'] # noqa: E501 + if 'workflow_execution_id_domain' in params: + path_params['workflow_execution_id.domain'] = params['workflow_execution_id_domain'] # noqa: E501 + if 'workflow_execution_id_name' in params: + path_params['workflow_execution_id.name'] = params['workflow_execution_id_name'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'unique_parent_id' in params: + query_params.append(('unique_parent_id', params['unique_parent_id'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + 
'/api/v1/node_executions/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNodeExecutionList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_node_executions_for_task(self, task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_node_executions_for_task(task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_execution_id_node_execution_id_execution_id_project: Name of the project the resource belongs to. 
(required) + :param str task_execution_id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str task_execution_id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str task_execution_id_node_execution_id_node_id: (required) + :param str task_execution_id_task_id_project: Name of the project the resource belongs to. (required) + :param str task_execution_id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str task_execution_id_task_id_name: User provided value for the resource. (required) + :param str task_execution_id_task_id_version: Specific version of the resource. (required) + :param int task_execution_id_retry_attempt: (required) + :param str task_execution_id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str task_execution_id_task_id_org: Optional, org key applied to the resource. + :param str task_execution_id_node_execution_id_execution_id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. 
+ :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminNodeExecutionList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_node_executions_for_task_with_http_info(task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs) # noqa: E501 + else: + (data) = self.list_node_executions_for_task_with_http_info(task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs) # noqa: E501 + return data + + def list_node_executions_for_task_with_http_info(self, task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. 
# noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_node_executions_for_task_with_http_info(task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_execution_id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) + :param str task_execution_id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str task_execution_id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str task_execution_id_node_execution_id_node_id: (required) + :param str task_execution_id_task_id_project: Name of the project the resource belongs to. (required) + :param str task_execution_id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str task_execution_id_task_id_name: User provided value for the resource. (required) + :param str task_execution_id_task_id_version: Specific version of the resource. (required) + :param int task_execution_id_retry_attempt: (required) + :param str task_execution_id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. 
Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str task_execution_id_task_id_org: Optional, org key applied to the resource. + :param str task_execution_id_node_execution_id_execution_id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminNodeExecutionList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['task_execution_id_node_execution_id_execution_id_project', 'task_execution_id_node_execution_id_execution_id_domain', 'task_execution_id_node_execution_id_execution_id_name', 'task_execution_id_node_execution_id_node_id', 'task_execution_id_task_id_project', 'task_execution_id_task_id_domain', 'task_execution_id_task_id_name', 'task_execution_id_task_id_version', 'task_execution_id_retry_attempt', 'task_execution_id_task_id_resource_type', 'task_execution_id_task_id_org', 'task_execution_id_node_execution_id_execution_id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_node_executions_for_task" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'task_execution_id_node_execution_id_execution_id_project' is set + if ('task_execution_id_node_execution_id_execution_id_project' not in params or + params['task_execution_id_node_execution_id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_execution_id_project` when calling `list_node_executions_for_task`") # noqa: E501 + # verify the required parameter 'task_execution_id_node_execution_id_execution_id_domain' is set + if ('task_execution_id_node_execution_id_execution_id_domain' not in params or + params['task_execution_id_node_execution_id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_execution_id_domain` when calling `list_node_executions_for_task`") # noqa: E501 + # verify the required parameter 
'task_execution_id_node_execution_id_execution_id_name' is set + if ('task_execution_id_node_execution_id_execution_id_name' not in params or + params['task_execution_id_node_execution_id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_execution_id_name` when calling `list_node_executions_for_task`") # noqa: E501 + # verify the required parameter 'task_execution_id_node_execution_id_node_id' is set + if ('task_execution_id_node_execution_id_node_id' not in params or + params['task_execution_id_node_execution_id_node_id'] is None): + raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_node_id` when calling `list_node_executions_for_task`") # noqa: E501 + # verify the required parameter 'task_execution_id_task_id_project' is set + if ('task_execution_id_task_id_project' not in params or + params['task_execution_id_task_id_project'] is None): + raise ValueError("Missing the required parameter `task_execution_id_task_id_project` when calling `list_node_executions_for_task`") # noqa: E501 + # verify the required parameter 'task_execution_id_task_id_domain' is set + if ('task_execution_id_task_id_domain' not in params or + params['task_execution_id_task_id_domain'] is None): + raise ValueError("Missing the required parameter `task_execution_id_task_id_domain` when calling `list_node_executions_for_task`") # noqa: E501 + # verify the required parameter 'task_execution_id_task_id_name' is set + if ('task_execution_id_task_id_name' not in params or + params['task_execution_id_task_id_name'] is None): + raise ValueError("Missing the required parameter `task_execution_id_task_id_name` when calling `list_node_executions_for_task`") # noqa: E501 + # verify the required parameter 'task_execution_id_task_id_version' is set + if ('task_execution_id_task_id_version' not in params or + params['task_execution_id_task_id_version'] is None): + raise ValueError("Missing the required 
parameter `task_execution_id_task_id_version` when calling `list_node_executions_for_task`") # noqa: E501 + # verify the required parameter 'task_execution_id_retry_attempt' is set + if ('task_execution_id_retry_attempt' not in params or + params['task_execution_id_retry_attempt'] is None): + raise ValueError("Missing the required parameter `task_execution_id_retry_attempt` when calling `list_node_executions_for_task`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'task_execution_id_node_execution_id_execution_id_project' in params: + path_params['task_execution_id.node_execution_id.execution_id.project'] = params['task_execution_id_node_execution_id_execution_id_project'] # noqa: E501 + if 'task_execution_id_node_execution_id_execution_id_domain' in params: + path_params['task_execution_id.node_execution_id.execution_id.domain'] = params['task_execution_id_node_execution_id_execution_id_domain'] # noqa: E501 + if 'task_execution_id_node_execution_id_execution_id_name' in params: + path_params['task_execution_id.node_execution_id.execution_id.name'] = params['task_execution_id_node_execution_id_execution_id_name'] # noqa: E501 + if 'task_execution_id_node_execution_id_node_id' in params: + path_params['task_execution_id.node_execution_id.node_id'] = params['task_execution_id_node_execution_id_node_id'] # noqa: E501 + if 'task_execution_id_task_id_project' in params: + path_params['task_execution_id.task_id.project'] = params['task_execution_id_task_id_project'] # noqa: E501 + if 'task_execution_id_task_id_domain' in params: + path_params['task_execution_id.task_id.domain'] = params['task_execution_id_task_id_domain'] # noqa: E501 + if 'task_execution_id_task_id_name' in params: + path_params['task_execution_id.task_id.name'] = params['task_execution_id_task_id_name'] # noqa: E501 + if 'task_execution_id_task_id_version' in params: + path_params['task_execution_id.task_id.version'] = params['task_execution_id_task_id_version'] # noqa: E501 + if 
'task_execution_id_retry_attempt' in params: + path_params['task_execution_id.retry_attempt'] = params['task_execution_id_retry_attempt'] # noqa: E501 + + query_params = [] + if 'task_execution_id_task_id_resource_type' in params: + query_params.append(('task_execution_id.task_id.resource_type', params['task_execution_id_task_id_resource_type'])) # noqa: E501 + if 'task_execution_id_task_id_org' in params: + query_params.append(('task_execution_id.task_id.org', params['task_execution_id_task_id_org'])) # noqa: E501 + if 'task_execution_id_node_execution_id_execution_id_org' in params: + query_params.append(('task_execution_id.node_execution_id.execution_id.org', params['task_execution_id_node_execution_id_execution_id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + 
'/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNodeExecutionList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_node_executions_for_task2(self, task_execution_id_node_execution_id_execution_id_org, task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_node_executions_for_task2(task_execution_id_node_execution_id_execution_id_org, task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_execution_id_node_execution_id_execution_id_org: Optional, org key applied to the resource. (required) + :param str task_execution_id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) + :param str task_execution_id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str task_execution_id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str task_execution_id_node_execution_id_node_id: (required) + :param str task_execution_id_task_id_project: Name of the project the resource belongs to. (required) + :param str task_execution_id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str task_execution_id_task_id_name: User provided value for the resource. (required) + :param str task_execution_id_task_id_version: Specific version of the resource. (required) + :param int task_execution_id_retry_attempt: (required) + :param str task_execution_id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. 
A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str task_execution_id_task_id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminNodeExecutionList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_node_executions_for_task2_with_http_info(task_execution_id_node_execution_id_execution_id_org, task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs) # noqa: E501 + else: + (data) = self.list_node_executions_for_task2_with_http_info(task_execution_id_node_execution_id_execution_id_org, task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs) # noqa: E501 + return data + + def list_node_executions_for_task2_with_http_info(self, task_execution_id_node_execution_id_execution_id_org, task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_node_executions_for_task2_with_http_info(task_execution_id_node_execution_id_execution_id_org, task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str task_execution_id_node_execution_id_execution_id_org: Optional, org key applied to the resource. (required) + :param str task_execution_id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) + :param str task_execution_id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str task_execution_id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str task_execution_id_node_execution_id_node_id: (required) + :param str task_execution_id_task_id_project: Name of the project the resource belongs to. (required) + :param str task_execution_id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str task_execution_id_task_id_name: User provided value for the resource. (required) + :param str task_execution_id_task_id_version: Specific version of the resource. (required) + :param int task_execution_id_retry_attempt: (required) + :param str task_execution_id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. 
A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects + :param str task_execution_id_task_id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminNodeExecutionList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['task_execution_id_node_execution_id_execution_id_org', 'task_execution_id_node_execution_id_execution_id_project', 'task_execution_id_node_execution_id_execution_id_domain', 'task_execution_id_node_execution_id_execution_id_name', 'task_execution_id_node_execution_id_node_id', 'task_execution_id_task_id_project', 'task_execution_id_task_id_domain', 'task_execution_id_task_id_name', 'task_execution_id_task_id_version', 'task_execution_id_retry_attempt', 'task_execution_id_task_id_resource_type', 'task_execution_id_task_id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_node_executions_for_task2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'task_execution_id_node_execution_id_execution_id_org' is set + if ('task_execution_id_node_execution_id_execution_id_org' not in params or + params['task_execution_id_node_execution_id_execution_id_org'] is None): + raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_execution_id_org` when calling `list_node_executions_for_task2`") # noqa: E501 + # verify the required parameter 'task_execution_id_node_execution_id_execution_id_project' is set + if ('task_execution_id_node_execution_id_execution_id_project' not in params or + params['task_execution_id_node_execution_id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_execution_id_project` when calling `list_node_executions_for_task2`") # noqa: E501 + # verify the required parameter 'task_execution_id_node_execution_id_execution_id_domain' 
is set + if ('task_execution_id_node_execution_id_execution_id_domain' not in params or + params['task_execution_id_node_execution_id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_execution_id_domain` when calling `list_node_executions_for_task2`") # noqa: E501 + # verify the required parameter 'task_execution_id_node_execution_id_execution_id_name' is set + if ('task_execution_id_node_execution_id_execution_id_name' not in params or + params['task_execution_id_node_execution_id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_execution_id_name` when calling `list_node_executions_for_task2`") # noqa: E501 + # verify the required parameter 'task_execution_id_node_execution_id_node_id' is set + if ('task_execution_id_node_execution_id_node_id' not in params or + params['task_execution_id_node_execution_id_node_id'] is None): + raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_node_id` when calling `list_node_executions_for_task2`") # noqa: E501 + # verify the required parameter 'task_execution_id_task_id_project' is set + if ('task_execution_id_task_id_project' not in params or + params['task_execution_id_task_id_project'] is None): + raise ValueError("Missing the required parameter `task_execution_id_task_id_project` when calling `list_node_executions_for_task2`") # noqa: E501 + # verify the required parameter 'task_execution_id_task_id_domain' is set + if ('task_execution_id_task_id_domain' not in params or + params['task_execution_id_task_id_domain'] is None): + raise ValueError("Missing the required parameter `task_execution_id_task_id_domain` when calling `list_node_executions_for_task2`") # noqa: E501 + # verify the required parameter 'task_execution_id_task_id_name' is set + if ('task_execution_id_task_id_name' not in params or + params['task_execution_id_task_id_name'] is None): + 
raise ValueError("Missing the required parameter `task_execution_id_task_id_name` when calling `list_node_executions_for_task2`") # noqa: E501 + # verify the required parameter 'task_execution_id_task_id_version' is set + if ('task_execution_id_task_id_version' not in params or + params['task_execution_id_task_id_version'] is None): + raise ValueError("Missing the required parameter `task_execution_id_task_id_version` when calling `list_node_executions_for_task2`") # noqa: E501 + # verify the required parameter 'task_execution_id_retry_attempt' is set + if ('task_execution_id_retry_attempt' not in params or + params['task_execution_id_retry_attempt'] is None): + raise ValueError("Missing the required parameter `task_execution_id_retry_attempt` when calling `list_node_executions_for_task2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'task_execution_id_node_execution_id_execution_id_org' in params: + path_params['task_execution_id.node_execution_id.execution_id.org'] = params['task_execution_id_node_execution_id_execution_id_org'] # noqa: E501 + if 'task_execution_id_node_execution_id_execution_id_project' in params: + path_params['task_execution_id.node_execution_id.execution_id.project'] = params['task_execution_id_node_execution_id_execution_id_project'] # noqa: E501 + if 'task_execution_id_node_execution_id_execution_id_domain' in params: + path_params['task_execution_id.node_execution_id.execution_id.domain'] = params['task_execution_id_node_execution_id_execution_id_domain'] # noqa: E501 + if 'task_execution_id_node_execution_id_execution_id_name' in params: + path_params['task_execution_id.node_execution_id.execution_id.name'] = params['task_execution_id_node_execution_id_execution_id_name'] # noqa: E501 + if 'task_execution_id_node_execution_id_node_id' in params: + path_params['task_execution_id.node_execution_id.node_id'] = params['task_execution_id_node_execution_id_node_id'] # noqa: E501 + if 'task_execution_id_task_id_project' in 
params: + path_params['task_execution_id.task_id.project'] = params['task_execution_id_task_id_project'] # noqa: E501 + if 'task_execution_id_task_id_domain' in params: + path_params['task_execution_id.task_id.domain'] = params['task_execution_id_task_id_domain'] # noqa: E501 + if 'task_execution_id_task_id_name' in params: + path_params['task_execution_id.task_id.name'] = params['task_execution_id_task_id_name'] # noqa: E501 + if 'task_execution_id_task_id_version' in params: + path_params['task_execution_id.task_id.version'] = params['task_execution_id_task_id_version'] # noqa: E501 + if 'task_execution_id_retry_attempt' in params: + path_params['task_execution_id.retry_attempt'] = params['task_execution_id_retry_attempt'] # noqa: E501 + + query_params = [] + if 'task_execution_id_task_id_resource_type' in params: + query_params.append(('task_execution_id.task_id.resource_type', params['task_execution_id_task_id_resource_type'])) # noqa: E501 + if 'task_execution_id_task_id_org' in params: + query_params.append(('task_execution_id.task_id.org', params['task_execution_id_task_id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # 
Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/children/org/{task_execution_id.node_execution_id.execution_id.org}/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNodeExecutionList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_projects(self, **kwargs): # noqa: E501 + """Fetches a list of :ref:`ref_flyteidl.admin.Project` # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_projects(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param int limit: Indicates the number of projects to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. 
- DESCENDING: By default, fields are sorted in descending order. + :param str org: Optional, org filter applied to list project requests. + :return: AdminProjects + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_projects_with_http_info(**kwargs) # noqa: E501 + else: + (data) = self.list_projects_with_http_info(**kwargs) # noqa: E501 + return data + + def list_projects_with_http_info(self, **kwargs): # noqa: E501 + """Fetches a list of :ref:`ref_flyteidl.admin.Project` # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_projects_with_http_info(async_req=True) + >>> result = thread.get() + + :param async_req bool + :param int limit: Indicates the number of projects to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str org: Optional, org filter applied to list project requests. + :return: AdminProjects + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction', 'org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_projects" % key + ) + params[key] = val + del params['kwargs'] + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'org' in params: + query_params.append(('org', params['org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/projects', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminProjects', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + 
_preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_projects2(self, org, **kwargs): # noqa: E501 + """Fetches a list of :ref:`ref_flyteidl.admin.Project` # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_projects2(org, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org filter applied to list project requests. (required) + :param int limit: Indicates the number of projects to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminProjects + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_projects2_with_http_info(org, **kwargs) # noqa: E501 + else: + (data) = self.list_projects2_with_http_info(org, **kwargs) # noqa: E501 + return data + + def list_projects2_with_http_info(self, org, **kwargs): # noqa: E501 + """Fetches a list of :ref:`ref_flyteidl.admin.Project` # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_projects2_with_http_info(org, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org filter applied to list project requests. (required) + :param int limit: Indicates the number of projects to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminProjects + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_projects2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `list_projects2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # 
noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/projects/org/{org}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminProjects', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_task_executions(self, node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs): # noqa: E501 + """Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_task_executions(node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) + :param str node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str node_execution_id_node_id: (required) + :param str node_execution_id_execution_id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskExecutionList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_task_executions_with_http_info(node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs) # noqa: E501 + else: + (data) = self.list_task_executions_with_http_info(node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs) # noqa: E501 + return data + + def list_task_executions_with_http_info(self, node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs): # noqa: E501 + """Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_task_executions_with_http_info(node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) + :param str node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str node_execution_id_node_id: (required) + :param str node_execution_id_execution_id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. 
+ :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskExecutionList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['node_execution_id_execution_id_project', 'node_execution_id_execution_id_domain', 'node_execution_id_execution_id_name', 'node_execution_id_node_id', 'node_execution_id_execution_id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_task_executions" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'node_execution_id_execution_id_project' is set + if ('node_execution_id_execution_id_project' not in params or + params['node_execution_id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `node_execution_id_execution_id_project` when calling `list_task_executions`") # noqa: E501 + # verify the required parameter 'node_execution_id_execution_id_domain' is set + if ('node_execution_id_execution_id_domain' not in params or + params['node_execution_id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `node_execution_id_execution_id_domain` when calling `list_task_executions`") # noqa: E501 + # verify the required parameter 'node_execution_id_execution_id_name' is set + if 
('node_execution_id_execution_id_name' not in params or + params['node_execution_id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `node_execution_id_execution_id_name` when calling `list_task_executions`") # noqa: E501 + # verify the required parameter 'node_execution_id_node_id' is set + if ('node_execution_id_node_id' not in params or + params['node_execution_id_node_id'] is None): + raise ValueError("Missing the required parameter `node_execution_id_node_id` when calling `list_task_executions`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'node_execution_id_execution_id_project' in params: + path_params['node_execution_id.execution_id.project'] = params['node_execution_id_execution_id_project'] # noqa: E501 + if 'node_execution_id_execution_id_domain' in params: + path_params['node_execution_id.execution_id.domain'] = params['node_execution_id_execution_id_domain'] # noqa: E501 + if 'node_execution_id_execution_id_name' in params: + path_params['node_execution_id.execution_id.name'] = params['node_execution_id_execution_id_name'] # noqa: E501 + if 'node_execution_id_node_id' in params: + path_params['node_execution_id.node_id'] = params['node_execution_id_node_id'] # noqa: E501 + + query_params = [] + if 'node_execution_id_execution_id_org' in params: + query_params.append(('node_execution_id.execution_id.org', params['node_execution_id_execution_id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + 
local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminTaskExecutionList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_task_executions2(self, node_execution_id_execution_id_org, node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs): # noqa: E501 + """Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_task_executions2(node_execution_id_execution_id_org, node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str node_execution_id_execution_id_org: Optional, org key applied to the resource. 
(required) + :param str node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) + :param str node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str node_execution_id_node_id: (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskExecutionList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_task_executions2_with_http_info(node_execution_id_execution_id_org, node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs) # noqa: E501 + else: + (data) = self.list_task_executions2_with_http_info(node_execution_id_execution_id_org, node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs) # noqa: E501 + return data + + def list_task_executions2_with_http_info(self, node_execution_id_execution_id_org, node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs): # noqa: E501 + """Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_task_executions2_with_http_info(node_execution_id_execution_id_org, node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str node_execution_id_execution_id_org: Optional, org key applied to the resource. (required) + :param str node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) + :param str node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str node_execution_id_execution_id_name: User or system provided value for the resource. (required) + :param str node_execution_id_node_id: (required) + :param int limit: Indicates the number of resources to be returned. +required. 
+ :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskExecutionList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['node_execution_id_execution_id_org', 'node_execution_id_execution_id_project', 'node_execution_id_execution_id_domain', 'node_execution_id_execution_id_name', 'node_execution_id_node_id', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_task_executions2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'node_execution_id_execution_id_org' is set + if ('node_execution_id_execution_id_org' not in params or + params['node_execution_id_execution_id_org'] is None): + raise ValueError("Missing the required parameter `node_execution_id_execution_id_org` when calling `list_task_executions2`") # noqa: E501 + # verify the required parameter 'node_execution_id_execution_id_project' is set + if ('node_execution_id_execution_id_project' not in params or + params['node_execution_id_execution_id_project'] is None): + raise ValueError("Missing the required parameter `node_execution_id_execution_id_project` when calling 
`list_task_executions2`") # noqa: E501 + # verify the required parameter 'node_execution_id_execution_id_domain' is set + if ('node_execution_id_execution_id_domain' not in params or + params['node_execution_id_execution_id_domain'] is None): + raise ValueError("Missing the required parameter `node_execution_id_execution_id_domain` when calling `list_task_executions2`") # noqa: E501 + # verify the required parameter 'node_execution_id_execution_id_name' is set + if ('node_execution_id_execution_id_name' not in params or + params['node_execution_id_execution_id_name'] is None): + raise ValueError("Missing the required parameter `node_execution_id_execution_id_name` when calling `list_task_executions2`") # noqa: E501 + # verify the required parameter 'node_execution_id_node_id' is set + if ('node_execution_id_node_id' not in params or + params['node_execution_id_node_id'] is None): + raise ValueError("Missing the required parameter `node_execution_id_node_id` when calling `list_task_executions2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'node_execution_id_execution_id_org' in params: + path_params['node_execution_id.execution_id.org'] = params['node_execution_id_execution_id_org'] # noqa: E501 + if 'node_execution_id_execution_id_project' in params: + path_params['node_execution_id.execution_id.project'] = params['node_execution_id_execution_id_project'] # noqa: E501 + if 'node_execution_id_execution_id_domain' in params: + path_params['node_execution_id.execution_id.domain'] = params['node_execution_id_execution_id_domain'] # noqa: E501 + if 'node_execution_id_execution_id_name' in params: + path_params['node_execution_id.execution_id.name'] = params['node_execution_id_execution_id_name'] # noqa: E501 + if 'node_execution_id_node_id' in params: + path_params['node_execution_id.node_id'] = params['node_execution_id_node_id'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: 
E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/task_executions/org/{node_execution_id.execution_id.org}/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminTaskExecutionList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_task_ids(self, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_task_ids(project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :param str org: Optional, org key applied to the resource. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_task_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + else: + (data) = self.list_task_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + return data + + def list_task_ids_with_http_info(self, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_task_ids_with_http_info(project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Name of the project that contains the identifiers. 
+required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :param str org: Optional, org key applied to the resource. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters', 'org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_task_ids" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `list_task_ids`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `list_task_ids`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + 
path_params['domain'] = params['domain'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'org' in params: + query_params.append(('org', params['org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/task_ids/{project}/{domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNamedEntityIdentifierList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_task_ids2(self, org, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_task_ids2(org, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the resource. (required) + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_task_ids2_with_http_info(org, project, domain, **kwargs) # noqa: E501 + else: + (data) = self.list_task_ids2_with_http_info(org, project, domain, **kwargs) # noqa: E501 + return data + + def list_task_ids2_with_http_info(self, org, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_task_ids2_with_http_info(org, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the resource. 
(required) + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['org', 'project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_task_ids2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `list_task_ids2`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `list_task_ids2`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the 
required parameter `domain` when calling `list_task_ids2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/tasks/org/{org}/{project}/{domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNamedEntityIdentifierList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_tasks(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of 
:ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_tasks(id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_tasks_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + else: + (data) = self.list_tasks_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + return data + + def list_tasks_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. 
# noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_tasks_with_http_info(id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id_project', 'id_domain', 'id_name', 'id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_tasks" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_tasks`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_tasks`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `list_tasks`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + + query_params = [] + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', 
params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/tasks/{id.project}/{id.domain}/{id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminTaskList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_tasks2(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_tasks2(id_org, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. 
The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_tasks2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 + else: + (data) = self.list_tasks2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 + return data + + def list_tasks2_with_http_info(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_tasks2_with_http_info(id_org, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. 
The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_tasks2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `list_tasks2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_tasks2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing 
the required parameter `id_domain` when calling `list_tasks2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `list_tasks2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminTaskList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + 
_return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_tasks3(self, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_tasks3(id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_tasks3_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + else: + (data) = self.list_tasks3_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + return data + + def list_tasks3_with_http_info(self, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_tasks3_with_http_info(id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id_project', 'id_domain', 'id_name', 'id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_tasks3" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_tasks3`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_tasks3`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + + query_params = [] + if 'id_name' in params: + query_params.append(('id.name', params['id_name'])) # noqa: E501 + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + 
local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/tasks/{id.project}/{id.domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminTaskList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_tasks4(self, id_org, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_tasks4(id_org, id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param int limit: Indicates the number of resources to be returned. +required. 
+ :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_tasks4_with_http_info(id_org, id_project, id_domain, **kwargs) # noqa: E501 + else: + (data) = self.list_tasks4_with_http_info(id_org, id_project, id_domain, **kwargs) # noqa: E501 + return data + + def list_tasks4_with_http_info(self, id_org, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_tasks4_with_http_info(id_org, id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param int limit: Indicates the number of resources to be returned. +required. 
+ :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminTaskList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_tasks4" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `list_tasks4`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_tasks4`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_tasks4`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'id_project' in params: + 
path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + + query_params = [] + if 'id_name' in params: + query_params.append(('id.name', params['id_name'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminTaskList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_workflow_ids(self, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_workflow_ids(project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :param str org: Optional, org key applied to the resource. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_workflow_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + else: + (data) = self.list_workflow_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + return data + + def list_workflow_ids_with_http_info(self, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_workflow_ids_with_http_info(project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str project: Name of the project that contains the identifiers. 
+required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :param str org: Optional, org key applied to the resource. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters', 'org'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_workflow_ids" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `list_workflow_ids`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise ValueError("Missing the required parameter `domain` when calling `list_workflow_ids`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' 
in params: + path_params['domain'] = params['domain'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'org' in params: + query_params.append(('org', params['org'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/workflow_ids/{project}/{domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNamedEntityIdentifierList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_workflow_ids2(self, org, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_workflow_ids2(org, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the resource. (required) + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_workflow_ids2_with_http_info(org, project, domain, **kwargs) # noqa: E501 + else: + (data) = self.list_workflow_ids2_with_http_info(org, project, domain, **kwargs) # noqa: E501 + return data + + def list_workflow_ids2_with_http_info(self, org, project, domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_workflow_ids2_with_http_info(org, project, domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str org: Optional, org key applied to the resource. 
(required) + :param str project: Name of the project that contains the identifiers. +required (required) + :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :param str filters: Indicates a list of filters passed as string. +optional. + :return: AdminNamedEntityIdentifierList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['org', 'project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_workflow_ids2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `list_workflow_ids2`") # noqa: E501 + # verify the required parameter 'project' is set + if ('project' not in params or + params['project'] is None): + raise ValueError("Missing the required parameter `project` when calling `list_workflow_ids2`") # noqa: E501 + # verify the required parameter 'domain' is set + if ('domain' not in params or + params['domain'] is None): + raise 
ValueError("Missing the required parameter `domain` when calling `list_workflow_ids2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + if 'project' in params: + path_params['project'] = params['project'] # noqa: E501 + if 'domain' in params: + path_params['domain'] = params['domain'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/workflows/org/{org}/{project}/{domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminNamedEntityIdentifierList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_workflows(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + 
"""Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_workflows(id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminWorkflowList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_workflows_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + else: + (data) = self.list_workflows_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + return data + + def list_workflows_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_workflows_with_http_info(id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminWorkflowList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id_project', 'id_domain', 'id_name', 'id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_workflows" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_workflows`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_workflows`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `list_workflows`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + + query_params = [] + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', 
params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/workflows/{id.project}/{id.domain}/{id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminWorkflowList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_workflows2(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_workflows2(id_org, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. 
The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminWorkflowList + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_workflows2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 + else: + (data) = self.list_workflows2_with_http_info(id_org, id_project, id_domain, id_name, **kwargs) # noqa: E501 + return data + + def list_workflows2_with_http_info(self, id_org, id_project, id_domain, id_name, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_workflows2_with_http_info(id_org, id_project, id_domain, id_name, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. 
(required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminWorkflowList + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_workflows2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `list_workflows2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_workflows2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if 
('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_workflows2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `list_workflows2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + + query_params = [] + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + 
response_type='AdminWorkflowList', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def list_workflows3(self, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_workflows3(id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminWorkflowList + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.list_workflows3_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + else: + (data) = self.list_workflows3_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + return data + + def list_workflows3_with_http_info(self, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.list_workflows3_with_http_info(id_project, id_domain, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param str id_org: Optional, org key applied to the resource. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. + :param str sort_by_key: Indicates an attribute to sort the response values. +required. + :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. + :return: AdminWorkflowList + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id_project', 'id_domain', 'id_name', 'id_org', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method list_workflows3" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_workflows3`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `list_workflows3`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + + query_params = [] + if 'id_name' in params: + query_params.append(('id.name', params['id_name'])) # noqa: E501 + if 'id_org' in params: + query_params.append(('id.org', params['id_org'])) # noqa: E501 + if 'limit' in params: + query_params.append(('limit', params['limit'])) # noqa: E501 + if 'token' in params: + query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 + if 'sort_by_key' in params: + query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 + if 'sort_by_direction' in params: + query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 + + header_params = {} + + form_params = [] + 
local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/workflows/{id.project}/{id.domain}', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminWorkflowList', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -4128,57 +11733,59 @@ def list_matchable_attributes_with_http_info(self, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_named_entities(self, resource_type, project, domain, **kwargs): # noqa: E501 - """Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. # noqa: E501 + def list_workflows4(self, id_org, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_named_entities(resource_type, project, domain, async_req=True) + >>> thread = api.list_workflows4(id_org, id_project, id_domain, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required (required) - :param str project: Name of the project that contains the identifiers. +required (required) - :param str domain: Name of the domain the identifiers belongs to within the project. 
(required) - :param int limit: Indicates the number of resources to be returned. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. :param str sort_by_key: Indicates an attribute to sort the response values. +required. :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :param str filters: Indicates a list of filters passed as string. +optional. - :return: AdminNamedEntityList + :return: AdminWorkflowList If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_named_entities_with_http_info(resource_type, project, domain, **kwargs) # noqa: E501 + return self.list_workflows4_with_http_info(id_org, id_project, id_domain, **kwargs) # noqa: E501 else: - (data) = self.list_named_entities_with_http_info(resource_type, project, domain, **kwargs) # noqa: E501 + (data) = self.list_workflows4_with_http_info(id_org, id_project, id_domain, **kwargs) # noqa: E501 return data - def list_named_entities_with_http_info(self, resource_type, project, domain, **kwargs): # noqa: E501 - """Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. # noqa: E501 + def list_workflows4_with_http_info(self, id_org, id_project, id_domain, **kwargs): # noqa: E501 + """Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_named_entities_with_http_info(resource_type, project, domain, async_req=True) + >>> thread = api.list_workflows4_with_http_info(id_org, id_project, id_domain, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: Resource type of the metadata to query. One of Task, Workflow or LaunchPlan. +required (required) - :param str project: Name of the project that contains the identifiers. +required (required) - :param str domain: Name of the domain the identifiers belongs to within the project. (required) - :param int limit: Indicates the number of resources to be returned. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. 
A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. + :param int limit: Indicates the number of resources to be returned. +required. + :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. + :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. :param str sort_by_key: Indicates an attribute to sort the response values. +required. :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :param str filters: Indicates a list of filters passed as string. +optional. - :return: AdminNamedEntityList + :return: AdminWorkflowList If the method is called asynchronously, returns the request thread. 
""" - all_params = ['resource_type', 'project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters'] # noqa: E501 + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -4189,44 +11796,46 @@ def list_named_entities_with_http_info(self, resource_type, project, domain, **k if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_named_entities" % key + " to method list_workflows4" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'resource_type' is set - if ('resource_type' not in params or - params['resource_type'] is None): - raise ValueError("Missing the required parameter `resource_type` when calling `list_named_entities`") # noqa: E501 - # verify the required parameter 'project' is set - if ('project' not in params or - params['project'] is None): - raise ValueError("Missing the required parameter `project` when calling `list_named_entities`") # noqa: E501 - # verify the required parameter 'domain' is set - if ('domain' not in params or - params['domain'] is None): - raise ValueError("Missing the required parameter `domain` when calling `list_named_entities`") # noqa: E501 + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `list_workflows4`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `list_workflows4`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the 
required parameter `id_domain` when calling `list_workflows4`") # noqa: E501 collection_formats = {} path_params = {} - if 'resource_type' in params: - path_params['resource_type'] = params['resource_type'] # noqa: E501 - if 'project' in params: - path_params['project'] = params['project'] # noqa: E501 - if 'domain' in params: - path_params['domain'] = params['domain'] # noqa: E501 + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 query_params = [] + if 'id_name' in params: + query_params.append(('id.name', params['id_name'])) # noqa: E501 if 'limit' in params: query_params.append(('limit', params['limit'])) # noqa: E501 if 'token' in params: query_params.append(('token', params['token'])) # noqa: E501 + if 'filters' in params: + query_params.append(('filters', params['filters'])) # noqa: E501 if 'sort_by_key' in params: query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 if 'sort_by_direction' in params: query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 header_params = {} @@ -4246,14 +11855,14 @@ def list_named_entities_with_http_info(self, resource_type, project, domain, **k auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/named_entities/{resource_type}/{project}/{domain}', 'GET', + '/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminNamedEntityList', # noqa: E501 + response_type='AdminWorkflowList', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ 
-4261,59 +11870,43 @@ def list_named_entities_with_http_info(self, resource_type, project, domain, **k _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_node_executions(self, workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + def recover_execution(self, body, **kwargs): # noqa: E501 + """Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_node_executions(workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, async_req=True) + >>> thread = api.recover_execution(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str workflow_execution_id_project: Name of the project the resource belongs to. (required) - :param str workflow_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str workflow_execution_id_name: User or system provided value for the resource. (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: - :param str filters: Indicates a list of filters passed as string. 
More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :param str unique_parent_id: Unique identifier of the parent node in the execution +optional. - :return: AdminNodeExecutionList + :param AdminExecutionRecoverRequest body: (required) + :return: AdminExecutionCreateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_node_executions_with_http_info(workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs) # noqa: E501 + return self.recover_execution_with_http_info(body, **kwargs) # noqa: E501 else: - (data) = self.list_node_executions_with_http_info(workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs) # noqa: E501 + (data) = self.recover_execution_with_http_info(body, **kwargs) # noqa: E501 return data - def list_node_executions_with_http_info(self, workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + def recover_execution_with_http_info(self, body, **kwargs): # noqa: E501 + """Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. 
This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_node_executions_with_http_info(workflow_execution_id_project, workflow_execution_id_domain, workflow_execution_id_name, async_req=True) + >>> thread = api.recover_execution_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str workflow_execution_id_project: Name of the project the resource belongs to. (required) - :param str workflow_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str workflow_execution_id_name: User or system provided value for the resource. (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :param str unique_parent_id: Unique identifier of the parent node in the execution +optional. - :return: AdminNodeExecutionList + :param AdminExecutionRecoverRequest body: (required) + :return: AdminExecutionCreateResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['workflow_execution_id_project', 'workflow_execution_id_domain', 'workflow_execution_id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction', 'unique_parent_id'] # noqa: E501 + all_params = ['body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -4324,46 +11917,20 @@ def list_node_executions_with_http_info(self, workflow_execution_id_project, wor if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_node_executions" % key + " to method recover_execution" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'workflow_execution_id_project' is set - if ('workflow_execution_id_project' not in params or - params['workflow_execution_id_project'] is None): - raise ValueError("Missing the required parameter `workflow_execution_id_project` when calling `list_node_executions`") # noqa: E501 - # verify the required parameter 'workflow_execution_id_domain' is set - if ('workflow_execution_id_domain' not in params or - params['workflow_execution_id_domain'] is None): - raise ValueError("Missing the required parameter `workflow_execution_id_domain` when calling `list_node_executions`") # noqa: E501 - # verify the required parameter 'workflow_execution_id_name' is set - if ('workflow_execution_id_name' not in params or - params['workflow_execution_id_name'] is None): - raise ValueError("Missing the required parameter `workflow_execution_id_name` when calling `list_node_executions`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `recover_execution`") # noqa: E501 collection_formats = {} path_params = {} - if 'workflow_execution_id_project' in params: - path_params['workflow_execution_id.project'] = params['workflow_execution_id_project'] # noqa: 
E501 - if 'workflow_execution_id_domain' in params: - path_params['workflow_execution_id.domain'] = params['workflow_execution_id_domain'] # noqa: E501 - if 'workflow_execution_id_name' in params: - path_params['workflow_execution_id.name'] = params['workflow_execution_id_name'] # noqa: E501 query_params = [] - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 - if 'unique_parent_id' in params: - query_params.append(('unique_parent_id', params['unique_parent_id'])) # noqa: E501 header_params = {} @@ -4371,6 +11938,113 @@ def list_node_executions_with_http_info(self, workflow_execution_id_project, wor local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/api/v1/executions/recover', 'POST', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='AdminExecutionCreateResponse', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + 
collection_formats=collection_formats) + + def recover_execution2(self, id_org, body, **kwargs): # noqa: E501 + """Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.recover_execution2(id_org, body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param AdminExecutionRecoverRequest body: (required) + :return: AdminExecutionCreateResponse + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.recover_execution2_with_http_info(id_org, body, **kwargs) # noqa: E501 + else: + (data) = self.recover_execution2_with_http_info(id_org, body, **kwargs) # noqa: E501 + return data + + def recover_execution2_with_http_info(self, id_org, body, **kwargs): # noqa: E501 + """Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. 
This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.recover_execution2_with_http_info(id_org, body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) + :param AdminExecutionRecoverRequest body: (required) + :return: AdminExecutionCreateResponse + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id_org', 'body'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method recover_execution2" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `recover_execution2`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `recover_execution2`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + 
local_var_files = {} + + body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -4383,14 +12057,14 @@ def list_node_executions_with_http_info(self, workflow_execution_id_project, wor auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}', 'GET', + '/api/v1/executions/org/{id.org}/recover', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminNodeExecutionList', # noqa: E501 + response_type='AdminExecutionCreateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -4398,71 +12072,43 @@ def list_node_executions_with_http_info(self, workflow_execution_id_project, wor _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_node_executions_for_task(self, task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + def register_project(self, body, **kwargs): # noqa: E501 + """Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_node_executions_for_task(task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, async_req=True) + >>> thread = api.register_project(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str task_execution_id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) - :param str task_execution_id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str task_execution_id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) - :param str task_execution_id_node_execution_id_node_id: (required) - :param str task_execution_id_task_id_project: Name of the project the resource belongs to. (required) - :param str task_execution_id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str task_execution_id_task_id_name: User provided value for the resource. (required) - :param str task_execution_id_task_id_version: Specific version of the resource. (required) - :param int task_execution_id_retry_attempt: (required) - :param str task_execution_id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. 
Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminNodeExecutionList + :param AdminProjectRegisterRequest body: (required) + :return: AdminProjectRegisterResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_node_executions_for_task_with_http_info(task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs) # noqa: E501 + return self.register_project_with_http_info(body, **kwargs) # noqa: E501 else: - (data) = self.list_node_executions_for_task_with_http_info(task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, 
task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs) # noqa: E501 + (data) = self.register_project_with_http_info(body, **kwargs) # noqa: E501 return data - def list_node_executions_for_task_with_http_info(self, task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + def register_project_with_http_info(self, body, **kwargs): # noqa: E501 + """Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_node_executions_for_task_with_http_info(task_execution_id_node_execution_id_execution_id_project, task_execution_id_node_execution_id_execution_id_domain, task_execution_id_node_execution_id_execution_id_name, task_execution_id_node_execution_id_node_id, task_execution_id_task_id_project, task_execution_id_task_id_domain, task_execution_id_task_id_name, task_execution_id_task_id_version, task_execution_id_retry_attempt, async_req=True) + >>> thread = api.register_project_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str task_execution_id_node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) - :param str task_execution_id_node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. 
(required) - :param str task_execution_id_node_execution_id_execution_id_name: User or system provided value for the resource. (required) - :param str task_execution_id_node_execution_id_node_id: (required) - :param str task_execution_id_task_id_project: Name of the project the resource belongs to. (required) - :param str task_execution_id_task_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str task_execution_id_task_id_name: User provided value for the resource. (required) - :param str task_execution_id_task_id_version: Specific version of the resource. (required) - :param int task_execution_id_retry_attempt: (required) - :param str task_execution_id_task_id_resource_type: Identifies the specific type of resource that this identifier corresponds to. - DATASET: A dataset represents an entity modeled in Flyte DataCatalog. A Dataset is also a versioned entity and can be a compilation of multiple individual objects. Eventually all Catalog objects should be modeled similar to Flyte Objects. The Dataset entities makes it possible for the UI and CLI to act on the objects in a similar manner to other Flyte objects - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the, server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. 
- :return: AdminNodeExecutionList + :param AdminProjectRegisterRequest body: (required) + :return: AdminProjectRegisterResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['task_execution_id_node_execution_id_execution_id_project', 'task_execution_id_node_execution_id_execution_id_domain', 'task_execution_id_node_execution_id_execution_id_name', 'task_execution_id_node_execution_id_node_id', 'task_execution_id_task_id_project', 'task_execution_id_task_id_domain', 'task_execution_id_task_id_name', 'task_execution_id_task_id_version', 'task_execution_id_retry_attempt', 'task_execution_id_task_id_resource_type', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -4473,82 +12119,20 @@ def list_node_executions_for_task_with_http_info(self, task_execution_id_node_ex if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_node_executions_for_task" % key + " to method register_project" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'task_execution_id_node_execution_id_execution_id_project' is set - if ('task_execution_id_node_execution_id_execution_id_project' not in params or - params['task_execution_id_node_execution_id_execution_id_project'] is None): - raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_execution_id_project` when calling `list_node_executions_for_task`") # noqa: E501 - # verify the required parameter 'task_execution_id_node_execution_id_execution_id_domain' is set - if ('task_execution_id_node_execution_id_execution_id_domain' not in params or - params['task_execution_id_node_execution_id_execution_id_domain'] is None): - raise ValueError("Missing the required parameter 
`task_execution_id_node_execution_id_execution_id_domain` when calling `list_node_executions_for_task`") # noqa: E501 - # verify the required parameter 'task_execution_id_node_execution_id_execution_id_name' is set - if ('task_execution_id_node_execution_id_execution_id_name' not in params or - params['task_execution_id_node_execution_id_execution_id_name'] is None): - raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_execution_id_name` when calling `list_node_executions_for_task`") # noqa: E501 - # verify the required parameter 'task_execution_id_node_execution_id_node_id' is set - if ('task_execution_id_node_execution_id_node_id' not in params or - params['task_execution_id_node_execution_id_node_id'] is None): - raise ValueError("Missing the required parameter `task_execution_id_node_execution_id_node_id` when calling `list_node_executions_for_task`") # noqa: E501 - # verify the required parameter 'task_execution_id_task_id_project' is set - if ('task_execution_id_task_id_project' not in params or - params['task_execution_id_task_id_project'] is None): - raise ValueError("Missing the required parameter `task_execution_id_task_id_project` when calling `list_node_executions_for_task`") # noqa: E501 - # verify the required parameter 'task_execution_id_task_id_domain' is set - if ('task_execution_id_task_id_domain' not in params or - params['task_execution_id_task_id_domain'] is None): - raise ValueError("Missing the required parameter `task_execution_id_task_id_domain` when calling `list_node_executions_for_task`") # noqa: E501 - # verify the required parameter 'task_execution_id_task_id_name' is set - if ('task_execution_id_task_id_name' not in params or - params['task_execution_id_task_id_name'] is None): - raise ValueError("Missing the required parameter `task_execution_id_task_id_name` when calling `list_node_executions_for_task`") # noqa: E501 - # verify the required parameter 'task_execution_id_task_id_version' is set - if 
('task_execution_id_task_id_version' not in params or - params['task_execution_id_task_id_version'] is None): - raise ValueError("Missing the required parameter `task_execution_id_task_id_version` when calling `list_node_executions_for_task`") # noqa: E501 - # verify the required parameter 'task_execution_id_retry_attempt' is set - if ('task_execution_id_retry_attempt' not in params or - params['task_execution_id_retry_attempt'] is None): - raise ValueError("Missing the required parameter `task_execution_id_retry_attempt` when calling `list_node_executions_for_task`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `register_project`") # noqa: E501 collection_formats = {} path_params = {} - if 'task_execution_id_node_execution_id_execution_id_project' in params: - path_params['task_execution_id.node_execution_id.execution_id.project'] = params['task_execution_id_node_execution_id_execution_id_project'] # noqa: E501 - if 'task_execution_id_node_execution_id_execution_id_domain' in params: - path_params['task_execution_id.node_execution_id.execution_id.domain'] = params['task_execution_id_node_execution_id_execution_id_domain'] # noqa: E501 - if 'task_execution_id_node_execution_id_execution_id_name' in params: - path_params['task_execution_id.node_execution_id.execution_id.name'] = params['task_execution_id_node_execution_id_execution_id_name'] # noqa: E501 - if 'task_execution_id_node_execution_id_node_id' in params: - path_params['task_execution_id.node_execution_id.node_id'] = params['task_execution_id_node_execution_id_node_id'] # noqa: E501 - if 'task_execution_id_task_id_project' in params: - path_params['task_execution_id.task_id.project'] = params['task_execution_id_task_id_project'] # noqa: E501 - if 'task_execution_id_task_id_domain' in params: - path_params['task_execution_id.task_id.domain'] = 
params['task_execution_id_task_id_domain'] # noqa: E501 - if 'task_execution_id_task_id_name' in params: - path_params['task_execution_id.task_id.name'] = params['task_execution_id_task_id_name'] # noqa: E501 - if 'task_execution_id_task_id_version' in params: - path_params['task_execution_id.task_id.version'] = params['task_execution_id_task_id_version'] # noqa: E501 - if 'task_execution_id_retry_attempt' in params: - path_params['task_execution_id.retry_attempt'] = params['task_execution_id_retry_attempt'] # noqa: E501 query_params = [] - if 'task_execution_id_task_id_resource_type' in params: - query_params.append(('task_execution_id.task_id.resource_type', params['task_execution_id_task_id_resource_type'])) # noqa: E501 - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 header_params = {} @@ -4556,6 +12140,8 @@ def list_node_executions_for_task_with_http_info(self, task_execution_id_node_ex local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -4568,14 +12154,14 @@ def list_node_executions_for_task_with_http_info(self, task_execution_id_node_ex auth_settings = [] # noqa: E501 return self.api_client.call_api( - 
'/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}', 'GET', + '/api/v1/projects', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminNodeExecutionList', # noqa: E501 + response_type='AdminProjectRegisterResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -4583,51 +12169,45 @@ def list_node_executions_for_task_with_http_info(self, task_execution_id_node_ex _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_projects(self, **kwargs): # noqa: E501 - """Fetches a list of :ref:`ref_flyteidl.admin.Project` # noqa: E501 + def register_project2(self, project_org, body, **kwargs): # noqa: E501 + """Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_projects(async_req=True) + >>> thread = api.register_project2(project_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param int limit: Indicates the number of projects to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. 
- :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminProjects + :param str project_org: Optional, org key applied to the resource. (required) + :param AdminProjectRegisterRequest body: (required) + :return: AdminProjectRegisterResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_projects_with_http_info(**kwargs) # noqa: E501 + return self.register_project2_with_http_info(project_org, body, **kwargs) # noqa: E501 else: - (data) = self.list_projects_with_http_info(**kwargs) # noqa: E501 + (data) = self.register_project2_with_http_info(project_org, body, **kwargs) # noqa: E501 return data - def list_projects_with_http_info(self, **kwargs): # noqa: E501 - """Fetches a list of :ref:`ref_flyteidl.admin.Project` # noqa: E501 + def register_project2_with_http_info(self, project_org, body, **kwargs): # noqa: E501 + """Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_projects_with_http_info(async_req=True) + >>> thread = api.register_project2_with_http_info(project_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param int limit: Indicates the number of projects to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. 
- :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminProjects + :param str project_org: Optional, org key applied to the resource. (required) + :param AdminProjectRegisterRequest body: (required) + :return: AdminProjectRegisterResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['project_org', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -4638,26 +12218,26 @@ def list_projects_with_http_info(self, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_projects" % key + " to method register_project2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'project_org' is set + if ('project_org' not in params or + params['project_org'] is None): + raise ValueError("Missing the required parameter `project_org` when calling `register_project2`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `register_project2`") # noqa: E501 collection_formats = {} path_params = {} + if 'project_org' in params: + path_params['project.org'] = params['project_org'] # noqa: E501 query_params = [] - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in 
params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 header_params = {} @@ -4665,6 +12245,8 @@ def list_projects_with_http_info(self, **kwargs): # noqa: E501 local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -4677,14 +12259,14 @@ def list_projects_with_http_info(self, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/projects', 'GET', + '/api/v1/projects/org/{project.org}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminProjects', # noqa: E501 + response_type='AdminProjectRegisterResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -4692,113 +12274,67 @@ def list_projects_with_http_info(self, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_task_executions(self, node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs): # noqa: E501 - """Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + def relaunch_execution(self, body, **kwargs): # noqa: E501 + """Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_task_executions(node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, async_req=True) + >>> thread = api.relaunch_execution(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) - :param str node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str node_execution_id_execution_id_name: User or system provided value for the resource. (required) - :param str node_execution_id_node_id: (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminTaskExecutionList + :param AdminExecutionRelaunchRequest body: (required) + :return: AdminExecutionCreateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_task_executions_with_http_info(node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs) # noqa: E501 + return self.relaunch_execution_with_http_info(body, **kwargs) # noqa: E501 else: - (data) = self.list_task_executions_with_http_info(node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs) # noqa: E501 + (data) = self.relaunch_execution_with_http_info(body, **kwargs) # noqa: E501 return data - def list_task_executions_with_http_info(self, node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, **kwargs): # noqa: E501 - """Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + def relaunch_execution_with_http_info(self, body, **kwargs): # noqa: E501 + """Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_task_executions_with_http_info(node_execution_id_execution_id_project, node_execution_id_execution_id_domain, node_execution_id_execution_id_name, node_execution_id_node_id, async_req=True) + >>> thread = api.relaunch_execution_with_http_info(body, async_req=True) >>> result = thread.get() :param async_req bool - :param str node_execution_id_execution_id_project: Name of the project the resource belongs to. (required) - :param str node_execution_id_execution_id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str node_execution_id_execution_id_name: User or system provided value for the resource. 
(required) - :param str node_execution_id_node_id: (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminTaskExecutionList + :param AdminExecutionRelaunchRequest body: (required) + :return: AdminExecutionCreateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['node_execution_id_execution_id_project', 'node_execution_id_execution_id_domain', 'node_execution_id_execution_id_name', 'node_execution_id_node_id', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method list_task_executions" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'node_execution_id_execution_id_project' is set - if ('node_execution_id_execution_id_project' not in params or - params['node_execution_id_execution_id_project'] is None): - raise ValueError("Missing the required parameter `node_execution_id_execution_id_project` when calling `list_task_executions`") # noqa: E501 - # verify the required parameter 'node_execution_id_execution_id_domain' is set - if 
('node_execution_id_execution_id_domain' not in params or - params['node_execution_id_execution_id_domain'] is None): - raise ValueError("Missing the required parameter `node_execution_id_execution_id_domain` when calling `list_task_executions`") # noqa: E501 - # verify the required parameter 'node_execution_id_execution_id_name' is set - if ('node_execution_id_execution_id_name' not in params or - params['node_execution_id_execution_id_name'] is None): - raise ValueError("Missing the required parameter `node_execution_id_execution_id_name` when calling `list_task_executions`") # noqa: E501 - # verify the required parameter 'node_execution_id_node_id' is set - if ('node_execution_id_node_id' not in params or - params['node_execution_id_node_id'] is None): - raise ValueError("Missing the required parameter `node_execution_id_node_id` when calling `list_task_executions`") # noqa: E501 - + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method relaunch_execution" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `relaunch_execution`") # noqa: E501 + collection_formats = {} path_params = {} - if 'node_execution_id_execution_id_project' in params: - path_params['node_execution_id.execution_id.project'] = params['node_execution_id_execution_id_project'] # noqa: E501 - if 'node_execution_id_execution_id_domain' in params: - path_params['node_execution_id.execution_id.domain'] = params['node_execution_id_execution_id_domain'] # noqa: E501 - if 'node_execution_id_execution_id_name' in params: - path_params['node_execution_id.execution_id.name'] = params['node_execution_id_execution_id_name'] # noqa: E501 - if 'node_execution_id_node_id' in params: - 
path_params['node_execution_id.node_id'] = params['node_execution_id_node_id'] # noqa: E501 query_params = [] - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 header_params = {} @@ -4806,6 +12342,8 @@ def list_task_executions_with_http_info(self, node_execution_id_execution_id_pro local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -4818,14 +12356,14 @@ def list_task_executions_with_http_info(self, node_execution_id_execution_id_pro auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}', 'GET', + '/api/v1/executions/relaunch', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminTaskExecutionList', # noqa: E501 + response_type='AdminExecutionCreateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -4833,55 +12371,45 @@ def list_task_executions_with_http_info(self, node_execution_id_execution_id_pro _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_task_ids(self, project, domain, **kwargs): # noqa: E501 - """Fetch a list of 
:ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. # noqa: E501 + def relaunch_execution2(self, id_org, body, **kwargs): # noqa: E501 + """Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_task_ids(project, domain, async_req=True) + >>> thread = api.relaunch_execution2(id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Name of the project that contains the identifiers. +required (required) - :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :param str filters: Indicates a list of filters passed as string. +optional. - :return: AdminNamedEntityIdentifierList + :param str id_org: Optional, org key applied to the resource. (required) + :param AdminExecutionRelaunchRequest body: (required) + :return: AdminExecutionCreateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_task_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + return self.relaunch_execution2_with_http_info(id_org, body, **kwargs) # noqa: E501 else: - (data) = self.list_task_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + (data) = self.relaunch_execution2_with_http_info(id_org, body, **kwargs) # noqa: E501 return data - def list_task_ids_with_http_info(self, project, domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. # noqa: E501 + def relaunch_execution2_with_http_info(self, id_org, body, **kwargs): # noqa: E501 + """Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_task_ids_with_http_info(project, domain, async_req=True) + >>> thread = api.relaunch_execution2_with_http_info(id_org, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Name of the project that contains the identifiers. +required (required) - :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :param str filters: Indicates a list of filters passed as string. +optional. - :return: AdminNamedEntityIdentifierList + :param str id_org: Optional, org key applied to the resource. 
(required) + :param AdminExecutionRelaunchRequest body: (required) + :return: AdminExecutionCreateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters'] # noqa: E501 + all_params = ['id_org', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -4892,38 +12420,26 @@ def list_task_ids_with_http_info(self, project, domain, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_task_ids" % key + " to method relaunch_execution2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'project' is set - if ('project' not in params or - params['project'] is None): - raise ValueError("Missing the required parameter `project` when calling `list_task_ids`") # noqa: E501 - # verify the required parameter 'domain' is set - if ('domain' not in params or - params['domain'] is None): - raise ValueError("Missing the required parameter `domain` when calling `list_task_ids`") # noqa: E501 + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `relaunch_execution2`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `relaunch_execution2`") # noqa: E501 collection_formats = {} path_params = {} - if 'project' in params: - path_params['project'] = params['project'] # noqa: E501 - if 'domain' in params: - path_params['domain'] = params['domain'] # noqa: E501 + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 query_params = [] - if 'limit' in params: - query_params.append(('limit', 
params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 header_params = {} @@ -4931,6 +12447,8 @@ def list_task_ids_with_http_info(self, project, domain, **kwargs): # noqa: E501 local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -4943,14 +12461,14 @@ def list_task_ids_with_http_info(self, project, domain, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/task_ids/{project}/{domain}', 'GET', + '/api/v1/executions/org/{id.org}/relaunch', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminNamedEntityIdentifierList', # noqa: E501 + response_type='AdminExecutionCreateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -4958,57 +12476,49 @@ def list_task_ids_with_http_info(self, project, domain, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_tasks(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + def terminate_execution(self, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_tasks(id_project, id_domain, id_name, async_req=True) + >>> thread = api.terminate_execution(id_project, id_domain, id_name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminTaskList + :param str id_name: User or system provided value for the resource. (required) + :param AdminExecutionTerminateRequest body: (required) + :return: AdminExecutionTerminateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_tasks_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + return self.terminate_execution_with_http_info(id_project, id_domain, id_name, body, **kwargs) # noqa: E501 else: - (data) = self.list_tasks_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + (data) = self.terminate_execution_with_http_info(id_project, id_domain, id_name, body, **kwargs) # noqa: E501 return data - def list_tasks_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + def terminate_execution_with_http_info(self, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_tasks_with_http_info(id_project, id_domain, id_name, async_req=True) + >>> thread = api.terminate_execution_with_http_info(id_project, id_domain, id_name, body, async_req=True) >>> result = thread.get() :param async_req bool :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. 
More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminTaskList + :param str id_name: User or system provided value for the resource. (required) + :param AdminExecutionTerminateRequest body: (required) + :return: AdminExecutionTerminateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['id_project', 'id_domain', 'id_name', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -5019,22 +12529,26 @@ def list_tasks_with_http_info(self, id_project, id_domain, id_name, **kwargs): if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_tasks" % key + " to method terminate_execution" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'id_project' is set if ('id_project' not in params or params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `list_tasks`") # noqa: E501 + raise ValueError("Missing the required parameter `id_project` when calling `terminate_execution`") # noqa: E501 # verify the required parameter 'id_domain' is set if ('id_domain' not in params or params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `list_tasks`") # noqa: E501 + raise ValueError("Missing the required parameter `id_domain` when calling `terminate_execution`") # noqa: E501 # verify the required parameter 'id_name' is set if ('id_name' not in params or params['id_name'] is None): - raise 
ValueError("Missing the required parameter `id_name` when calling `list_tasks`") # noqa: E501 + raise ValueError("Missing the required parameter `id_name` when calling `terminate_execution`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `terminate_execution`") # noqa: E501 collection_formats = {} @@ -5047,16 +12561,6 @@ def list_tasks_with_http_info(self, id_project, id_domain, id_name, **kwargs): path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 header_params = {} @@ -5064,6 +12568,8 @@ def list_tasks_with_http_info(self, id_project, id_domain, id_name, **kwargs): local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -5076,14 +12582,14 @@ def list_tasks_with_http_info(self, id_project, id_domain, id_name, **kwargs): auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/tasks/{id.project}/{id.domain}/{id.name}', 'GET', + '/api/v1/executions/{id.project}/{id.domain}/{id.name}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminTaskList', # noqa: E501 + response_type='AdminExecutionTerminateResponse', # noqa: E501 auth_settings=auth_settings, 
async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -5091,57 +12597,51 @@ def list_tasks_with_http_info(self, id_project, id_domain, id_name, **kwargs): _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_tasks2(self, id_project, id_domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + def terminate_execution2(self, id_org, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_tasks2(id_project, id_domain, async_req=True) + >>> thread = api.terminate_execution2(id_org, id_project, id_domain, id_name, body, async_req=True) >>> result = thread.get() :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. 
+optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminTaskList + :param str id_name: User or system provided value for the resource. (required) + :param AdminExecutionTerminateRequest body: (required) + :return: AdminExecutionTerminateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_tasks2_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + return self.terminate_execution2_with_http_info(id_org, id_project, id_domain, id_name, body, **kwargs) # noqa: E501 else: - (data) = self.list_tasks2_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + (data) = self.terminate_execution2_with_http_info(id_org, id_project, id_domain, id_name, body, **kwargs) # noqa: E501 return data - def list_tasks2_with_http_info(self, id_project, id_domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + def terminate_execution2_with_http_info(self, id_org, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_tasks2_with_http_info(id_project, id_domain, async_req=True) + >>> thread = api.terminate_execution2_with_http_info(id_org, id_project, id_domain, id_name, body, async_req=True) >>> result = thread.get() :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. 
The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminTaskList + :param str id_name: User or system provided value for the resource. (required) + :param AdminExecutionTerminateRequest body: (required) + :return: AdminExecutionTerminateResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -5152,40 +12652,44 @@ def list_tasks2_with_http_info(self, id_project, id_domain, **kwargs): # noqa: if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_tasks2" % key + " to method terminate_execution2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `terminate_execution2`") # noqa: E501 # verify the required parameter 'id_project' is set if ('id_project' not in params or params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `list_tasks2`") # noqa: E501 + raise ValueError("Missing the required parameter `id_project` when calling `terminate_execution2`") # noqa: E501 # verify the required parameter 'id_domain' is set if ('id_domain' not in params or params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `list_tasks2`") # noqa: E501 + raise ValueError("Missing the required parameter `id_domain` when calling `terminate_execution2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `terminate_execution2`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `terminate_execution2`") # noqa: E501 collection_formats = {} 
path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 if 'id_project' in params: path_params['id.project'] = params['id_project'] # noqa: E501 if 'id_domain' in params: path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] - if 'id_name' in params: - query_params.append(('id.name', params['id_name'])) # noqa: E501 - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 header_params = {} @@ -5193,6 +12697,8 @@ def list_tasks2_with_http_info(self, id_project, id_domain, **kwargs): # noqa: local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -5205,14 +12711,14 @@ def list_tasks2_with_http_info(self, id_project, id_domain, **kwargs): # noqa: auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/tasks/{id.project}/{id.domain}', 'GET', + '/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}', 'DELETE', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminTaskList', # noqa: E501 + response_type='AdminExecutionTerminateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -5220,55 +12726,49 @@ def 
list_tasks2_with_http_info(self, id_project, id_domain, **kwargs): # noqa: _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_workflow_ids(self, project, domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. # noqa: E501 + def update_execution(self, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_workflow_ids(project, domain, async_req=True) + >>> thread = api.update_execution(id_project, id_domain, id_name, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str project: Name of the project that contains the identifiers. +required (required) - :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :param str filters: Indicates a list of filters passed as string. +optional. - :return: AdminNamedEntityIdentifierList + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User or system provided value for the resource. 
(required) + :param AdminExecutionUpdateRequest body: (required) + :return: AdminExecutionUpdateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_workflow_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + return self.update_execution_with_http_info(id_project, id_domain, id_name, body, **kwargs) # noqa: E501 else: - (data) = self.list_workflow_ids_with_http_info(project, domain, **kwargs) # noqa: E501 + (data) = self.update_execution_with_http_info(id_project, id_domain, id_name, body, **kwargs) # noqa: E501 return data - def list_workflow_ids_with_http_info(self, project, domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. # noqa: E501 + def update_execution_with_http_info(self, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_workflow_ids_with_http_info(project, domain, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str project: Name of the project that contains the identifiers. +required (required) - :param str domain: Name of the domain the identifiers belongs to within the project. +required (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, the server-provided token can be used to fetch the next page in a query. +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. 
- DESCENDING: By default, fields are sorted in descending order. - :param str filters: Indicates a list of filters passed as string. +optional. - :return: AdminNamedEntityIdentifierList + asynchronous HTTP request, please pass async_req=True + >>> thread = api.update_execution_with_http_info(id_project, id_domain, id_name, body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User or system provided value for the resource. (required) + :param AdminExecutionUpdateRequest body: (required) + :return: AdminExecutionUpdateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['project', 'domain', 'limit', 'token', 'sort_by_key', 'sort_by_direction', 'filters'] # noqa: E501 + all_params = ['id_project', 'id_domain', 'id_name', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -5279,38 +12779,38 @@ def list_workflow_ids_with_http_info(self, project, domain, **kwargs): # noqa: if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_workflow_ids" % key + " to method update_execution" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'project' is set - if ('project' not in params or - params['project'] is None): - raise ValueError("Missing the required parameter `project` when calling `list_workflow_ids`") # noqa: E501 - # verify the required parameter 'domain' is set - if ('domain' not in params or - params['domain'] is None): - raise ValueError("Missing the required parameter `domain` when calling `list_workflow_ids`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' 
not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `update_execution`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `update_execution`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `update_execution`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `update_execution`") # noqa: E501 collection_formats = {} path_params = {} - if 'project' in params: - path_params['project'] = params['project'] # noqa: E501 - if 'domain' in params: - path_params['domain'] = params['domain'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 header_params = {} @@ -5318,6 +12818,8 @@ def list_workflow_ids_with_http_info(self, project, domain, **kwargs): # noqa: local_var_files = {} body_params = None + if 'body' in params: + 
body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -5330,14 +12832,14 @@ def list_workflow_ids_with_http_info(self, project, domain, **kwargs): # noqa: auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/workflow_ids/{project}/{domain}', 'GET', + '/api/v1/executions/{id.project}/{id.domain}/{id.name}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminNamedEntityIdentifierList', # noqa: E501 + response_type='AdminExecutionUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -5345,57 +12847,51 @@ def list_workflow_ids_with_http_info(self, project, domain, **kwargs): # noqa: _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_workflows(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 + def update_execution2(self, id_org, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_workflows(id_project, id_domain, id_name, async_req=True) + >>> thread = api.update_execution2(id_org, id_project, id_domain, id_name, body, async_req=True) >>> result = thread.get() :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. 
(required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminWorkflowList + :param str id_name: User or system provided value for the resource. (required) + :param AdminExecutionUpdateRequest body: (required) + :return: AdminExecutionUpdateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_workflows_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + return self.update_execution2_with_http_info(id_org, id_project, id_domain, id_name, body, **kwargs) # noqa: E501 else: - (data) = self.list_workflows_with_http_info(id_project, id_domain, id_name, **kwargs) # noqa: E501 + (data) = self.update_execution2_with_http_info(id_org, id_project, id_domain, id_name, body, **kwargs) # noqa: E501 return data - def list_workflows_with_http_info(self, id_project, id_domain, id_name, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. 
# noqa: E501 + def update_execution2_with_http_info(self, id_org, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_workflows_with_http_info(id_project, id_domain, id_name, async_req=True) + >>> thread = api.update_execution2_with_http_info(id_org, id_project, id_domain, id_name, body, async_req=True) >>> result = thread.get() :param async_req bool + :param str id_org: Optional, org key applied to the resource. (required) :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminWorkflowList + :param str id_name: User or system provided value for the resource. (required) + :param AdminExecutionUpdateRequest body: (required) + :return: AdminExecutionUpdateResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -5406,26 +12902,36 @@ def list_workflows_with_http_info(self, id_project, id_domain, id_name, **kwargs if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_workflows" % key + " to method update_execution2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `update_execution2`") # noqa: E501 # verify the required parameter 'id_project' is set if ('id_project' not in params or params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `list_workflows`") # noqa: E501 + raise ValueError("Missing the required parameter `id_project` when calling `update_execution2`") # noqa: E501 # verify the required parameter 'id_domain' is set if ('id_domain' not in params or params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `list_workflows`") # noqa: E501 + raise ValueError("Missing the required parameter `id_domain` when calling `update_execution2`") # noqa: E501 # verify the required parameter 'id_name' is set if ('id_name' not in params or params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `list_workflows`") # noqa: E501 + raise ValueError("Missing the required parameter `id_name` when calling `update_execution2`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required 
parameter `body` when calling `update_execution2`") # noqa: E501 collection_formats = {} path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 if 'id_project' in params: path_params['id.project'] = params['id_project'] # noqa: E501 if 'id_domain' in params: @@ -5434,16 +12940,6 @@ def list_workflows_with_http_info(self, id_project, id_domain, id_name, **kwargs path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 header_params = {} @@ -5451,6 +12947,8 @@ def list_workflows_with_http_info(self, id_project, id_domain, id_name, **kwargs local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -5463,14 +12961,14 @@ def list_workflows_with_http_info(self, id_project, id_domain, id_name, **kwargs auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/workflows/{id.project}/{id.domain}/{id.name}', 'GET', + '/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminWorkflowList', # noqa: E501 + response_type='AdminExecutionUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -5478,57 +12976,51 @@ 
def list_workflows_with_http_info(self, id_project, id_domain, id_name, **kwargs _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def list_workflows2(self, id_project, id_domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 + def update_launch_plan(self, id_project, id_domain, id_name, id_version, body, **kwargs): # noqa: E501 + """Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_workflows2(id_project, id_domain, async_req=True) + >>> thread = api.update_launch_plan(id_project, id_domain, id_name, id_version, body, async_req=True) >>> result = thread.get() :param async_req bool :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. - :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminWorkflowList + :param str id_name: User provided value for the resource. 
(required) + :param str id_version: Specific version of the resource. (required) + :param AdminLaunchPlanUpdateRequest body: (required) + :return: AdminLaunchPlanUpdateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.list_workflows2_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + return self.update_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, body, **kwargs) # noqa: E501 else: - (data) = self.list_workflows2_with_http_info(id_project, id_domain, **kwargs) # noqa: E501 + (data) = self.update_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, body, **kwargs) # noqa: E501 return data - def list_workflows2_with_http_info(self, id_project, id_domain, **kwargs): # noqa: E501 - """Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 + def update_launch_plan_with_http_info(self, id_project, id_domain, id_name, id_version, body, **kwargs): # noqa: E501 + """Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.list_workflows2_with_http_info(id_project, id_domain, async_req=True) + >>> thread = api.update_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, body, async_req=True) >>> result = thread.get() :param async_req bool :param str id_project: Name of the project the resource belongs to. (required) :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans'. 
- :param int limit: Indicates the number of resources to be returned. +required. - :param str token: In the case of multiple pages of results, this server-provided token can be used to fetch the next page in a query. +optional. - :param str filters: Indicates a list of filters passed as string. More info on constructing filters : +optional. - :param str sort_by_key: Indicates an attribute to sort the response values. +required. - :param str sort_by_direction: Indicates the direction to apply sort key for response values. +optional. - DESCENDING: By default, fields are sorted in descending order. - :return: AdminWorkflowList + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :param AdminLaunchPlanUpdateRequest body: (required) + :return: AdminLaunchPlanUpdateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['id_project', 'id_domain', 'id_name', 'limit', 'token', 'filters', 'sort_by_key', 'sort_by_direction'] # noqa: E501 + all_params = ['id_project', 'id_domain', 'id_name', 'id_version', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -5539,18 +13031,30 @@ def list_workflows2_with_http_info(self, id_project, id_domain, **kwargs): # no if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method list_workflows2" % key + " to method update_launch_plan" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'id_project' is set if ('id_project' not in params or params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `list_workflows2`") # noqa: E501 + raise ValueError("Missing the required parameter `id_project` when calling `update_launch_plan`") # noqa: E501 # verify the required parameter 'id_domain' is set if ('id_domain' not in 
params or params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `list_workflows2`") # noqa: E501 + raise ValueError("Missing the required parameter `id_domain` when calling `update_launch_plan`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `update_launch_plan`") # noqa: E501 + # verify the required parameter 'id_version' is set + if ('id_version' not in params or + params['id_version'] is None): + raise ValueError("Missing the required parameter `id_version` when calling `update_launch_plan`") # noqa: E501 + # verify the required parameter 'body' is set + if ('body' not in params or + params['body'] is None): + raise ValueError("Missing the required parameter `body` when calling `update_launch_plan`") # noqa: E501 collection_formats = {} @@ -5559,20 +13063,12 @@ def list_workflows2_with_http_info(self, id_project, id_domain, **kwargs): # no path_params['id.project'] = params['id_project'] # noqa: E501 if 'id_domain' in params: path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + if 'id_version' in params: + path_params['id.version'] = params['id_version'] # noqa: E501 query_params = [] - if 'id_name' in params: - query_params.append(('id.name', params['id_name'])) # noqa: E501 - if 'limit' in params: - query_params.append(('limit', params['limit'])) # noqa: E501 - if 'token' in params: - query_params.append(('token', params['token'])) # noqa: E501 - if 'filters' in params: - query_params.append(('filters', params['filters'])) # noqa: E501 - if 'sort_by_key' in params: - query_params.append(('sort_by.key', params['sort_by_key'])) # noqa: E501 - if 'sort_by_direction' in params: - query_params.append(('sort_by.direction', params['sort_by_direction'])) # noqa: E501 
header_params = {} @@ -5580,6 +13076,8 @@ def list_workflows2_with_http_info(self, id_project, id_domain, **kwargs): # no local_var_files = {} body_params = None + if 'body' in params: + body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -5592,14 +13090,14 @@ def list_workflows2_with_http_info(self, id_project, id_domain, **kwargs): # no auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/workflows/{id.project}/{id.domain}', 'GET', + '/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminWorkflowList', # noqa: E501 + response_type='AdminLaunchPlanUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -5607,43 +13105,51 @@ def list_workflows2_with_http_info(self, id_project, id_domain, **kwargs): # no _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def recover_execution(self, body, **kwargs): # noqa: E501 - """Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. 
# noqa: E501 + def update_launch_plan2(self, id_org, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.recover_execution(body, async_req=True) + >>> thread = api.update_launch_plan2(id_org, id_project, id_domain, id_name, id_version, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminExecutionRecoverRequest body: (required) - :return: AdminExecutionCreateResponse + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :return: AdminLaunchPlanUpdateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.recover_execution_with_http_info(body, **kwargs) # noqa: E501 + return self.update_launch_plan2_with_http_info(id_org, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 else: - (data) = self.recover_execution_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.update_launch_plan2_with_http_info(id_org, id_project, id_domain, id_name, id_version, **kwargs) # noqa: E501 return data - def recover_execution_with_http_info(self, body, **kwargs): # noqa: E501 - """Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. 
This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. # noqa: E501 + def update_launch_plan2_with_http_info(self, id_org, id_project, id_domain, id_name, id_version, **kwargs): # noqa: E501 + """Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.recover_execution_with_http_info(body, async_req=True) + >>> thread = api.update_launch_plan2_with_http_info(id_org, id_project, id_domain, id_name, id_version, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminExecutionRecoverRequest body: (required) - :return: AdminExecutionCreateResponse + :param str id_org: Optional, org key applied to the resource. (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. (required) + :param str id_version: Specific version of the resource. (required) + :return: AdminLaunchPlanUpdateResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['body'] # noqa: E501 + all_params = ['id_org', 'id_project', 'id_domain', 'id_name', 'id_version'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -5654,18 +13160,44 @@ def recover_execution_with_http_info(self, body, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method recover_execution" % key + " to method update_launch_plan2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'body' is set - if ('body' not in params or - params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `recover_execution`") # noqa: E501 + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `update_launch_plan2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `update_launch_plan2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `update_launch_plan2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `update_launch_plan2`") # noqa: E501 + # verify the required parameter 'id_version' is set + if ('id_version' not in params or + params['id_version'] is None): + raise ValueError("Missing the required parameter `id_version` when calling `update_launch_plan2`") # noqa: E501 collection_formats = {} path_params = {} + if 'id_org' in params: + 
path_params['id.org'] = params['id_org'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 + if 'id_version' in params: + path_params['id.version'] = params['id_version'] # noqa: E501 query_params = [] @@ -5675,8 +13207,6 @@ def recover_execution_with_http_info(self, body, **kwargs): # noqa: E501 local_var_files = {} body_params = None - if 'body' in params: - body_params = params['body'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) # noqa: E501 @@ -5689,14 +13219,14 @@ def recover_execution_with_http_info(self, body, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/executions/recover', 'POST', + '/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminExecutionCreateResponse', # noqa: E501 + response_type='AdminLaunchPlanUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -5704,43 +13234,51 @@ def recover_execution_with_http_info(self, body, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def register_project(self, body, **kwargs): # noqa: E501 - """Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. # noqa: E501 + def update_named_entity(self, resource_type, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.register_project(body, async_req=True) + >>> thread = api.update_named_entity(resource_type, id_project, id_domain, id_name, body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminProjectRegisterRequest body: (required) - :return: AdminProjectRegisterResponse + :param str resource_type: Resource type of the metadata to update +required (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param AdminNamedEntityUpdateRequest body: (required) + :return: AdminNamedEntityUpdateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.register_project_with_http_info(body, **kwargs) # noqa: E501 + return self.update_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, body, **kwargs) # noqa: E501 else: - (data) = self.register_project_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.update_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, body, **kwargs) # noqa: E501 return data - def register_project_with_http_info(self, body, **kwargs): # noqa: E501 - """Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. # noqa: E501 + def update_named_entity_with_http_info(self, resource_type, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.register_project_with_http_info(body, async_req=True) + >>> thread = api.update_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminProjectRegisterRequest body: (required) - :return: AdminProjectRegisterResponse + :param str resource_type: Resource type of the metadata to update +required (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param AdminNamedEntityUpdateRequest body: (required) + :return: AdminNamedEntityUpdateResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['body'] # noqa: E501 + all_params = ['resource_type', 'id_project', 'id_domain', 'id_name', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -5751,18 +13289,42 @@ def register_project_with_http_info(self, body, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method register_project" % key + " to method update_named_entity" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'resource_type' is set + if ('resource_type' not in params or + params['resource_type'] is None): + raise ValueError("Missing the required parameter `resource_type` when calling `update_named_entity`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `update_named_entity`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `update_named_entity`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `update_named_entity`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `register_project`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `update_named_entity`") # noqa: E501 collection_formats = {} path_params = {} + if 'resource_type' in params: + path_params['resource_type'] = params['resource_type'] # noqa: E501 + if 'id_project' in params: + 
path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] @@ -5786,14 +13348,14 @@ def register_project_with_http_info(self, body, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/projects', 'POST', + '/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminProjectRegisterResponse', # noqa: E501 + response_type='AdminNamedEntityUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -5801,43 +13363,53 @@ def register_project_with_http_info(self, body, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def relaunch_execution(self, body, **kwargs): # noqa: E501 - """Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` # noqa: E501 + def update_named_entity2(self, id_org, resource_type, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.relaunch_execution(body, async_req=True) + >>> thread = api.update_named_entity2(id_org, resource_type, id_project, id_domain, id_name, body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminExecutionRelaunchRequest body: (required) - :return: AdminExecutionCreateResponse + :param str id_org: Optional, org key applied to the resource. 
(required) + :param str resource_type: Resource type of the metadata to update +required (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param AdminNamedEntityUpdateRequest body: (required) + :return: AdminNamedEntityUpdateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.relaunch_execution_with_http_info(body, **kwargs) # noqa: E501 + return self.update_named_entity2_with_http_info(id_org, resource_type, id_project, id_domain, id_name, body, **kwargs) # noqa: E501 else: - (data) = self.relaunch_execution_with_http_info(body, **kwargs) # noqa: E501 + (data) = self.update_named_entity2_with_http_info(id_org, resource_type, id_project, id_domain, id_name, body, **kwargs) # noqa: E501 return data - def relaunch_execution_with_http_info(self, body, **kwargs): # noqa: E501 - """Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` # noqa: E501 + def update_named_entity2_with_http_info(self, id_org, resource_type, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 + """Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.relaunch_execution_with_http_info(body, async_req=True) + >>> thread = api.update_named_entity2_with_http_info(id_org, resource_type, id_project, id_domain, id_name, body, async_req=True) >>> result = thread.get() :param async_req bool - :param AdminExecutionRelaunchRequest body: (required) - :return: AdminExecutionCreateResponse + :param str id_org: Optional, org key applied to the resource. (required) + :param str resource_type: Resource type of the metadata to update +required (required) + :param str id_project: Name of the project the resource belongs to. (required) + :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) + :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) + :param AdminNamedEntityUpdateRequest body: (required) + :return: AdminNamedEntityUpdateResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['body'] # noqa: E501 + all_params = ['id_org', 'resource_type', 'id_project', 'id_domain', 'id_name', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -5848,18 +13420,48 @@ def relaunch_execution_with_http_info(self, body, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method relaunch_execution" % key + " to method update_named_entity2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'id_org' is set + if ('id_org' not in params or + params['id_org'] is None): + raise ValueError("Missing the required parameter `id_org` when calling `update_named_entity2`") # noqa: E501 + # verify the required parameter 'resource_type' is set + if ('resource_type' not in params or + params['resource_type'] is None): + raise ValueError("Missing the required parameter `resource_type` when calling `update_named_entity2`") # noqa: E501 + # verify the required parameter 'id_project' is set + if ('id_project' not in params or + params['id_project'] is None): + raise ValueError("Missing the required parameter `id_project` when calling `update_named_entity2`") # noqa: E501 + # verify the required parameter 'id_domain' is set + if ('id_domain' not in params or + params['id_domain'] is None): + raise ValueError("Missing the required parameter `id_domain` when calling `update_named_entity2`") # noqa: E501 + # verify the required parameter 'id_name' is set + if ('id_name' not in params or + params['id_name'] is None): + raise ValueError("Missing the required parameter `id_name` when calling `update_named_entity2`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `relaunch_execution`") # noqa: E501 + raise ValueError("Missing the required parameter `body` 
when calling `update_named_entity2`") # noqa: E501 collection_formats = {} path_params = {} + if 'id_org' in params: + path_params['id.org'] = params['id_org'] # noqa: E501 + if 'resource_type' in params: + path_params['resource_type'] = params['resource_type'] # noqa: E501 + if 'id_project' in params: + path_params['id.project'] = params['id_project'] # noqa: E501 + if 'id_domain' in params: + path_params['id.domain'] = params['id_domain'] # noqa: E501 + if 'id_name' in params: + path_params['id.name'] = params['id_name'] # noqa: E501 query_params = [] @@ -5883,14 +13485,14 @@ def relaunch_execution_with_http_info(self, body, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/executions/relaunch', 'POST', + '/api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminExecutionCreateResponse', # noqa: E501 + response_type='AdminNamedEntityUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -5898,49 +13500,45 @@ def relaunch_execution_with_http_info(self, body, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def terminate_execution(self, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 - """Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + def update_project(self, id, body, **kwargs): # noqa: E501 + """Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.terminate_execution(id_project, id_domain, id_name, body, async_req=True) + >>> thread = api.update_project(id, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User or system provided value for the resource. (required) - :param AdminExecutionTerminateRequest body: (required) - :return: AdminExecutionTerminateResponse + :param str id: Globally unique project name. (required) + :param AdminProject body: (required) + :return: AdminProjectUpdateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.terminate_execution_with_http_info(id_project, id_domain, id_name, body, **kwargs) # noqa: E501 + return self.update_project_with_http_info(id, body, **kwargs) # noqa: E501 else: - (data) = self.terminate_execution_with_http_info(id_project, id_domain, id_name, body, **kwargs) # noqa: E501 + (data) = self.update_project_with_http_info(id, body, **kwargs) # noqa: E501 return data - def terminate_execution_with_http_info(self, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 - """Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + def update_project_with_http_info(self, id, body, **kwargs): # noqa: E501 + """Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.terminate_execution_with_http_info(id_project, id_domain, id_name, body, async_req=True) + >>> thread = api.update_project_with_http_info(id, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User or system provided value for the resource. (required) - :param AdminExecutionTerminateRequest body: (required) - :return: AdminExecutionTerminateResponse + :param str id: Globally unique project name. (required) + :param AdminProject body: (required) + :return: AdminProjectUpdateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['id_project', 'id_domain', 'id_name', 'body'] # noqa: E501 + all_params = ['id', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -5951,36 +13549,24 @@ def terminate_execution_with_http_info(self, id_project, id_domain, id_name, bod if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method terminate_execution" % key + " to method update_project" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `terminate_execution`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if ('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `terminate_execution`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in 
params or - params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `terminate_execution`") # noqa: E501 + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `update_project`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `terminate_execution`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `update_project`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = params['id_name'] # noqa: E501 + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 query_params = [] @@ -6004,14 +13590,14 @@ def terminate_execution_with_http_info(self, id_project, id_domain, id_name, bod auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/executions/{id.project}/{id.domain}/{id.name}', 'DELETE', + '/api/v1/projects/{id}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminExecutionTerminateResponse', # noqa: E501 + response_type='AdminProjectUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -6019,49 +13605,47 @@ def terminate_execution_with_http_info(self, id_project, id_domain, id_name, bod _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def update_execution(self, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 - """Update execution 
belonging to project domain :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + def update_project2(self, org, id, body, **kwargs): # noqa: E501 + """Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_execution(id_project, id_domain, id_name, body, async_req=True) + >>> thread = api.update_project2(org, id, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User or system provided value for the resource. (required) - :param AdminExecutionUpdateRequest body: (required) - :return: AdminExecutionUpdateResponse + :param str org: Optional, org key applied to the resource. (required) + :param str id: Globally unique project name. (required) + :param AdminProject body: (required) + :return: AdminProjectUpdateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.update_execution_with_http_info(id_project, id_domain, id_name, body, **kwargs) # noqa: E501 + return self.update_project2_with_http_info(org, id, body, **kwargs) # noqa: E501 else: - (data) = self.update_execution_with_http_info(id_project, id_domain, id_name, body, **kwargs) # noqa: E501 + (data) = self.update_project2_with_http_info(org, id, body, **kwargs) # noqa: E501 return data - def update_execution_with_http_info(self, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 - """Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + def update_project2_with_http_info(self, org, id, body, **kwargs): # noqa: E501 + """Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_execution_with_http_info(id_project, id_domain, id_name, body, async_req=True) + >>> thread = api.update_project2_with_http_info(org, id, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User or system provided value for the resource. (required) - :param AdminExecutionUpdateRequest body: (required) - :return: AdminExecutionUpdateResponse + :param str org: Optional, org key applied to the resource. (required) + :param str id: Globally unique project name. 
(required) + :param AdminProject body: (required) + :return: AdminProjectUpdateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['id_project', 'id_domain', 'id_name', 'body'] # noqa: E501 + all_params = ['org', 'id', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -6072,36 +13656,30 @@ def update_execution_with_http_info(self, id_project, id_domain, id_name, body, if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method update_execution" % key + " to method update_project2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `update_execution`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if ('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `update_execution`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in params or - params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `update_execution`") # noqa: E501 + # verify the required parameter 'org' is set + if ('org' not in params or + params['org'] is None): + raise ValueError("Missing the required parameter `org` when calling `update_project2`") # noqa: E501 + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `update_project2`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `update_execution`") # noqa: E501 
+ raise ValueError("Missing the required parameter `body` when calling `update_project2`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = params['id_name'] # noqa: E501 + if 'org' in params: + path_params['org'] = params['org'] # noqa: E501 + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 query_params = [] @@ -6125,14 +13703,14 @@ def update_execution_with_http_info(self, id_project, id_domain, id_name, body, auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/executions/{id.project}/{id.domain}/{id.name}', 'PUT', + '/api/v1/projects/org/{org}/{id}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminExecutionUpdateResponse', # noqa: E501 + response_type='AdminProjectUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -6140,51 +13718,45 @@ def update_execution_with_http_info(self, id_project, id_domain, id_name, body, _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def update_launch_plan(self, id_project, id_domain, id_name, id_version, body, **kwargs): # noqa: E501 - """Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 + def update_project_attributes(self, attributes_project, body, **kwargs): # noqa: E501 + """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_launch_plan(id_project, id_domain, id_name, id_version, body, async_req=True) + >>> thread = api.update_project_attributes(attributes_project, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. (required) - :param str id_version: Specific version of the resource. (required) - :param AdminLaunchPlanUpdateRequest body: (required) - :return: AdminLaunchPlanUpdateResponse + :param str attributes_project: Unique project id for which this set of attributes will be applied. (required) + :param AdminProjectAttributesUpdateRequest body: (required) + :return: AdminProjectAttributesUpdateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.update_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, body, **kwargs) # noqa: E501 + return self.update_project_attributes_with_http_info(attributes_project, body, **kwargs) # noqa: E501 else: - (data) = self.update_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, body, **kwargs) # noqa: E501 + (data) = self.update_project_attributes_with_http_info(attributes_project, body, **kwargs) # noqa: E501 return data - def update_launch_plan_with_http_info(self, id_project, id_domain, id_name, id_version, body, **kwargs): # noqa: E501 - """Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. 
# noqa: E501 + def update_project_attributes_with_http_info(self, attributes_project, body, **kwargs): # noqa: E501 + """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_launch_plan_with_http_info(id_project, id_domain, id_name, id_version, body, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. (required) - :param str id_version: Specific version of the resource. (required) - :param AdminLaunchPlanUpdateRequest body: (required) - :return: AdminLaunchPlanUpdateResponse + >>> thread = api.update_project_attributes_with_http_info(attributes_project, body, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str attributes_project: Unique project id for which this set of attributes will be applied. (required) + :param AdminProjectAttributesUpdateRequest body: (required) + :return: AdminProjectAttributesUpdateResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id_project', 'id_domain', 'id_name', 'id_version', 'body'] # noqa: E501 + all_params = ['attributes_project', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -6195,42 +13767,24 @@ def update_launch_plan_with_http_info(self, id_project, id_domain, id_name, id_v if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method update_launch_plan" % key + " to method update_project_attributes" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `update_launch_plan`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if ('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `update_launch_plan`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in params or - params['id_name'] is None): - raise ValueError("Missing the required parameter `id_name` when calling `update_launch_plan`") # noqa: E501 - # verify the required parameter 'id_version' is set - if ('id_version' not in params or - params['id_version'] is None): - raise ValueError("Missing the required parameter `id_version` when calling `update_launch_plan`") # noqa: E501 + # verify the required parameter 'attributes_project' is set + if ('attributes_project' not in params or + params['attributes_project'] is None): + raise ValueError("Missing the required parameter `attributes_project` when calling `update_project_attributes`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `update_launch_plan`") # noqa: E501 + 
raise ValueError("Missing the required parameter `body` when calling `update_project_attributes`") # noqa: E501 collection_formats = {} path_params = {} - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = params['id_name'] # noqa: E501 - if 'id_version' in params: - path_params['id.version'] = params['id_version'] # noqa: E501 + if 'attributes_project' in params: + path_params['attributes.project'] = params['attributes_project'] # noqa: E501 query_params = [] @@ -6254,14 +13808,14 @@ def update_launch_plan_with_http_info(self, id_project, id_domain, id_name, id_v auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}', 'PUT', + '/api/v1/project_attributes/{attributes.project}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminLaunchPlanUpdateResponse', # noqa: E501 + response_type='AdminProjectAttributesUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -6269,51 +13823,47 @@ def update_launch_plan_with_http_info(self, id_project, id_domain, id_name, id_v _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def update_named_entity(self, resource_type, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 - """Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 + def update_project_attributes2(self, attributes_org, attributes_project, body, **kwargs): # noqa: E501 + """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_named_entity(resource_type, id_project, id_domain, id_name, body, async_req=True) + >>> thread = api.update_project_attributes2(attributes_org, attributes_project, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: Resource type of the metadata to update +required (required) - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. +optional - in certain contexts - like 'List API', 'Launch plans' (required) - :param AdminNamedEntityUpdateRequest body: (required) - :return: AdminNamedEntityUpdateResponse + :param str attributes_org: Optional, org key applied to the project. (required) + :param str attributes_project: Unique project id for which this set of attributes will be applied. (required) + :param AdminProjectAttributesUpdateRequest body: (required) + :return: AdminProjectAttributesUpdateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.update_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, body, **kwargs) # noqa: E501 + return self.update_project_attributes2_with_http_info(attributes_org, attributes_project, body, **kwargs) # noqa: E501 else: - (data) = self.update_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, body, **kwargs) # noqa: E501 + (data) = self.update_project_attributes2_with_http_info(attributes_org, attributes_project, body, **kwargs) # noqa: E501 return data - def update_named_entity_with_http_info(self, resource_type, id_project, id_domain, id_name, body, **kwargs): # noqa: E501 - """Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 + def update_project_attributes2_with_http_info(self, attributes_org, attributes_project, body, **kwargs): # noqa: E501 + """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_named_entity_with_http_info(resource_type, id_project, id_domain, id_name, body, async_req=True) + >>> thread = api.update_project_attributes2_with_http_info(attributes_org, attributes_project, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str resource_type: Resource type of the metadata to update +required (required) - :param str id_project: Name of the project the resource belongs to. (required) - :param str id_domain: Name of the domain the resource belongs to. A domain can be considered as a subset within a specific project. (required) - :param str id_name: User provided value for the resource. The combination of project + domain + name uniquely identifies the resource. 
+optional - in certain contexts - like 'List API', 'Launch plans' (required) - :param AdminNamedEntityUpdateRequest body: (required) - :return: AdminNamedEntityUpdateResponse + :param str attributes_org: Optional, org key applied to the project. (required) + :param str attributes_project: Unique project id for which this set of attributes will be applied. (required) + :param AdminProjectAttributesUpdateRequest body: (required) + :return: AdminProjectAttributesUpdateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['resource_type', 'id_project', 'id_domain', 'id_name', 'body'] # noqa: E501 + all_params = ['attributes_org', 'attributes_project', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -6324,42 +13874,30 @@ def update_named_entity_with_http_info(self, resource_type, id_project, id_domai if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method update_named_entity" % key + " to method update_project_attributes2" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'resource_type' is set - if ('resource_type' not in params or - params['resource_type'] is None): - raise ValueError("Missing the required parameter `resource_type` when calling `update_named_entity`") # noqa: E501 - # verify the required parameter 'id_project' is set - if ('id_project' not in params or - params['id_project'] is None): - raise ValueError("Missing the required parameter `id_project` when calling `update_named_entity`") # noqa: E501 - # verify the required parameter 'id_domain' is set - if ('id_domain' not in params or - params['id_domain'] is None): - raise ValueError("Missing the required parameter `id_domain` when calling `update_named_entity`") # noqa: E501 - # verify the required parameter 'id_name' is set - if ('id_name' not in params or - params['id_name'] is None): - raise 
ValueError("Missing the required parameter `id_name` when calling `update_named_entity`") # noqa: E501 + # verify the required parameter 'attributes_org' is set + if ('attributes_org' not in params or + params['attributes_org'] is None): + raise ValueError("Missing the required parameter `attributes_org` when calling `update_project_attributes2`") # noqa: E501 + # verify the required parameter 'attributes_project' is set + if ('attributes_project' not in params or + params['attributes_project'] is None): + raise ValueError("Missing the required parameter `attributes_project` when calling `update_project_attributes2`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `update_named_entity`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `update_project_attributes2`") # noqa: E501 collection_formats = {} path_params = {} - if 'resource_type' in params: - path_params['resource_type'] = params['resource_type'] # noqa: E501 - if 'id_project' in params: - path_params['id.project'] = params['id_project'] # noqa: E501 - if 'id_domain' in params: - path_params['id.domain'] = params['id_domain'] # noqa: E501 - if 'id_name' in params: - path_params['id.name'] = params['id_name'] # noqa: E501 + if 'attributes_org' in params: + path_params['attributes.org'] = params['attributes_org'] # noqa: E501 + if 'attributes_project' in params: + path_params['attributes.project'] = params['attributes_project'] # noqa: E501 query_params = [] @@ -6383,14 +13921,14 @@ def update_named_entity_with_http_info(self, resource_type, id_project, id_domai auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}', 'PUT', + '/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}', 'PUT', path_params, query_params, header_params, 
body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminNamedEntityUpdateResponse', # noqa: E501 + response_type='AdminProjectAttributesUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -6398,45 +13936,47 @@ def update_named_entity_with_http_info(self, resource_type, id_project, id_domai _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def update_project(self, id, body, **kwargs): # noqa: E501 - """Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. # noqa: E501 + def update_project_domain_attributes(self, attributes_project, attributes_domain, body, **kwargs): # noqa: E501 + """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_project(id, body, async_req=True) + >>> thread = api.update_project_domain_attributes(attributes_project, attributes_domain, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id: Globally unique project name. (required) - :param AdminProject body: (required) - :return: AdminProjectUpdateResponse + :param str attributes_project: Unique project id for which this set of attributes will be applied. (required) + :param str attributes_domain: Unique domain id for which this set of attributes will be applied. (required) + :param AdminProjectDomainAttributesUpdateRequest body: (required) + :return: AdminProjectDomainAttributesUpdateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.update_project_with_http_info(id, body, **kwargs) # noqa: E501 + return self.update_project_domain_attributes_with_http_info(attributes_project, attributes_domain, body, **kwargs) # noqa: E501 else: - (data) = self.update_project_with_http_info(id, body, **kwargs) # noqa: E501 + (data) = self.update_project_domain_attributes_with_http_info(attributes_project, attributes_domain, body, **kwargs) # noqa: E501 return data - def update_project_with_http_info(self, id, body, **kwargs): # noqa: E501 - """Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. # noqa: E501 + def update_project_domain_attributes_with_http_info(self, attributes_project, attributes_domain, body, **kwargs): # noqa: E501 + """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_project_with_http_info(id, body, async_req=True) + >>> thread = api.update_project_domain_attributes_with_http_info(attributes_project, attributes_domain, body, async_req=True) >>> result = thread.get() :param async_req bool - :param str id: Globally unique project name. (required) - :param AdminProject body: (required) - :return: AdminProjectUpdateResponse + :param str attributes_project: Unique project id for which this set of attributes will be applied. (required) + :param str attributes_domain: Unique domain id for which this set of attributes will be applied. (required) + :param AdminProjectDomainAttributesUpdateRequest body: (required) + :return: AdminProjectDomainAttributesUpdateResponse If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id', 'body'] # noqa: E501 + all_params = ['attributes_project', 'attributes_domain', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -6447,24 +13987,30 @@ def update_project_with_http_info(self, id, body, **kwargs): # noqa: E501 if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method update_project" % key + " to method update_project_domain_attributes" % key ) params[key] = val del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `update_project`") # noqa: E501 + # verify the required parameter 'attributes_project' is set + if ('attributes_project' not in params or + params['attributes_project'] is None): + raise ValueError("Missing the required parameter `attributes_project` when calling `update_project_domain_attributes`") # noqa: E501 + # verify the required parameter 'attributes_domain' is set + if ('attributes_domain' not in params or + params['attributes_domain'] is None): + raise ValueError("Missing the required parameter `attributes_domain` when calling `update_project_domain_attributes`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `update_project`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `update_project_domain_attributes`") # noqa: E501 collection_formats = {} path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 + if 'attributes_project' in params: + path_params['attributes.project'] = params['attributes_project'] # noqa: E501 + if 'attributes_domain' in params: + path_params['attributes.domain'] = params['attributes_domain'] # noqa: E501 query_params = [] @@ 
-6488,14 +14034,14 @@ def update_project_with_http_info(self, id, body, **kwargs): # noqa: E501 auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/projects/{id}', 'PUT', + '/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminProjectUpdateResponse', # noqa: E501 + response_type='AdminProjectDomainAttributesUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -6503,45 +14049,49 @@ def update_project_with_http_info(self, id, body, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def update_project_attributes(self, attributes_project, body, **kwargs): # noqa: E501 - """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level # noqa: E501 + def update_project_domain_attributes2(self, attributes_org, attributes_project, attributes_domain, body, **kwargs): # noqa: E501 + """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_project_attributes(attributes_project, body, async_req=True) + >>> thread = api.update_project_domain_attributes2(attributes_org, attributes_project, attributes_domain, body, async_req=True) >>> result = thread.get() :param async_req bool + :param str attributes_org: Optional, org key applied to the attributes. (required) :param str attributes_project: Unique project id for which this set of attributes will be applied. 
(required) - :param AdminProjectAttributesUpdateRequest body: (required) - :return: AdminProjectAttributesUpdateResponse + :param str attributes_domain: Unique domain id for which this set of attributes will be applied. (required) + :param AdminProjectDomainAttributesUpdateRequest body: (required) + :return: AdminProjectDomainAttributesUpdateResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.update_project_attributes_with_http_info(attributes_project, body, **kwargs) # noqa: E501 + return self.update_project_domain_attributes2_with_http_info(attributes_org, attributes_project, attributes_domain, body, **kwargs) # noqa: E501 else: - (data) = self.update_project_attributes_with_http_info(attributes_project, body, **kwargs) # noqa: E501 + (data) = self.update_project_domain_attributes2_with_http_info(attributes_org, attributes_project, attributes_domain, body, **kwargs) # noqa: E501 return data - def update_project_attributes_with_http_info(self, attributes_project, body, **kwargs): # noqa: E501 - """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level # noqa: E501 + def update_project_domain_attributes2_with_http_info(self, attributes_org, attributes_project, attributes_domain, body, **kwargs): # noqa: E501 + """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_project_attributes_with_http_info(attributes_project, body, async_req=True) + >>> thread = api.update_project_domain_attributes2_with_http_info(attributes_org, attributes_project, attributes_domain, body, async_req=True) >>> result = thread.get() :param async_req bool + :param str attributes_org: Optional, org key applied to the attributes. (required) :param str attributes_project: Unique project id for which this set of attributes will be applied. (required) - :param AdminProjectAttributesUpdateRequest body: (required) - :return: AdminProjectAttributesUpdateResponse + :param str attributes_domain: Unique domain id for which this set of attributes will be applied. (required) + :param AdminProjectDomainAttributesUpdateRequest body: (required) + :return: AdminProjectDomainAttributesUpdateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['attributes_project', 'body'] # noqa: E501 + all_params = ['attributes_org', 'attributes_project', 'attributes_domain', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -6552,24 +14102,36 @@ def update_project_attributes_with_http_info(self, attributes_project, body, **k if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method update_project_attributes" % key + " to method update_project_domain_attributes2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'attributes_org' is set + if ('attributes_org' not in params or + params['attributes_org'] is None): + raise ValueError("Missing the required parameter `attributes_org` when calling `update_project_domain_attributes2`") # noqa: E501 # verify the required parameter 'attributes_project' is set if ('attributes_project' not in params or params['attributes_project'] is None): - raise 
ValueError("Missing the required parameter `attributes_project` when calling `update_project_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `attributes_project` when calling `update_project_domain_attributes2`") # noqa: E501 + # verify the required parameter 'attributes_domain' is set + if ('attributes_domain' not in params or + params['attributes_domain'] is None): + raise ValueError("Missing the required parameter `attributes_domain` when calling `update_project_domain_attributes2`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `update_project_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `update_project_domain_attributes2`") # noqa: E501 collection_formats = {} path_params = {} + if 'attributes_org' in params: + path_params['attributes.org'] = params['attributes_org'] # noqa: E501 if 'attributes_project' in params: path_params['attributes.project'] = params['attributes_project'] # noqa: E501 + if 'attributes_domain' in params: + path_params['attributes.domain'] = params['attributes_domain'] # noqa: E501 query_params = [] @@ -6593,14 +14155,14 @@ def update_project_attributes_with_http_info(self, attributes_project, body, **k auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/project_attributes/{attributes.project}', 'PUT', + '/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminProjectAttributesUpdateResponse', # noqa: E501 + response_type='AdminProjectDomainAttributesUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -6608,47 +14170,49 
@@ def update_project_attributes_with_http_info(self, attributes_project, body, **k _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def update_project_domain_attributes(self, attributes_project, attributes_domain, body, **kwargs): # noqa: E501 - """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + def update_workflow_attributes(self, attributes_project, attributes_domain, attributes_workflow, body, **kwargs): # noqa: E501 + """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_project_domain_attributes(attributes_project, attributes_domain, body, async_req=True) + >>> thread = api.update_workflow_attributes(attributes_project, attributes_domain, attributes_workflow, body, async_req=True) >>> result = thread.get() :param async_req bool :param str attributes_project: Unique project id for which this set of attributes will be applied. (required) :param str attributes_domain: Unique domain id for which this set of attributes will be applied. (required) - :param AdminProjectDomainAttributesUpdateRequest body: (required) - :return: AdminProjectDomainAttributesUpdateResponse + :param str attributes_workflow: Workflow name for which this set of attributes will be applied. (required) + :param AdminWorkflowAttributesUpdateRequest body: (required) + :return: AdminWorkflowAttributesUpdateResponse If the method is called asynchronously, returns the request thread. 
""" kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.update_project_domain_attributes_with_http_info(attributes_project, attributes_domain, body, **kwargs) # noqa: E501 + return self.update_workflow_attributes_with_http_info(attributes_project, attributes_domain, attributes_workflow, body, **kwargs) # noqa: E501 else: - (data) = self.update_project_domain_attributes_with_http_info(attributes_project, attributes_domain, body, **kwargs) # noqa: E501 + (data) = self.update_workflow_attributes_with_http_info(attributes_project, attributes_domain, attributes_workflow, body, **kwargs) # noqa: E501 return data - def update_project_domain_attributes_with_http_info(self, attributes_project, attributes_domain, body, **kwargs): # noqa: E501 - """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + def update_workflow_attributes_with_http_info(self, attributes_project, attributes_domain, attributes_workflow, body, **kwargs): # noqa: E501 + """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_project_domain_attributes_with_http_info(attributes_project, attributes_domain, body, async_req=True) + >>> thread = api.update_workflow_attributes_with_http_info(attributes_project, attributes_domain, attributes_workflow, body, async_req=True) >>> result = thread.get() :param async_req bool :param str attributes_project: Unique project id for which this set of attributes will be applied. (required) :param str attributes_domain: Unique domain id for which this set of attributes will be applied. 
(required) - :param AdminProjectDomainAttributesUpdateRequest body: (required) - :return: AdminProjectDomainAttributesUpdateResponse + :param str attributes_workflow: Workflow name for which this set of attributes will be applied. (required) + :param AdminWorkflowAttributesUpdateRequest body: (required) + :return: AdminWorkflowAttributesUpdateResponse If the method is called asynchronously, returns the request thread. """ - all_params = ['attributes_project', 'attributes_domain', 'body'] # noqa: E501 + all_params = ['attributes_project', 'attributes_domain', 'attributes_workflow', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -6659,22 +14223,26 @@ def update_project_domain_attributes_with_http_info(self, attributes_project, at if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method update_project_domain_attributes" % key + " to method update_workflow_attributes" % key ) params[key] = val del params['kwargs'] # verify the required parameter 'attributes_project' is set if ('attributes_project' not in params or params['attributes_project'] is None): - raise ValueError("Missing the required parameter `attributes_project` when calling `update_project_domain_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `attributes_project` when calling `update_workflow_attributes`") # noqa: E501 # verify the required parameter 'attributes_domain' is set if ('attributes_domain' not in params or params['attributes_domain'] is None): - raise ValueError("Missing the required parameter `attributes_domain` when calling `update_project_domain_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `attributes_domain` when calling `update_workflow_attributes`") # noqa: E501 + # verify the required parameter 'attributes_workflow' is set + if ('attributes_workflow' not in params or + 
params['attributes_workflow'] is None): + raise ValueError("Missing the required parameter `attributes_workflow` when calling `update_workflow_attributes`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `update_project_domain_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `update_workflow_attributes`") # noqa: E501 collection_formats = {} @@ -6683,6 +14251,8 @@ def update_project_domain_attributes_with_http_info(self, attributes_project, at path_params['attributes.project'] = params['attributes_project'] # noqa: E501 if 'attributes_domain' in params: path_params['attributes.domain'] = params['attributes_domain'] # noqa: E501 + if 'attributes_workflow' in params: + path_params['attributes.workflow'] = params['attributes_workflow'] # noqa: E501 query_params = [] @@ -6706,14 +14276,14 @@ def update_project_domain_attributes_with_http_info(self, attributes_project, at auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}', 'PUT', + '/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}', 'PUT', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, - response_type='AdminProjectDomainAttributesUpdateResponse', # noqa: E501 + response_type='AdminWorkflowAttributesUpdateResponse', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), @@ -6721,15 +14291,16 @@ def update_project_domain_attributes_with_http_info(self, attributes_project, at _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) - def update_workflow_attributes(self, attributes_project, attributes_domain, attributes_workflow, body, 
**kwargs): # noqa: E501 + def update_workflow_attributes2(self, attributes_org, attributes_project, attributes_domain, attributes_workflow, body, **kwargs): # noqa: E501 """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_workflow_attributes(attributes_project, attributes_domain, attributes_workflow, body, async_req=True) + >>> thread = api.update_workflow_attributes2(attributes_org, attributes_project, attributes_domain, attributes_workflow, body, async_req=True) >>> result = thread.get() :param async_req bool + :param str attributes_org: Optional, org key applied to the attributes. (required) :param str attributes_project: Unique project id for which this set of attributes will be applied. (required) :param str attributes_domain: Unique domain id for which this set of attributes will be applied. (required) :param str attributes_workflow: Workflow name for which this set of attributes will be applied. 
(required) @@ -6740,20 +14311,21 @@ def update_workflow_attributes(self, attributes_project, attributes_domain, attr """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): - return self.update_workflow_attributes_with_http_info(attributes_project, attributes_domain, attributes_workflow, body, **kwargs) # noqa: E501 + return self.update_workflow_attributes2_with_http_info(attributes_org, attributes_project, attributes_domain, attributes_workflow, body, **kwargs) # noqa: E501 else: - (data) = self.update_workflow_attributes_with_http_info(attributes_project, attributes_domain, attributes_workflow, body, **kwargs) # noqa: E501 + (data) = self.update_workflow_attributes2_with_http_info(attributes_org, attributes_project, attributes_domain, attributes_workflow, body, **kwargs) # noqa: E501 return data - def update_workflow_attributes_with_http_info(self, attributes_project, attributes_domain, attributes_workflow, body, **kwargs): # noqa: E501 + def update_workflow_attributes2_with_http_info(self, attributes_org, attributes_project, attributes_domain, attributes_workflow, body, **kwargs): # noqa: E501 """Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_workflow_attributes_with_http_info(attributes_project, attributes_domain, attributes_workflow, body, async_req=True) + >>> thread = api.update_workflow_attributes2_with_http_info(attributes_org, attributes_project, attributes_domain, attributes_workflow, body, async_req=True) >>> result = thread.get() :param async_req bool + :param str attributes_org: Optional, org key applied to the attributes. (required) :param str attributes_project: Unique project id for which this set of attributes will be applied. 
(required) :param str attributes_domain: Unique domain id for which this set of attributes will be applied. (required) :param str attributes_workflow: Workflow name for which this set of attributes will be applied. (required) @@ -6763,7 +14335,7 @@ def update_workflow_attributes_with_http_info(self, attributes_project, attribut returns the request thread. """ - all_params = ['attributes_project', 'attributes_domain', 'attributes_workflow', 'body'] # noqa: E501 + all_params = ['attributes_org', 'attributes_project', 'attributes_domain', 'attributes_workflow', 'body'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -6774,30 +14346,36 @@ def update_workflow_attributes_with_http_info(self, attributes_project, attribut if key not in all_params: raise TypeError( "Got an unexpected keyword argument '%s'" - " to method update_workflow_attributes" % key + " to method update_workflow_attributes2" % key ) params[key] = val del params['kwargs'] + # verify the required parameter 'attributes_org' is set + if ('attributes_org' not in params or + params['attributes_org'] is None): + raise ValueError("Missing the required parameter `attributes_org` when calling `update_workflow_attributes2`") # noqa: E501 # verify the required parameter 'attributes_project' is set if ('attributes_project' not in params or params['attributes_project'] is None): - raise ValueError("Missing the required parameter `attributes_project` when calling `update_workflow_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `attributes_project` when calling `update_workflow_attributes2`") # noqa: E501 # verify the required parameter 'attributes_domain' is set if ('attributes_domain' not in params or params['attributes_domain'] is None): - raise ValueError("Missing the required parameter `attributes_domain` when calling `update_workflow_attributes`") # noqa: E501 + raise ValueError("Missing the required 
parameter `attributes_domain` when calling `update_workflow_attributes2`") # noqa: E501 # verify the required parameter 'attributes_workflow' is set if ('attributes_workflow' not in params or params['attributes_workflow'] is None): - raise ValueError("Missing the required parameter `attributes_workflow` when calling `update_workflow_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `attributes_workflow` when calling `update_workflow_attributes2`") # noqa: E501 # verify the required parameter 'body' is set if ('body' not in params or params['body'] is None): - raise ValueError("Missing the required parameter `body` when calling `update_workflow_attributes`") # noqa: E501 + raise ValueError("Missing the required parameter `body` when calling `update_workflow_attributes2`") # noqa: E501 collection_formats = {} path_params = {} + if 'attributes_org' in params: + path_params['attributes.org'] = params['attributes_org'] # noqa: E501 if 'attributes_project' in params: path_params['attributes.project'] = params['attributes_project'] # noqa: E501 if 'attributes_domain' in params: @@ -6827,7 +14405,7 @@ def update_workflow_attributes_with_http_info(self, attributes_project, attribut auth_settings = [] # noqa: E501 return self.api_client.call_api( - '/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}', 'PUT', + '/api/v1/workflow_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}/{attributes.workflow}', 'PUT', path_params, query_params, header_params, diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/__init__.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/__init__.py index f2604bcd01..3089fff468 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/__init__.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/__init__.py @@ -27,6 +27,7 @@ from 
flyteadmin.models.admin_description_entity_list import AdminDescriptionEntityList from flyteadmin.models.admin_description_format import AdminDescriptionFormat from flyteadmin.models.admin_domain import AdminDomain +from flyteadmin.models.admin_dynamic_node_workflow_response import AdminDynamicNodeWorkflowResponse from flyteadmin.models.admin_email_notification import AdminEmailNotification from flyteadmin.models.admin_envs import AdminEnvs from flyteadmin.models.admin_execution import AdminExecution @@ -235,6 +236,7 @@ from flyteadmin.models.core_task_node import CoreTaskNode from flyteadmin.models.core_task_node_overrides import CoreTaskNodeOverrides from flyteadmin.models.core_task_template import CoreTaskTemplate +from flyteadmin.models.core_time_partition import CoreTimePartition from flyteadmin.models.core_type_annotation import CoreTypeAnnotation from flyteadmin.models.core_type_structure import CoreTypeStructure from flyteadmin.models.core_typed_interface import CoreTypedInterface diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_dynamic_node_workflow_response.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_dynamic_node_workflow_response.py new file mode 100644 index 0000000000..e721c762bc --- /dev/null +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_dynamic_node_workflow_response.py @@ -0,0 +1,117 @@ +# coding: utf-8 + +""" + flyteidl/service/admin.proto + + No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501 + + OpenAPI spec version: version not set + + Generated by: https://github.com/swagger-api/swagger-codegen.git +""" + + +import pprint +import re # noqa: F401 + +import six + +from flyteadmin.models.core_compiled_workflow_closure import CoreCompiledWorkflowClosure # noqa: F401,E501 + + +class AdminDynamicNodeWorkflowResponse(object): + """NOTE: This class is auto generated by the swagger 
code generator program. + + Do not edit the class manually. + """ + + """ + Attributes: + swagger_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + swagger_types = { + 'compiled_workflow': 'CoreCompiledWorkflowClosure' + } + + attribute_map = { + 'compiled_workflow': 'compiled_workflow' + } + + def __init__(self, compiled_workflow=None): # noqa: E501 + """AdminDynamicNodeWorkflowResponse - a model defined in Swagger""" # noqa: E501 + + self._compiled_workflow = None + self.discriminator = None + + if compiled_workflow is not None: + self.compiled_workflow = compiled_workflow + + @property + def compiled_workflow(self): + """Gets the compiled_workflow of this AdminDynamicNodeWorkflowResponse. # noqa: E501 + + + :return: The compiled_workflow of this AdminDynamicNodeWorkflowResponse. # noqa: E501 + :rtype: CoreCompiledWorkflowClosure + """ + return self._compiled_workflow + + @compiled_workflow.setter + def compiled_workflow(self, compiled_workflow): + """Sets the compiled_workflow of this AdminDynamicNodeWorkflowResponse. + + + :param compiled_workflow: The compiled_workflow of this AdminDynamicNodeWorkflowResponse. 
# noqa: E501 + :type: CoreCompiledWorkflowClosure + """ + + self._compiled_workflow = compiled_workflow + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.swagger_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + if issubclass(AdminDynamicNodeWorkflowResponse, dict): + for key, value in self.items(): + result[key] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, AdminDynamicNodeWorkflowResponse): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_execution_create_request.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_execution_create_request.py index 5c5373a88d..228a1c5847 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_execution_create_request.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_execution_create_request.py @@ -38,7 +38,8 @@ class AdminExecutionCreateRequest(object): 'domain': 'str', 'name': 'str', 'spec': 'AdminExecutionSpec', - 'inputs': 'CoreLiteralMap' + 'inputs': 'CoreLiteralMap', + 'org': 'str' } attribute_map = { @@ -46,10 
+47,11 @@ class AdminExecutionCreateRequest(object): 'domain': 'domain', 'name': 'name', 'spec': 'spec', - 'inputs': 'inputs' + 'inputs': 'inputs', + 'org': 'org' } - def __init__(self, project=None, domain=None, name=None, spec=None, inputs=None): # noqa: E501 + def __init__(self, project=None, domain=None, name=None, spec=None, inputs=None, org=None): # noqa: E501 """AdminExecutionCreateRequest - a model defined in Swagger""" # noqa: E501 self._project = None @@ -57,6 +59,7 @@ def __init__(self, project=None, domain=None, name=None, spec=None, inputs=None) self._name = None self._spec = None self._inputs = None + self._org = None self.discriminator = None if project is not None: @@ -69,6 +72,8 @@ def __init__(self, project=None, domain=None, name=None, spec=None, inputs=None) self.spec = spec if inputs is not None: self.inputs = inputs + if org is not None: + self.org = org @property def project(self): @@ -175,6 +180,29 @@ def inputs(self, inputs): self._inputs = inputs + @property + def org(self): + """Gets the org of this AdminExecutionCreateRequest. # noqa: E501 + + Optional, org key applied to the resource. # noqa: E501 + + :return: The org of this AdminExecutionCreateRequest. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this AdminExecutionCreateRequest. + + Optional, org key applied to the resource. # noqa: E501 + + :param org: The org of this AdminExecutionCreateRequest. 
# noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_matchable_attributes_configuration.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_matchable_attributes_configuration.py index a9be455f25..58e05cbdb4 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_matchable_attributes_configuration.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_matchable_attributes_configuration.py @@ -37,7 +37,8 @@ class AdminMatchableAttributesConfiguration(object): 'domain': 'str', 'project': 'str', 'workflow': 'str', - 'launch_plan': 'str' + 'launch_plan': 'str', + 'org': 'str' } attribute_map = { @@ -45,10 +46,11 @@ class AdminMatchableAttributesConfiguration(object): 'domain': 'domain', 'project': 'project', 'workflow': 'workflow', - 'launch_plan': 'launch_plan' + 'launch_plan': 'launch_plan', + 'org': 'org' } - def __init__(self, attributes=None, domain=None, project=None, workflow=None, launch_plan=None): # noqa: E501 + def __init__(self, attributes=None, domain=None, project=None, workflow=None, launch_plan=None, org=None): # noqa: E501 """AdminMatchableAttributesConfiguration - a model defined in Swagger""" # noqa: E501 self._attributes = None @@ -56,6 +58,7 @@ def __init__(self, attributes=None, domain=None, project=None, workflow=None, la self._project = None self._workflow = None self._launch_plan = None + self._org = None self.discriminator = None if attributes is not None: @@ -68,6 +71,8 @@ def __init__(self, attributes=None, domain=None, project=None, workflow=None, la self.workflow = workflow if launch_plan is not None: self.launch_plan = launch_plan + if org is not None: + self.org = org @property def attributes(self): @@ -174,6 +179,29 @@ def launch_plan(self, launch_plan): self._launch_plan = launch_plan + 
@property + def org(self): + """Gets the org of this AdminMatchableAttributesConfiguration. # noqa: E501 + + Optional, org key applied to the resource. # noqa: E501 + + :return: The org of this AdminMatchableAttributesConfiguration. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this AdminMatchableAttributesConfiguration. + + Optional, org key applied to the resource. # noqa: E501 + + :param org: The org of this AdminMatchableAttributesConfiguration. # noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_named_entity_identifier.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_named_entity_identifier.py index 8a06241403..a38944ceec 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_named_entity_identifier.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_named_entity_identifier.py @@ -33,21 +33,24 @@ class AdminNamedEntityIdentifier(object): swagger_types = { 'project': 'str', 'domain': 'str', - 'name': 'str' + 'name': 'str', + 'org': 'str' } attribute_map = { 'project': 'project', 'domain': 'domain', - 'name': 'name' + 'name': 'name', + 'org': 'org' } - def __init__(self, project=None, domain=None, name=None): # noqa: E501 + def __init__(self, project=None, domain=None, name=None, org=None): # noqa: E501 """AdminNamedEntityIdentifier - a model defined in Swagger""" # noqa: E501 self._project = None self._domain = None self._name = None + self._org = None self.discriminator = None if project is not None: @@ -56,6 +59,8 @@ def __init__(self, project=None, domain=None, name=None): # noqa: E501 self.domain = domain if name is not None: self.name = name + if org is not None: + self.org = org @property def project(self): @@ -124,6 +129,29 @@ def 
name(self, name): self._name = name + @property + def org(self): + """Gets the org of this AdminNamedEntityIdentifier. # noqa: E501 + + Optional, org key applied to the resource. # noqa: E501 + + :return: The org of this AdminNamedEntityIdentifier. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this AdminNamedEntityIdentifier. + + Optional, org key applied to the resource. # noqa: E501 + + :param org: The org of this AdminNamedEntityIdentifier. # noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project.py index 1d64553b8f..7e62124a77 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project.py @@ -40,7 +40,8 @@ class AdminProject(object): 'domains': 'list[AdminDomain]', 'description': 'str', 'labels': 'AdminLabels', - 'state': 'ProjectProjectState' + 'state': 'ProjectProjectState', + 'org': 'str' } attribute_map = { @@ -49,10 +50,11 @@ class AdminProject(object): 'domains': 'domains', 'description': 'description', 'labels': 'labels', - 'state': 'state' + 'state': 'state', + 'org': 'org' } - def __init__(self, id=None, name=None, domains=None, description=None, labels=None, state=None): # noqa: E501 + def __init__(self, id=None, name=None, domains=None, description=None, labels=None, state=None, org=None): # noqa: E501 """AdminProject - a model defined in Swagger""" # noqa: E501 self._id = None @@ -61,6 +63,7 @@ def __init__(self, id=None, name=None, domains=None, description=None, labels=No self._description = None self._labels = None self._state = None + self._org = None self.discriminator = None if id is not None: @@ -75,6 
+78,8 @@ def __init__(self, id=None, name=None, domains=None, description=None, labels=No self.labels = labels if state is not None: self.state = state + if org is not None: + self.org = org @property def id(self): @@ -208,6 +213,29 @@ def state(self, state): self._state = state + @property + def org(self): + """Gets the org of this AdminProject. # noqa: E501 + + Optional, org key applied to the resource. # noqa: E501 + + :return: The org of this AdminProject. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this AdminProject. + + Optional, org key applied to the resource. # noqa: E501 + + :param org: The org of this AdminProject. # noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_attributes.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_attributes.py index a892f96441..c0c913f4fd 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_attributes.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_attributes.py @@ -34,25 +34,30 @@ class AdminProjectAttributes(object): """ swagger_types = { 'project': 'str', - 'matching_attributes': 'AdminMatchingAttributes' + 'matching_attributes': 'AdminMatchingAttributes', + 'org': 'str' } attribute_map = { 'project': 'project', - 'matching_attributes': 'matching_attributes' + 'matching_attributes': 'matching_attributes', + 'org': 'org' } - def __init__(self, project=None, matching_attributes=None): # noqa: E501 + def __init__(self, project=None, matching_attributes=None, org=None): # noqa: E501 """AdminProjectAttributes - a model defined in Swagger""" # noqa: E501 self._project = None self._matching_attributes = None + self._org = None self.discriminator = None if project is not 
None: self.project = project if matching_attributes is not None: self.matching_attributes = matching_attributes + if org is not None: + self.org = org @property def project(self): @@ -98,6 +103,29 @@ def matching_attributes(self, matching_attributes): self._matching_attributes = matching_attributes + @property + def org(self): + """Gets the org of this AdminProjectAttributes. # noqa: E501 + + Optional, org key applied to the project. # noqa: E501 + + :return: The org of this AdminProjectAttributes. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this AdminProjectAttributes. + + Optional, org key applied to the project. # noqa: E501 + + :param org: The org of this AdminProjectAttributes. # noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_attributes_delete_request.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_attributes_delete_request.py index 5b41e2c590..439707074f 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_attributes_delete_request.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_attributes_delete_request.py @@ -34,25 +34,30 @@ class AdminProjectAttributesDeleteRequest(object): """ swagger_types = { 'project': 'str', - 'resource_type': 'AdminMatchableResource' + 'resource_type': 'AdminMatchableResource', + 'org': 'str' } attribute_map = { 'project': 'project', - 'resource_type': 'resource_type' + 'resource_type': 'resource_type', + 'org': 'org' } - def __init__(self, project=None, resource_type=None): # noqa: E501 + def __init__(self, project=None, resource_type=None, org=None): # noqa: E501 """AdminProjectAttributesDeleteRequest - a model defined in Swagger""" # noqa: E501 self._project = None 
self._resource_type = None + self._org = None self.discriminator = None if project is not None: self.project = project if resource_type is not None: self.resource_type = resource_type + if org is not None: + self.org = org @property def project(self): @@ -96,6 +101,29 @@ def resource_type(self, resource_type): self._resource_type = resource_type + @property + def org(self): + """Gets the org of this AdminProjectAttributesDeleteRequest. # noqa: E501 + + Optional, org key applied to the project. # noqa: E501 + + :return: The org of this AdminProjectAttributesDeleteRequest. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this AdminProjectAttributesDeleteRequest. + + Optional, org key applied to the project. # noqa: E501 + + :param org: The org of this AdminProjectAttributesDeleteRequest. # noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_domain_attributes.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_domain_attributes.py index 0c7d6e4559..2a8abfe98e 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_domain_attributes.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_domain_attributes.py @@ -35,21 +35,24 @@ class AdminProjectDomainAttributes(object): swagger_types = { 'project': 'str', 'domain': 'str', - 'matching_attributes': 'AdminMatchingAttributes' + 'matching_attributes': 'AdminMatchingAttributes', + 'org': 'str' } attribute_map = { 'project': 'project', 'domain': 'domain', - 'matching_attributes': 'matching_attributes' + 'matching_attributes': 'matching_attributes', + 'org': 'org' } - def __init__(self, project=None, domain=None, matching_attributes=None): # noqa: E501 + def __init__(self, project=None, 
domain=None, matching_attributes=None, org=None): # noqa: E501 """AdminProjectDomainAttributes - a model defined in Swagger""" # noqa: E501 self._project = None self._domain = None self._matching_attributes = None + self._org = None self.discriminator = None if project is not None: @@ -58,6 +61,8 @@ def __init__(self, project=None, domain=None, matching_attributes=None): # noqa self.domain = domain if matching_attributes is not None: self.matching_attributes = matching_attributes + if org is not None: + self.org = org @property def project(self): @@ -126,6 +131,29 @@ def matching_attributes(self, matching_attributes): self._matching_attributes = matching_attributes + @property + def org(self): + """Gets the org of this AdminProjectDomainAttributes. # noqa: E501 + + Optional, org key applied to the attributes. # noqa: E501 + + :return: The org of this AdminProjectDomainAttributes. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this AdminProjectDomainAttributes. + + Optional, org key applied to the attributes. # noqa: E501 + + :param org: The org of this AdminProjectDomainAttributes. 
# noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_domain_attributes_delete_request.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_domain_attributes_delete_request.py index aef8bfa14f..bafad3aa40 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_domain_attributes_delete_request.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_project_domain_attributes_delete_request.py @@ -35,21 +35,24 @@ class AdminProjectDomainAttributesDeleteRequest(object): swagger_types = { 'project': 'str', 'domain': 'str', - 'resource_type': 'AdminMatchableResource' + 'resource_type': 'AdminMatchableResource', + 'org': 'str' } attribute_map = { 'project': 'project', 'domain': 'domain', - 'resource_type': 'resource_type' + 'resource_type': 'resource_type', + 'org': 'org' } - def __init__(self, project=None, domain=None, resource_type=None): # noqa: E501 + def __init__(self, project=None, domain=None, resource_type=None, org=None): # noqa: E501 """AdminProjectDomainAttributesDeleteRequest - a model defined in Swagger""" # noqa: E501 self._project = None self._domain = None self._resource_type = None + self._org = None self.discriminator = None if project is not None: @@ -58,6 +61,8 @@ def __init__(self, project=None, domain=None, resource_type=None): # noqa: E501 self.domain = domain if resource_type is not None: self.resource_type = resource_type + if org is not None: + self.org = org @property def project(self): @@ -122,6 +127,29 @@ def resource_type(self, resource_type): self._resource_type = resource_type + @property + def org(self): + """Gets the org of this AdminProjectDomainAttributesDeleteRequest. # noqa: E501 + + Optional, org key applied to the attributes. 
# noqa: E501 + + :return: The org of this AdminProjectDomainAttributesDeleteRequest. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this AdminProjectDomainAttributesDeleteRequest. + + Optional, org key applied to the attributes. # noqa: E501 + + :param org: The org of this AdminProjectDomainAttributesDeleteRequest. # noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_workflow_attributes.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_workflow_attributes.py index e5dc389073..4add43a463 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_workflow_attributes.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_workflow_attributes.py @@ -36,23 +36,26 @@ class AdminWorkflowAttributes(object): 'project': 'str', 'domain': 'str', 'workflow': 'str', - 'matching_attributes': 'AdminMatchingAttributes' + 'matching_attributes': 'AdminMatchingAttributes', + 'org': 'str' } attribute_map = { 'project': 'project', 'domain': 'domain', 'workflow': 'workflow', - 'matching_attributes': 'matching_attributes' + 'matching_attributes': 'matching_attributes', + 'org': 'org' } - def __init__(self, project=None, domain=None, workflow=None, matching_attributes=None): # noqa: E501 + def __init__(self, project=None, domain=None, workflow=None, matching_attributes=None, org=None): # noqa: E501 """AdminWorkflowAttributes - a model defined in Swagger""" # noqa: E501 self._project = None self._domain = None self._workflow = None self._matching_attributes = None + self._org = None self.discriminator = None if project is not None: @@ -63,6 +66,8 @@ def __init__(self, project=None, domain=None, workflow=None, matching_attributes self.workflow = workflow if matching_attributes is 
not None: self.matching_attributes = matching_attributes + if org is not None: + self.org = org @property def project(self): @@ -154,6 +159,29 @@ def matching_attributes(self, matching_attributes): self._matching_attributes = matching_attributes + @property + def org(self): + """Gets the org of this AdminWorkflowAttributes. # noqa: E501 + + Optional, org key applied to the attributes. # noqa: E501 + + :return: The org of this AdminWorkflowAttributes. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this AdminWorkflowAttributes. + + Optional, org key applied to the attributes. # noqa: E501 + + :param org: The org of this AdminWorkflowAttributes. # noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_workflow_attributes_delete_request.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_workflow_attributes_delete_request.py index 348cd98213..445583cf96 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_workflow_attributes_delete_request.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/admin_workflow_attributes_delete_request.py @@ -36,23 +36,26 @@ class AdminWorkflowAttributesDeleteRequest(object): 'project': 'str', 'domain': 'str', 'workflow': 'str', - 'resource_type': 'AdminMatchableResource' + 'resource_type': 'AdminMatchableResource', + 'org': 'str' } attribute_map = { 'project': 'project', 'domain': 'domain', 'workflow': 'workflow', - 'resource_type': 'resource_type' + 'resource_type': 'resource_type', + 'org': 'org' } - def __init__(self, project=None, domain=None, workflow=None, resource_type=None): # noqa: E501 + def __init__(self, project=None, domain=None, workflow=None, resource_type=None, org=None): # noqa: E501 
"""AdminWorkflowAttributesDeleteRequest - a model defined in Swagger""" # noqa: E501 self._project = None self._domain = None self._workflow = None self._resource_type = None + self._org = None self.discriminator = None if project is not None: @@ -63,6 +66,8 @@ def __init__(self, project=None, domain=None, workflow=None, resource_type=None) self.workflow = workflow if resource_type is not None: self.resource_type = resource_type + if org is not None: + self.org = org @property def project(self): @@ -148,6 +153,29 @@ def resource_type(self, resource_type): self._resource_type = resource_type + @property + def org(self): + """Gets the org of this AdminWorkflowAttributesDeleteRequest. # noqa: E501 + + Optional, org key applied to the attributes. # noqa: E501 + + :return: The org of this AdminWorkflowAttributesDeleteRequest. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this AdminWorkflowAttributesDeleteRequest. + + Optional, org key applied to the attributes. # noqa: E501 + + :param org: The org of this AdminWorkflowAttributesDeleteRequest. 
# noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_artifact_binding_data.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_artifact_binding_data.py index 01bdc123bd..0375fc40bb 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_artifact_binding_data.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_artifact_binding_data.py @@ -33,20 +33,23 @@ class CoreArtifactBindingData(object): swagger_types = { 'index': 'int', 'partition_key': 'str', + 'bind_to_time_partition': 'bool', 'transform': 'str' } attribute_map = { 'index': 'index', 'partition_key': 'partition_key', + 'bind_to_time_partition': 'bind_to_time_partition', 'transform': 'transform' } - def __init__(self, index=None, partition_key=None, transform=None): # noqa: E501 + def __init__(self, index=None, partition_key=None, bind_to_time_partition=None, transform=None): # noqa: E501 """CoreArtifactBindingData - a model defined in Swagger""" # noqa: E501 self._index = None self._partition_key = None + self._bind_to_time_partition = None self._transform = None self.discriminator = None @@ -54,6 +57,8 @@ def __init__(self, index=None, partition_key=None, transform=None): # noqa: E50 self.index = index if partition_key is not None: self.partition_key = partition_key + if bind_to_time_partition is not None: + self.bind_to_time_partition = bind_to_time_partition if transform is not None: self.transform = transform @@ -99,6 +104,27 @@ def partition_key(self, partition_key): self._partition_key = partition_key + @property + def bind_to_time_partition(self): + """Gets the bind_to_time_partition of this CoreArtifactBindingData. # noqa: E501 + + + :return: The bind_to_time_partition of this CoreArtifactBindingData. 
# noqa: E501 + :rtype: bool + """ + return self._bind_to_time_partition + + @bind_to_time_partition.setter + def bind_to_time_partition(self, bind_to_time_partition): + """Sets the bind_to_time_partition of this CoreArtifactBindingData. + + + :param bind_to_time_partition: The bind_to_time_partition of this CoreArtifactBindingData. # noqa: E501 + :type: bool + """ + + self._bind_to_time_partition = bind_to_time_partition + @property def transform(self): """Gets the transform of this CoreArtifactBindingData. # noqa: E501 diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_artifact_id.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_artifact_id.py index 3d643ce5b9..26a7a4f3fb 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_artifact_id.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_artifact_id.py @@ -18,6 +18,7 @@ from flyteadmin.models.core_artifact_key import CoreArtifactKey # noqa: F401,E501 from flyteadmin.models.core_partitions import CorePartitions # noqa: F401,E501 +from flyteadmin.models.core_time_partition import CoreTimePartition # noqa: F401,E501 class CoreArtifactID(object): @@ -36,21 +37,24 @@ class CoreArtifactID(object): swagger_types = { 'artifact_key': 'CoreArtifactKey', 'version': 'str', - 'partitions': 'CorePartitions' + 'partitions': 'CorePartitions', + 'time_partition': 'CoreTimePartition' } attribute_map = { 'artifact_key': 'artifact_key', 'version': 'version', - 'partitions': 'partitions' + 'partitions': 'partitions', + 'time_partition': 'time_partition' } - def __init__(self, artifact_key=None, version=None, partitions=None): # noqa: E501 + def __init__(self, artifact_key=None, version=None, partitions=None, time_partition=None): # noqa: E501 """CoreArtifactID - a model defined in Swagger""" # noqa: E501 self._artifact_key = None self._version = None self._partitions = None + self._time_partition = None 
self.discriminator = None if artifact_key is not None: @@ -59,6 +63,8 @@ def __init__(self, artifact_key=None, version=None, partitions=None): # noqa: E self.version = version if partitions is not None: self.partitions = partitions + if time_partition is not None: + self.time_partition = time_partition @property def artifact_key(self): @@ -106,6 +112,7 @@ def version(self, version): def partitions(self): """Gets the partitions of this CoreArtifactID. # noqa: E501 + Think of a partition as a tag on an Artifact, except it's a key-value pair. Different partitions naturally have different versions (execution ids). # noqa: E501 :return: The partitions of this CoreArtifactID. # noqa: E501 :rtype: CorePartitions @@ -116,6 +123,7 @@ def partitions(self): def partitions(self, partitions): """Sets the partitions of this CoreArtifactID. + Think of a partition as a tag on an Artifact, except it's a key-value pair. Different partitions naturally have different versions (execution ids). # noqa: E501 :param partitions: The partitions of this CoreArtifactID. # noqa: E501 :type: CorePartitions @@ -123,6 +131,29 @@ def partitions(self, partitions): self._partitions = partitions + @property + def time_partition(self): + """Gets the time_partition of this CoreArtifactID. # noqa: E501 + + There is no such thing as an empty time partition - if it's not set, then there is no time partition. # noqa: E501 + + :return: The time_partition of this CoreArtifactID. # noqa: E501 + :rtype: CoreTimePartition + """ + return self._time_partition + + @time_partition.setter + def time_partition(self, time_partition): + """Sets the time_partition of this CoreArtifactID. + + There is no such thing as an empty time partition - if it's not set, then there is no time partition. # noqa: E501 + + :param time_partition: The time_partition of this CoreArtifactID. 
# noqa: E501 + :type: CoreTimePartition + """ + + self._time_partition = time_partition + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_catalog_cache_status.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_catalog_cache_status.py index 03e6e78ab8..34da1d78af 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_catalog_cache_status.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_catalog_cache_status.py @@ -33,6 +33,7 @@ class CoreCatalogCacheStatus(object): LOOKUP_FAILURE = "CACHE_LOOKUP_FAILURE" PUT_FAILURE = "CACHE_PUT_FAILURE" SKIPPED = "CACHE_SKIPPED" + EVICTED = "CACHE_EVICTED" """ Attributes: diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_identifier.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_identifier.py index ae03fc69e6..1770753b3e 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_identifier.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_identifier.py @@ -37,7 +37,8 @@ class CoreIdentifier(object): 'project': 'str', 'domain': 'str', 'name': 'str', - 'version': 'str' + 'version': 'str', + 'org': 'str' } attribute_map = { @@ -45,10 +46,11 @@ class CoreIdentifier(object): 'project': 'project', 'domain': 'domain', 'name': 'name', - 'version': 'version' + 'version': 'version', + 'org': 'org' } - def __init__(self, resource_type=None, project=None, domain=None, name=None, version=None): # noqa: E501 + def __init__(self, resource_type=None, project=None, domain=None, name=None, version=None, org=None): # noqa: E501 """CoreIdentifier - a model defined in Swagger""" # noqa: E501 self._resource_type = None @@ -56,6 +58,7 @@ def __init__(self, resource_type=None, project=None, domain=None, name=None, ver 
self._domain = None self._name = None self._version = None + self._org = None self.discriminator = None if resource_type is not None: @@ -68,6 +71,8 @@ def __init__(self, resource_type=None, project=None, domain=None, name=None, ver self.name = name if version is not None: self.version = version + if org is not None: + self.org = org @property def resource_type(self): @@ -184,6 +189,29 @@ def version(self, version): self._version = version + @property + def org(self): + """Gets the org of this CoreIdentifier. # noqa: E501 + + Optional, org key applied to the resource. # noqa: E501 + + :return: The org of this CoreIdentifier. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this CoreIdentifier. + + Optional, org key applied to the resource. # noqa: E501 + + :param org: The org of this CoreIdentifier. # noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_label_value.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_label_value.py index a15f0fba07..915c128b6e 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_label_value.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_label_value.py @@ -35,26 +35,31 @@ class CoreLabelValue(object): """ swagger_types = { 'static_value': 'str', + 'time_value': 'datetime', 'triggered_binding': 'CoreArtifactBindingData', 'input_binding': 'CoreInputBindingData' } attribute_map = { 'static_value': 'static_value', + 'time_value': 'time_value', 'triggered_binding': 'triggered_binding', 'input_binding': 'input_binding' } - def __init__(self, static_value=None, triggered_binding=None, input_binding=None): # noqa: E501 + def __init__(self, static_value=None, time_value=None, triggered_binding=None, input_binding=None): # noqa: 
E501 """CoreLabelValue - a model defined in Swagger""" # noqa: E501 self._static_value = None + self._time_value = None self._triggered_binding = None self._input_binding = None self.discriminator = None if static_value is not None: self.static_value = static_value + if time_value is not None: + self.time_value = time_value if triggered_binding is not None: self.triggered_binding = triggered_binding if input_binding is not None: @@ -81,6 +86,27 @@ def static_value(self, static_value): self._static_value = static_value + @property + def time_value(self): + """Gets the time_value of this CoreLabelValue. # noqa: E501 + + + :return: The time_value of this CoreLabelValue. # noqa: E501 + :rtype: datetime + """ + return self._time_value + + @time_value.setter + def time_value(self, time_value): + """Sets the time_value of this CoreLabelValue. + + + :param time_value: The time_value of this CoreLabelValue. # noqa: E501 + :type: datetime + """ + + self._time_value = time_value + @property def triggered_binding(self): """Gets the triggered_binding of this CoreLabelValue. 
# noqa: E501 diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_time_partition.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_time_partition.py new file mode 100644 index 0000000000..5f8dfa515f --- /dev/null +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_time_partition.py @@ -0,0 +1,117 @@ +# coding: utf-8 + +""" + flyteidl/service/admin.proto + + No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501 + + OpenAPI spec version: version not set + + Generated by: https://github.com/swagger-api/swagger-codegen.git +""" + + +import pprint +import re # noqa: F401 + +import six + +from flyteadmin.models.core_label_value import CoreLabelValue # noqa: F401,E501 + + +class CoreTimePartition(object): + """NOTE: This class is auto generated by the swagger code generator program. + + Do not edit the class manually. + """ + + """ + Attributes: + swagger_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + swagger_types = { + 'value': 'CoreLabelValue' + } + + attribute_map = { + 'value': 'value' + } + + def __init__(self, value=None): # noqa: E501 + """CoreTimePartition - a model defined in Swagger""" # noqa: E501 + + self._value = None + self.discriminator = None + + if value is not None: + self.value = value + + @property + def value(self): + """Gets the value of this CoreTimePartition. # noqa: E501 + + + :return: The value of this CoreTimePartition. # noqa: E501 + :rtype: CoreLabelValue + """ + return self._value + + @value.setter + def value(self, value): + """Sets the value of this CoreTimePartition. + + + :param value: The value of this CoreTimePartition. 
# noqa: E501 + :type: CoreLabelValue + """ + + self._value = value + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.swagger_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + if issubclass(CoreTimePartition, dict): + for key, value in self.items(): + result[key] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, CoreTimePartition): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_workflow_execution_identifier.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_workflow_execution_identifier.py index 09bd015a36..aad31c1687 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_workflow_execution_identifier.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/flyteadmin/models/core_workflow_execution_identifier.py @@ -33,21 +33,24 @@ class CoreWorkflowExecutionIdentifier(object): swagger_types = { 'project': 'str', 'domain': 'str', - 'name': 'str' + 'name': 'str', + 'org': 'str' } attribute_map = { 'project': 'project', 'domain': 'domain', - 'name': 'name' + 'name': 'name', + 'org': 'org' 
} - def __init__(self, project=None, domain=None, name=None): # noqa: E501 + def __init__(self, project=None, domain=None, name=None, org=None): # noqa: E501 """CoreWorkflowExecutionIdentifier - a model defined in Swagger""" # noqa: E501 self._project = None self._domain = None self._name = None + self._org = None self.discriminator = None if project is not None: @@ -56,6 +59,8 @@ def __init__(self, project=None, domain=None, name=None): # noqa: E501 self.domain = domain if name is not None: self.name = name + if org is not None: + self.org = org @property def project(self): @@ -126,6 +131,29 @@ def name(self, name): self._name = name + @property + def org(self): + """Gets the org of this CoreWorkflowExecutionIdentifier. # noqa: E501 + + Optional, org key applied to the resource. # noqa: E501 + + :return: The org of this CoreWorkflowExecutionIdentifier. # noqa: E501 + :rtype: str + """ + return self._org + + @org.setter + def org(self, org): + """Sets the org of this CoreWorkflowExecutionIdentifier. + + Optional, org key applied to the resource. # noqa: E501 + + :param org: The org of this CoreWorkflowExecutionIdentifier. 
# noqa: E501 + :type: str + """ + + self._org = org + def to_dict(self): """Returns the model properties as a dict""" result = {} diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/test/test_admin_dynamic_node_workflow_response.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/test/test_admin_dynamic_node_workflow_response.py new file mode 100644 index 0000000000..c6b3cb0860 --- /dev/null +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/test/test_admin_dynamic_node_workflow_response.py @@ -0,0 +1,40 @@ +# coding: utf-8 + +""" + flyteidl/service/admin.proto + + No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501 + + OpenAPI spec version: version not set + + Generated by: https://github.com/swagger-api/swagger-codegen.git +""" + + +from __future__ import absolute_import + +import unittest + +import flyteadmin +from flyteadmin.models.admin_dynamic_node_workflow_response import AdminDynamicNodeWorkflowResponse # noqa: E501 +from flyteadmin.rest import ApiException + + +class TestAdminDynamicNodeWorkflowResponse(unittest.TestCase): + """AdminDynamicNodeWorkflowResponse unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def testAdminDynamicNodeWorkflowResponse(self): + """Test AdminDynamicNodeWorkflowResponse""" + # FIXME: construct object with mandatory attributes with example values + # model = flyteadmin.models.admin_dynamic_node_workflow_response.AdminDynamicNodeWorkflowResponse() # noqa: E501 + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/test/test_admin_service_api.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/test/test_admin_service_api.py index 680b2cd5f5..03e412d036 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/test/test_admin_service_api.py +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/test/test_admin_service_api.py @@ -36,6 +36,13 
@@ def test_create_execution(self): """ pass + def test_create_execution2(self): + """Test case for create_execution2 + + Triggers the creation of a :ref:`ref_flyteidl.admin.Execution` # noqa: E501 + """ + pass + def test_create_launch_plan(self): """Test case for create_launch_plan @@ -43,6 +50,13 @@ def test_create_launch_plan(self): """ pass + def test_create_launch_plan2(self): + """Test case for create_launch_plan2 + + Create and upload a :ref:`ref_flyteidl.admin.LaunchPlan` definition # noqa: E501 + """ + pass + def test_create_node_event(self): """Test case for create_node_event @@ -50,6 +64,13 @@ def test_create_node_event(self): """ pass + def test_create_node_event2(self): + """Test case for create_node_event2 + + Indicates a :ref:`ref_flyteidl.event.NodeExecutionEvent` has occurred. # noqa: E501 + """ + pass + def test_create_task(self): """Test case for create_task @@ -57,6 +78,13 @@ def test_create_task(self): """ pass + def test_create_task2(self): + """Test case for create_task2 + + Create and upload a :ref:`ref_flyteidl.admin.Task` definition # noqa: E501 + """ + pass + def test_create_task_event(self): """Test case for create_task_event @@ -64,6 +92,13 @@ def test_create_task_event(self): """ pass + def test_create_task_event2(self): + """Test case for create_task_event2 + + Indicates a :ref:`ref_flyteidl.event.TaskExecutionEvent` has occurred. 
# noqa: E501 + """ + pass + def test_create_workflow(self): """Test case for create_workflow @@ -71,6 +106,13 @@ def test_create_workflow(self): """ pass + def test_create_workflow2(self): + """Test case for create_workflow2 + + Create and upload a :ref:`ref_flyteidl.admin.Workflow` definition # noqa: E501 + """ + pass + def test_create_workflow_event(self): """Test case for create_workflow_event @@ -78,6 +120,13 @@ def test_create_workflow_event(self): """ pass + def test_create_workflow_event2(self): + """Test case for create_workflow_event2 + + Indicates a :ref:`ref_flyteidl.event.WorkflowExecutionEvent` has occurred. # noqa: E501 + """ + pass + def test_delete_project_attributes(self): """Test case for delete_project_attributes @@ -85,6 +134,13 @@ def test_delete_project_attributes(self): """ pass + def test_delete_project_attributes2(self): + """Test case for delete_project_attributes2 + + Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + """ + pass + def test_delete_project_domain_attributes(self): """Test case for delete_project_domain_attributes @@ -92,6 +148,13 @@ def test_delete_project_domain_attributes(self): """ pass + def test_delete_project_domain_attributes2(self): + """Test case for delete_project_domain_attributes2 + + Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + """ + pass + def test_delete_workflow_attributes(self): """Test case for delete_workflow_attributes @@ -99,6 +162,13 @@ def test_delete_workflow_attributes(self): """ pass + def test_delete_workflow_attributes2(self): + """Test case for delete_workflow_attributes2 + + Deletes custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. 
# noqa: E501 + """ + pass + def test_get_active_launch_plan(self): """Test case for get_active_launch_plan @@ -106,6 +176,13 @@ def test_get_active_launch_plan(self): """ pass + def test_get_active_launch_plan2(self): + """Test case for get_active_launch_plan2 + + Fetch the active version of a :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 + """ + pass + def test_get_description_entity(self): """Test case for get_description_entity @@ -113,6 +190,27 @@ def test_get_description_entity(self): """ pass + def test_get_description_entity2(self): + """Test case for get_description_entity2 + + Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. # noqa: E501 + """ + pass + + def test_get_dynamic_node_workflow(self): + """Test case for get_dynamic_node_workflow + + Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. # noqa: E501 + """ + pass + + def test_get_dynamic_node_workflow2(self): + """Test case for get_dynamic_node_workflow2 + + Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. # noqa: E501 + """ + pass + def test_get_execution(self): """Test case for get_execution @@ -120,6 +218,13 @@ def test_get_execution(self): """ pass + def test_get_execution2(self): + """Test case for get_execution2 + + Fetches a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + """ + pass + def test_get_execution_data(self): """Test case for get_execution_data @@ -127,6 +232,13 @@ def test_get_execution_data(self): """ pass + def test_get_execution_data2(self): + """Test case for get_execution_data2 + + Fetches input and output data for a :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + """ + pass + def test_get_execution_metrics(self): """Test case for get_execution_metrics @@ -134,6 +246,13 @@ def test_get_execution_metrics(self): """ pass + def test_get_execution_metrics2(self): + """Test case for get_execution_metrics2 + + Fetches runtime metrics for a :ref:`ref_flyteidl.admin.Execution`. 
# noqa: E501 + """ + pass + def test_get_launch_plan(self): """Test case for get_launch_plan @@ -141,6 +260,13 @@ def test_get_launch_plan(self): """ pass + def test_get_launch_plan2(self): + """Test case for get_launch_plan2 + + Fetch a :ref:`ref_flyteidl.admin.LaunchPlan` definition. # noqa: E501 + """ + pass + def test_get_named_entity(self): """Test case for get_named_entity @@ -148,6 +274,13 @@ def test_get_named_entity(self): """ pass + def test_get_named_entity2(self): + """Test case for get_named_entity2 + + Returns a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 + """ + pass + def test_get_node_execution(self): """Test case for get_node_execution @@ -155,6 +288,13 @@ def test_get_node_execution(self): """ pass + def test_get_node_execution2(self): + """Test case for get_node_execution2 + + Fetches a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + """ + pass + def test_get_node_execution_data(self): """Test case for get_node_execution_data @@ -162,6 +302,13 @@ def test_get_node_execution_data(self): """ pass + def test_get_node_execution_data2(self): + """Test case for get_node_execution_data2 + + Fetches input and output data for a :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + """ + pass + def test_get_project_attributes(self): """Test case for get_project_attributes @@ -169,6 +316,13 @@ def test_get_project_attributes(self): """ pass + def test_get_project_attributes2(self): + """Test case for get_project_attributes2 + + Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + """ + pass + def test_get_project_domain_attributes(self): """Test case for get_project_domain_attributes @@ -176,6 +330,13 @@ def test_get_project_domain_attributes(self): """ pass + def test_get_project_domain_attributes2(self): + """Test case for get_project_domain_attributes2 + + Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. 
# noqa: E501 + """ + pass + def test_get_task(self): """Test case for get_task @@ -183,6 +344,13 @@ def test_get_task(self): """ pass + def test_get_task2(self): + """Test case for get_task2 + + Fetch a :ref:`ref_flyteidl.admin.Task` definition. # noqa: E501 + """ + pass + def test_get_task_execution(self): """Test case for get_task_execution @@ -190,6 +358,13 @@ def test_get_task_execution(self): """ pass + def test_get_task_execution2(self): + """Test case for get_task_execution2 + + Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + """ + pass + def test_get_task_execution_data(self): """Test case for get_task_execution_data @@ -197,6 +372,13 @@ def test_get_task_execution_data(self): """ pass + def test_get_task_execution_data2(self): + """Test case for get_task_execution_data2 + + Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + """ + pass + def test_get_version(self): """Test case for get_version @@ -210,6 +392,13 @@ def test_get_workflow(self): """ pass + def test_get_workflow2(self): + """Test case for get_workflow2 + + Fetch a :ref:`ref_flyteidl.admin.Workflow` definition. # noqa: E501 + """ + pass + def test_get_workflow_attributes(self): """Test case for get_workflow_attributes @@ -217,6 +406,13 @@ def test_get_workflow_attributes(self): """ pass + def test_get_workflow_attributes2(self): + """Test case for get_workflow_attributes2 + + Fetches custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. # noqa: E501 + """ + pass + def test_list_active_launch_plans(self): """Test case for list_active_launch_plans @@ -224,6 +420,13 @@ def test_list_active_launch_plans(self): """ pass + def test_list_active_launch_plans2(self): + """Test case for list_active_launch_plans2 + + List active versions of :ref:`ref_flyteidl.admin.LaunchPlan`. 
# noqa: E501 + """ + pass + def test_list_description_entities(self): """Test case for list_description_entities @@ -238,6 +441,20 @@ def test_list_description_entities2(self): """ pass + def test_list_description_entities3(self): + """Test case for list_description_entities3 + + Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + """ + pass + + def test_list_description_entities4(self): + """Test case for list_description_entities4 + + Fetch a list of :ref:`ref_flyteidl.admin.DescriptionEntity` definitions. # noqa: E501 + """ + pass + def test_list_executions(self): """Test case for list_executions @@ -245,6 +462,13 @@ def test_list_executions(self): """ pass + def test_list_executions2(self): + """Test case for list_executions2 + + Fetch a list of :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + """ + pass + def test_list_launch_plan_ids(self): """Test case for list_launch_plan_ids @@ -252,6 +476,13 @@ def test_list_launch_plan_ids(self): """ pass + def test_list_launch_plan_ids2(self): + """Test case for list_launch_plan_ids2 + + Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of launch plan objects. # noqa: E501 + """ + pass + def test_list_launch_plans(self): """Test case for list_launch_plans @@ -266,6 +497,20 @@ def test_list_launch_plans2(self): """ pass + def test_list_launch_plans3(self): + """Test case for list_launch_plans3 + + Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. # noqa: E501 + """ + pass + + def test_list_launch_plans4(self): + """Test case for list_launch_plans4 + + Fetch a list of :ref:`ref_flyteidl.admin.LaunchPlan` definitions. 
# noqa: E501 + """ + pass + def test_list_matchable_attributes(self): """Test case for list_matchable_attributes @@ -273,6 +518,13 @@ def test_list_matchable_attributes(self): """ pass + def test_list_matchable_attributes2(self): + """Test case for list_matchable_attributes2 + + Lists custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a specific resource type. # noqa: E501 + """ + pass + def test_list_named_entities(self): """Test case for list_named_entities @@ -280,6 +532,13 @@ def test_list_named_entities(self): """ pass + def test_list_named_entities2(self): + """Test case for list_named_entities2 + + Returns a list of :ref:`ref_flyteidl.admin.NamedEntity` objects. # noqa: E501 + """ + pass + def test_list_node_executions(self): """Test case for list_node_executions @@ -287,6 +546,13 @@ def test_list_node_executions(self): """ pass + def test_list_node_executions2(self): + """Test case for list_node_executions2 + + Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. # noqa: E501 + """ + pass + def test_list_node_executions_for_task(self): """Test case for list_node_executions_for_task @@ -294,6 +560,13 @@ def test_list_node_executions_for_task(self): """ pass + def test_list_node_executions_for_task2(self): + """Test case for list_node_executions_for_task2 + + Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution` launched by the reference :ref:`ref_flyteidl.admin.TaskExecution`. 
# noqa: E501 + """ + pass + def test_list_projects(self): """Test case for list_projects @@ -301,6 +574,13 @@ def test_list_projects(self): """ pass + def test_list_projects2(self): + """Test case for list_projects2 + + Fetches a list of :ref:`ref_flyteidl.admin.Project` # noqa: E501 + """ + pass + def test_list_task_executions(self): """Test case for list_task_executions @@ -308,6 +588,13 @@ def test_list_task_executions(self): """ pass + def test_list_task_executions2(self): + """Test case for list_task_executions2 + + Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. # noqa: E501 + """ + pass + def test_list_task_ids(self): """Test case for list_task_ids @@ -315,6 +602,13 @@ def test_list_task_ids(self): """ pass + def test_list_task_ids2(self): + """Test case for list_task_ids2 + + Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. # noqa: E501 + """ + pass + def test_list_tasks(self): """Test case for list_tasks @@ -329,6 +623,20 @@ def test_list_tasks2(self): """ pass + def test_list_tasks3(self): + """Test case for list_tasks3 + + Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + """ + pass + + def test_list_tasks4(self): + """Test case for list_tasks4 + + Fetch a list of :ref:`ref_flyteidl.admin.Task` definitions. # noqa: E501 + """ + pass + def test_list_workflow_ids(self): """Test case for list_workflow_ids @@ -336,6 +644,13 @@ def test_list_workflow_ids(self): """ pass + def test_list_workflow_ids2(self): + """Test case for list_workflow_ids2 + + Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of workflow objects. # noqa: E501 + """ + pass + def test_list_workflows(self): """Test case for list_workflows @@ -350,6 +665,20 @@ def test_list_workflows2(self): """ pass + def test_list_workflows3(self): + """Test case for list_workflows3 + + Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. 
# noqa: E501 + """ + pass + + def test_list_workflows4(self): + """Test case for list_workflows4 + + Fetch a list of :ref:`ref_flyteidl.admin.Workflow` definitions. # noqa: E501 + """ + pass + def test_recover_execution(self): """Test case for recover_execution @@ -357,6 +686,13 @@ def test_recover_execution(self): """ pass + def test_recover_execution2(self): + """Test case for recover_execution2 + + Recreates a previously-run workflow execution that will only start executing from the last known failure point. In Recover mode, users cannot change any input parameters or update the version of the execution. This is extremely useful to recover from system errors and byzantine faults like - Loss of K8s cluster, bugs in platform or instability, machine failures, downstream system failures (downstream services), or simply to recover executions that failed because of retry exhaustion and should complete if tried again. See :ref:`ref_flyteidl.admin.ExecutionRecoverRequest` for more details. # noqa: E501 + """ + pass + def test_register_project(self): """Test case for register_project @@ -364,6 +700,13 @@ def test_register_project(self): """ pass + def test_register_project2(self): + """Test case for register_project2 + + Registers a :ref:`ref_flyteidl.admin.Project` with the Flyte deployment. # noqa: E501 + """ + pass + def test_relaunch_execution(self): """Test case for relaunch_execution @@ -371,6 +714,13 @@ def test_relaunch_execution(self): """ pass + def test_relaunch_execution2(self): + """Test case for relaunch_execution2 + + Triggers the creation of an identical :ref:`ref_flyteidl.admin.Execution` # noqa: E501 + """ + pass + def test_terminate_execution(self): """Test case for terminate_execution @@ -378,6 +728,13 @@ def test_terminate_execution(self): """ pass + def test_terminate_execution2(self): + """Test case for terminate_execution2 + + Terminates an in-progress :ref:`ref_flyteidl.admin.Execution`. 
# noqa: E501 + """ + pass + def test_update_execution(self): """Test case for update_execution @@ -385,6 +742,13 @@ def test_update_execution(self): """ pass + def test_update_execution2(self): + """Test case for update_execution2 + + Update execution belonging to project domain :ref:`ref_flyteidl.admin.Execution`. # noqa: E501 + """ + pass + def test_update_launch_plan(self): """Test case for update_launch_plan @@ -392,6 +756,13 @@ def test_update_launch_plan(self): """ pass + def test_update_launch_plan2(self): + """Test case for update_launch_plan2 + + Updates the status of a registered :ref:`ref_flyteidl.admin.LaunchPlan`. # noqa: E501 + """ + pass + def test_update_named_entity(self): """Test case for update_named_entity @@ -399,10 +770,24 @@ def test_update_named_entity(self): """ pass + def test_update_named_entity2(self): + """Test case for update_named_entity2 + + Updates a :ref:`ref_flyteidl.admin.NamedEntity` object. # noqa: E501 + """ + pass + def test_update_project(self): """Test case for update_project - Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. # noqa: E501 + Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. # noqa: E501 + """ + pass + + def test_update_project2(self): + """Test case for update_project2 + + Updates an existing :ref:`ref_flyteidl.admin.Project` flyteidl.admin.Project should be passed but the domains property should be empty; it will be ignored in the handler as domains cannot be updated via this API. 
# noqa: E501 """ pass @@ -413,6 +798,13 @@ def test_update_project_attributes(self): """ pass + def test_update_project_attributes2(self): + """Test case for update_project_attributes2 + + Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` at the project level # noqa: E501 + """ + pass + def test_update_project_domain_attributes(self): """Test case for update_project_domain_attributes @@ -420,6 +812,13 @@ def test_update_project_domain_attributes(self): """ pass + def test_update_project_domain_attributes2(self): + """Test case for update_project_domain_attributes2 + + Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project and domain. # noqa: E501 + """ + pass + def test_update_workflow_attributes(self): """Test case for update_workflow_attributes @@ -427,6 +826,13 @@ def test_update_workflow_attributes(self): """ pass + def test_update_workflow_attributes2(self): + """Test case for update_workflow_attributes2 + + Creates or updates custom :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for a project, domain and workflow. 
# noqa: E501 + """ + pass + if __name__ == '__main__': unittest.main() diff --git a/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/test/test_core_time_partition.py b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/test/test_core_time_partition.py new file mode 100644 index 0000000000..11a3aba894 --- /dev/null +++ b/flyteidl/gen/pb_python/flyteidl/service/flyteadmin/test/test_core_time_partition.py @@ -0,0 +1,40 @@ +# coding: utf-8 + +""" + flyteidl/service/admin.proto + + No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501 + + OpenAPI spec version: version not set + + Generated by: https://github.com/swagger-api/swagger-codegen.git +""" + + +from __future__ import absolute_import + +import unittest + +import flyteadmin +from flyteadmin.models.core_time_partition import CoreTimePartition # noqa: E501 +from flyteadmin.rest import ApiException + + +class TestCoreTimePartition(unittest.TestCase): + """CoreTimePartition unit test stubs""" + + def setUp(self): + pass + + def tearDown(self): + pass + + def testCoreTimePartition(self): + """Test CoreTimePartition""" + # FIXME: construct object with mandatory attributes with example values + # model = flyteadmin.models.core_time_partition.CoreTimePartition() # noqa: E501 + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/flyteidl/gen/pb_python/flyteidl/service/signal_pb2.py b/flyteidl/gen/pb_python/flyteidl/service/signal_pb2.py index 20e9fe2071..de5abc2b16 100644 --- a/flyteidl/gen/pb_python/flyteidl/service/signal_pb2.py +++ b/flyteidl/gen/pb_python/flyteidl/service/signal_pb2.py @@ -15,7 +15,7 @@ from flyteidl.admin import signal_pb2 as flyteidl_dot_admin_dot_signal__pb2 -DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x66lyteidl/service/signal.proto\x12\x10\x66lyteidl.service\x1a\x1cgoogle/api/annotations.proto\x1a\x1b\x66lyteidl/admin/signal.proto2\x9a\x03\n\rSignalService\x12W\n\x11GetOrCreateSignal\x12(.flyteidl.admin.SignalGetOrCreateRequest\x1a\x16.flyteidl.admin.Signal\"\x00\x12\xc1\x01\n\x0bListSignals\x12!.flyteidl.admin.SignalListRequest\x1a\x1a.flyteidl.admin.SignalList\"s\x82\xd3\xe4\x93\x02m\x12k/api/v1/signals/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}\x12l\n\tSetSignal\x12 .flyteidl.admin.SignalSetRequest\x1a!.flyteidl.admin.SignalSetResponse\"\x1a\x82\xd3\xe4\x93\x02\x14:\x01*\"\x0f/api/v1/signalsB\xc3\x01\n\x14\x63om.flyteidl.serviceB\x0bSignalProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service\xa2\x02\x03\x46SX\xaa\x02\x10\x46lyteidl.Service\xca\x02\x10\x46lyteidl\\Service\xe2\x02\x1c\x46lyteidl\\Service\\GPBMetadata\xea\x02\x11\x46lyteidl::Serviceb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1d\x66lyteidl/service/signal.proto\x12\x10\x66lyteidl.service\x1a\x1cgoogle/api/annotations.proto\x1a\x1b\x66lyteidl/admin/signal.proto2\xde\x04\n\rSignalService\x12W\n\x11GetOrCreateSignal\x12(.flyteidl.admin.SignalGetOrCreateRequest\x1a\x16.flyteidl.admin.Signal\"\x00\x12\xd4\x02\n\x0bListSignals\x12!.flyteidl.admin.SignalListRequest\x1a\x1a.flyteidl.admin.SignalList\"\x85\x02\x82\xd3\xe4\x93\x02\xfe\x01Z\x8e\x01\x12\x8b\x01/api/v1/signals/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}\x12k/api/v1/signals/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}\x12\x9c\x01\n\tSetSignal\x12 
.flyteidl.admin.SignalSetRequest\x1a!.flyteidl.admin.SignalSetResponse\"J\x82\xd3\xe4\x93\x02\x44:\x01*Z.:\x01*\")/api/v1/signals/org/{id.execution_id.org}\"\x0f/api/v1/signalsB\xc3\x01\n\x14\x63om.flyteidl.serviceB\x0bSignalProtoP\x01Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service\xa2\x02\x03\x46SX\xaa\x02\x10\x46lyteidl.Service\xca\x02\x10\x46lyteidl\\Service\xe2\x02\x1c\x46lyteidl\\Service\\GPBMetadata\xea\x02\x11\x46lyteidl::Serviceb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -25,9 +25,9 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'\n\024com.flyteidl.serviceB\013SignalProtoP\001Z=github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service\242\002\003FSX\252\002\020Flyteidl.Service\312\002\020Flyteidl\\Service\342\002\034Flyteidl\\Service\\GPBMetadata\352\002\021Flyteidl::Service' _SIGNALSERVICE.methods_by_name['ListSignals']._options = None - _SIGNALSERVICE.methods_by_name['ListSignals']._serialized_options = b'\202\323\344\223\002m\022k/api/v1/signals/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}' + _SIGNALSERVICE.methods_by_name['ListSignals']._serialized_options = b'\202\323\344\223\002\376\001Z\216\001\022\213\001/api/v1/signals/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}\022k/api/v1/signals/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}' _SIGNALSERVICE.methods_by_name['SetSignal']._options = None - _SIGNALSERVICE.methods_by_name['SetSignal']._serialized_options = b'\202\323\344\223\002\024:\001*\"\017/api/v1/signals' + _SIGNALSERVICE.methods_by_name['SetSignal']._serialized_options = b'\202\323\344\223\002D:\001*Z.:\001*\")/api/v1/signals/org/{id.execution_id.org}\"\017/api/v1/signals' _globals['_SIGNALSERVICE']._serialized_start=111 - _globals['_SIGNALSERVICE']._serialized_end=521 + 
_globals['_SIGNALSERVICE']._serialized_end=717 # @@protoc_insertion_point(module_scope) diff --git a/flyteidl/gen/pb_rust/datacatalog.rs b/flyteidl/gen/pb_rust/datacatalog.rs index 8c8a79dcc7..ac2c695cab 100644 --- a/flyteidl/gen/pb_rust/datacatalog.rs +++ b/flyteidl/gen/pb_rust/datacatalog.rs @@ -290,6 +290,9 @@ pub struct DatasetId { /// UUID for the dataset (if set the above fields are optional) #[prost(string, tag="5")] pub uuid: ::prost::alloc::string::String, + /// Optional, org key applied to the resource. + #[prost(string, tag="6")] + pub org: ::prost::alloc::string::String, } /// /// Artifact message. It is composed of several string fields. @@ -469,7 +472,7 @@ pub struct KeyValuePair { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DatasetPropertyFilter { - #[prost(oneof="dataset_property_filter::Property", tags="1, 2, 3, 4")] + #[prost(oneof="dataset_property_filter::Property", tags="1, 2, 3, 4, 5")] pub property: ::core::option::Option, } /// Nested message and enum types in `DatasetPropertyFilter`. @@ -485,6 +488,9 @@ pub mod dataset_property_filter { Domain(::prost::alloc::string::String), #[prost(string, tag="4")] Version(::prost::alloc::string::String), + /// Optional, org key applied to the dataset. + #[prost(string, tag="5")] + Org(::prost::alloc::string::String), } } /// Pagination options for making list requests diff --git a/flyteidl/gen/pb_rust/flyteidl.admin.rs b/flyteidl/gen/pb_rust/flyteidl.admin.rs index dc1b065bfa..4f61389a42 100644 --- a/flyteidl/gen/pb_rust/flyteidl.admin.rs +++ b/flyteidl/gen/pb_rust/flyteidl.admin.rs @@ -87,7 +87,8 @@ pub struct GetTaskResponse { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Resource { - /// The state of the execution is used to control its visibility in the UI/CLI. + /// DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI. 
+ #[deprecated] #[prost(enumeration="State", tag="1")] pub state: i32, /// The outputs of the execution. It's typically used by sql task. Agent service will create a @@ -101,6 +102,9 @@ pub struct Resource { /// log information for the task execution. #[prost(message, repeated, tag="4")] pub log_links: ::prost::alloc::vec::Vec, + /// The phase of the execution is used to determine the phase of the plugin's execution. + #[prost(enumeration="super::core::task_execution::Phase", tag="5")] + pub phase: i32, } /// A message used to delete a task. #[allow(clippy::derive_partial_eq_without_eq)] @@ -156,6 +160,68 @@ pub struct ListAgentsResponse { #[prost(message, repeated, tag="1")] pub agents: ::prost::alloc::vec::Vec, } +/// A request to get the metrics from a task execution. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetTaskMetricsRequest { + /// A predefined yet extensible Task type identifier. + #[prost(string, tag="1")] + pub task_type: ::prost::alloc::string::String, + /// Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). + #[prost(bytes="vec", tag="2")] + pub resource_meta: ::prost::alloc::vec::Vec, + /// The metrics to query. If empty, will return a default set of metrics. + /// e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG + #[prost(string, repeated, tag="3")] + pub queries: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// Start timestamp, inclusive. + #[prost(message, optional, tag="4")] + pub start_time: ::core::option::Option<::prost_types::Timestamp>, + /// End timestamp, inclusive.. + #[prost(message, optional, tag="5")] + pub end_time: ::core::option::Option<::prost_types::Timestamp>, + /// Query resolution step width in duration format or float number of seconds. 
+ #[prost(message, optional, tag="6")] + pub step: ::core::option::Option<::prost_types::Duration>, +} +/// A response containing a list of metrics for a task execution. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetTaskMetricsResponse { + /// The execution metric results. + #[prost(message, repeated, tag="1")] + pub results: ::prost::alloc::vec::Vec, +} +/// A request to get the log from a task execution. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetTaskLogsRequest { + /// A predefined yet extensible Task type identifier. + #[prost(string, tag="1")] + pub task_type: ::prost::alloc::string::String, + /// Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). + #[prost(bytes="vec", tag="2")] + pub resource_meta: ::prost::alloc::vec::Vec, + /// Number of lines to return. + #[prost(uint64, tag="3")] + pub lines: u64, + /// In the case of multiple pages of results, the server-provided token can be used to fetch the next page + /// in a query. If there are no more results, this value will be empty. + #[prost(string, tag="4")] + pub token: ::prost::alloc::string::String, +} +/// A response containing the logs for a task execution. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetTaskLogsResponse { + /// The execution log results. + #[prost(string, repeated, tag="1")] + pub results: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// In the case of multiple pages of results, the server-provided token can be used to fetch the next page + /// in a query. If there are no more results, this value will be empty. + #[prost(string, tag="2")] + pub token: ::prost::alloc::string::String, +} /// The state of the execution is used to control its visibility in the UI/CLI. 
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] @@ -218,6 +284,9 @@ pub struct NamedEntityIdentifier { /// +optional - in certain contexts - like 'List API', 'Launch plans' #[prost(string, tag="3")] pub name: ::prost::alloc::string::String, + /// Optional, org key applied to the resource. + #[prost(string, tag="4")] + pub org: ::prost::alloc::string::String, } /// Additional metadata around a named entity. #[allow(clippy::derive_partial_eq_without_eq)] @@ -319,6 +388,9 @@ pub struct NamedEntityIdentifierListRequest { /// +optional #[prost(string, tag="6")] pub filters: ::prost::alloc::string::String, + /// Optional, org key applied to the resource. + #[prost(string, tag="7")] + pub org: ::prost::alloc::string::String, } /// Represents a request structure to list NamedEntity objects #[allow(clippy::derive_partial_eq_without_eq)] @@ -351,6 +423,9 @@ pub struct NamedEntityListRequest { /// +optional #[prost(string, tag="7")] pub filters: ::prost::alloc::string::String, + /// Optional, org key applied to the resource. + #[prost(string, tag="8")] + pub org: ::prost::alloc::string::String, } /// Represents a list of NamedEntityIdentifiers. #[allow(clippy::derive_partial_eq_without_eq)] @@ -865,6 +940,9 @@ pub struct ExecutionCreateRequest { /// +optional #[prost(message, optional, tag="5")] pub inputs: ::core::option::Option, + /// Optional, org key applied to the resource. + #[prost(string, tag="6")] + pub org: ::prost::alloc::string::String, } /// Request to relaunch the referenced execution. #[allow(clippy::derive_partial_eq_without_eq)] @@ -1617,7 +1695,7 @@ pub struct ActiveLaunchPlanRequest { #[prost(message, optional, tag="1")] pub id: ::core::option::Option, } -/// Represents a request structure to list active launch plans within a project/domain. +/// Represents a request structure to list active launch plans within a project/domain and optional org. 
/// See :ref:`ref_flyteidl.admin.LaunchPlan` for more details #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -1643,6 +1721,9 @@ pub struct ActiveLaunchPlanListRequest { /// +optional #[prost(message, optional, tag="5")] pub sort_by: ::core::option::Option, + /// Optional, org key applied to the resource. + #[prost(string, tag="6")] + pub org: ::prost::alloc::string::String, } /// By default any launch plan regardless of state can be used to launch a workflow execution. /// However, at most one version of a launch plan @@ -1839,8 +1920,8 @@ pub mod matching_attributes { ClusterAssignment(super::ClusterAssignment), } } -/// Represents a custom set of attributes applied for either a domain; a domain and project; or -/// domain, project and workflow name. +/// Represents a custom set of attributes applied for either a domain (and optional org); a domain and project (and optional org); +/// or domain, project and workflow name (and optional org). /// These are used to override system level defaults for kubernetes cluster resource management, /// default execution values, and more all across different levels of specificity. #[allow(clippy::derive_partial_eq_without_eq)] @@ -1856,6 +1937,9 @@ pub struct MatchableAttributesConfiguration { pub workflow: ::prost::alloc::string::String, #[prost(string, tag="5")] pub launch_plan: ::prost::alloc::string::String, + /// Optional, org key applied to the resource. + #[prost(string, tag="6")] + pub org: ::prost::alloc::string::String, } /// Request all matching resource attributes for a resource type. /// See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details @@ -1865,6 +1949,9 @@ pub struct ListMatchableAttributesRequest { /// +required #[prost(enumeration="MatchableResource", tag="1")] pub resource_type: i32, + /// Optional, org filter applied to list project requests. 
+ #[prost(string, tag="2")] + pub org: ::prost::alloc::string::String, } /// Response for a request for all matching resource attributes for a resource type. /// See :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` for more details @@ -2193,6 +2280,18 @@ pub struct NodeExecutionGetDataResponse { #[prost(message, optional, tag="17")] pub flyte_urls: ::core::option::Option, } +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetDynamicNodeWorkflowRequest { + #[prost(message, optional, tag="1")] + pub id: ::core::option::Option, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DynamicNodeWorkflowResponse { + #[prost(message, optional, tag="1")] + pub compiled_workflow: ::core::option::Option, +} /// Represents the Email object that is sent to a publisher/subscriber /// to forward the notification. /// Note: This is internal to Admin and doesn't need to be exposed to other components. @@ -2249,6 +2348,9 @@ pub struct Project { pub labels: ::core::option::Option, #[prost(enumeration="project::ProjectState", tag="6")] pub state: i32, + /// Optional, org key applied to the resource. + #[prost(string, tag="7")] + pub org: ::prost::alloc::string::String, } /// Nested message and enum types in `Project`. pub mod project { @@ -2321,6 +2423,9 @@ pub struct ProjectListRequest { /// +optional #[prost(message, optional, tag="4")] pub sort_by: ::core::option::Option, + /// Optional, org filter applied to list project requests. + #[prost(string, tag="5")] + pub org: ::prost::alloc::string::String, } /// Adds a new user-project within the Flyte deployment. /// See :ref:`ref_flyteidl.admin.Project` for more details @@ -2351,6 +2456,9 @@ pub struct ProjectAttributes { pub project: ::prost::alloc::string::String, #[prost(message, optional, tag="2")] pub matching_attributes: ::core::option::Option, + /// Optional, org key applied to the project. 
+ #[prost(string, tag="3")] + pub org: ::prost::alloc::string::String, } /// Sets custom attributes for a project /// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` @@ -2379,6 +2487,9 @@ pub struct ProjectAttributesGetRequest { /// +required #[prost(enumeration="MatchableResource", tag="2")] pub resource_type: i32, + /// Optional, org key applied to the project. + #[prost(string, tag="3")] + pub org: ::prost::alloc::string::String, } /// Response to get an individual project level attribute override. /// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` @@ -2401,6 +2512,9 @@ pub struct ProjectAttributesDeleteRequest { /// +required #[prost(enumeration="MatchableResource", tag="2")] pub resource_type: i32, + /// Optional, org key applied to the project. + #[prost(string, tag="3")] + pub org: ::prost::alloc::string::String, } /// Purposefully empty, may be populated in the future. #[allow(clippy::derive_partial_eq_without_eq)] @@ -2420,6 +2534,9 @@ pub struct ProjectDomainAttributes { pub domain: ::prost::alloc::string::String, #[prost(message, optional, tag="3")] pub matching_attributes: ::core::option::Option, + /// Optional, org key applied to the attributes. + #[prost(string, tag="4")] + pub org: ::prost::alloc::string::String, } /// Sets custom attributes for a project-domain combination. /// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` @@ -2452,6 +2569,9 @@ pub struct ProjectDomainAttributesGetRequest { /// +required #[prost(enumeration="MatchableResource", tag="3")] pub resource_type: i32, + /// Optional, org key applied to the attributes. + #[prost(string, tag="4")] + pub org: ::prost::alloc::string::String, } /// Response to get an individual project domain attribute override. 
/// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` @@ -2478,6 +2598,9 @@ pub struct ProjectDomainAttributesDeleteRequest { /// +required #[prost(enumeration="MatchableResource", tag="3")] pub resource_type: i32, + /// Optional, org key applied to the attributes. + #[prost(string, tag="4")] + pub org: ::prost::alloc::string::String, } /// Purposefully empty, may be populated in the future. #[allow(clippy::derive_partial_eq_without_eq)] @@ -2977,6 +3100,9 @@ pub struct WorkflowAttributes { pub workflow: ::prost::alloc::string::String, #[prost(message, optional, tag="4")] pub matching_attributes: ::core::option::Option, + /// Optional, org key applied to the attributes. + #[prost(string, tag="5")] + pub org: ::prost::alloc::string::String, } /// Sets custom attributes for a project, domain and workflow combination. /// For more info on matchable attributes, see :ref:`ref_flyteidl.admin.MatchableAttributesConfiguration` @@ -3012,6 +3138,9 @@ pub struct WorkflowAttributesGetRequest { /// +required #[prost(enumeration="MatchableResource", tag="4")] pub resource_type: i32, + /// Optional, org key applied to the attributes. + #[prost(string, tag="5")] + pub org: ::prost::alloc::string::String, } /// Response to get an individual workflow attribute override. #[allow(clippy::derive_partial_eq_without_eq)] @@ -3041,6 +3170,9 @@ pub struct WorkflowAttributesDeleteRequest { /// +required #[prost(enumeration="MatchableResource", tag="4")] pub resource_type: i32, + /// Optional, org key applied to the attributes. + #[prost(string, tag="5")] + pub org: ::prost::alloc::string::String, } /// Purposefully empty, may be populated in the future. 
#[allow(clippy::derive_partial_eq_without_eq)] diff --git a/flyteidl/gen/pb_rust/flyteidl.artifact.rs b/flyteidl/gen/pb_rust/flyteidl.artifact.rs deleted file mode 100644 index e3bc2c72fa..0000000000 --- a/flyteidl/gen/pb_rust/flyteidl.artifact.rs +++ /dev/null @@ -1,248 +0,0 @@ -// @generated -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct Artifact { - #[prost(message, optional, tag="1")] - pub artifact_id: ::core::option::Option, - #[prost(message, optional, tag="2")] - pub spec: ::core::option::Option, - /// references the tag field in ArtifactTag - #[prost(string, repeated, tag="3")] - pub tags: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, - #[prost(message, optional, tag="4")] - pub source: ::core::option::Option, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct CreateArtifactRequest { - /// Specify just project/domain on creation - #[prost(message, optional, tag="1")] - pub artifact_key: ::core::option::Option, - #[prost(string, tag="3")] - pub version: ::prost::alloc::string::String, - #[prost(message, optional, tag="2")] - pub spec: ::core::option::Option, - #[prost(map="string, string", tag="4")] - pub partitions: ::std::collections::HashMap<::prost::alloc::string::String, ::prost::alloc::string::String>, - #[prost(string, tag="5")] - pub tag: ::prost::alloc::string::String, - #[prost(message, optional, tag="6")] - pub source: ::core::option::Option, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct ArtifactSource { - #[prost(message, optional, tag="1")] - pub workflow_execution: ::core::option::Option, - #[prost(string, tag="2")] - pub node_id: ::prost::alloc::string::String, - #[prost(message, optional, tag="3")] - pub task_id: ::core::option::Option, - #[prost(uint32, tag="4")] - pub retry_attempt: u32, - /// Uploads, either from the UI or from the CLI, or 
FlyteRemote, will have this. - #[prost(string, tag="5")] - pub principal: ::prost::alloc::string::String, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct ArtifactSpec { - #[prost(message, optional, tag="1")] - pub value: ::core::option::Option, - /// This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but - /// forgets to change the name, that is okay. And the reason why this is a separate field is because adding the - /// type to all Literals is a lot of work. - #[prost(message, optional, tag="2")] - pub r#type: ::core::option::Option, - #[prost(string, tag="3")] - pub short_description: ::prost::alloc::string::String, - /// Additional user metadata - #[prost(message, optional, tag="4")] - pub user_metadata: ::core::option::Option<::prost_types::Any>, - #[prost(string, tag="5")] - pub metadata_type: ::prost::alloc::string::String, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct CreateArtifactResponse { - #[prost(message, optional, tag="1")] - pub artifact: ::core::option::Option, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct GetArtifactRequest { - #[prost(message, optional, tag="1")] - pub query: ::core::option::Option, - /// If false, then long_description is not returned. - #[prost(bool, tag="2")] - pub details: bool, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct GetArtifactResponse { - #[prost(message, optional, tag="1")] - pub artifact: ::core::option::Option, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct SearchOptions { - /// If true, this means a strict partition search. meaning if you don't specify the partition - /// field, that will mean, non-partitioned, rather than any partition. 
- #[prost(bool, tag="1")] - pub strict_partitions: bool, - /// If true, only one artifact per key will be returned. It will be the latest one by creation time. - #[prost(bool, tag="2")] - pub latest_by_key: bool, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct SearchArtifactsRequest { - #[prost(message, optional, tag="1")] - pub artifact_key: ::core::option::Option, - #[prost(message, optional, tag="2")] - pub partitions: ::core::option::Option, - #[prost(string, tag="3")] - pub principal: ::prost::alloc::string::String, - #[prost(string, tag="4")] - pub version: ::prost::alloc::string::String, - #[prost(message, optional, tag="5")] - pub options: ::core::option::Option, - #[prost(string, tag="6")] - pub token: ::prost::alloc::string::String, - #[prost(int32, tag="7")] - pub limit: i32, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct SearchArtifactsResponse { - /// If artifact specs are not requested, the resultant artifacts may be empty. - #[prost(message, repeated, tag="1")] - pub artifacts: ::prost::alloc::vec::Vec, - /// continuation token if relevant. - #[prost(string, tag="2")] - pub token: ::prost::alloc::string::String, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct FindByWorkflowExecRequest { - #[prost(message, optional, tag="1")] - pub exec_id: ::core::option::Option, - #[prost(enumeration="find_by_workflow_exec_request::Direction", tag="2")] - pub direction: i32, -} -/// Nested message and enum types in `FindByWorkflowExecRequest`. -pub mod find_by_workflow_exec_request { - #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] - #[repr(i32)] - pub enum Direction { - Inputs = 0, - Outputs = 1, - } - impl Direction { - /// String value of the enum field names used in the ProtoBuf definition. 
- /// - /// The values are not transformed in any way and thus are considered stable - /// (if the ProtoBuf definition does not change) and safe for programmatic use. - pub fn as_str_name(&self) -> &'static str { - match self { - Direction::Inputs => "INPUTS", - Direction::Outputs => "OUTPUTS", - } - } - /// Creates an enum from field names used in the ProtoBuf definition. - pub fn from_str_name(value: &str) -> ::core::option::Option { - match value { - "INPUTS" => Some(Self::Inputs), - "OUTPUTS" => Some(Self::Outputs), - _ => None, - } - } - } -} -/// Aliases identify a particular version of an artifact. They are different than tags in that they -/// have to be unique for a given artifact project/domain/name. That is, for a given project/domain/name/kind, -/// at most one version can have any given value at any point. -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct AddTagRequest { - #[prost(message, optional, tag="1")] - pub artifact_id: ::core::option::Option, - #[prost(string, tag="2")] - pub value: ::prost::alloc::string::String, - /// If true, and another version already has the specified kind/value, set this version instead - #[prost(bool, tag="3")] - pub overwrite: bool, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct AddTagResponse { -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct CreateTriggerRequest { - #[prost(message, optional, tag="1")] - pub trigger_launch_plan: ::core::option::Option, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct CreateTriggerResponse { -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct DeleteTriggerRequest { - #[prost(message, optional, tag="1")] - pub trigger_id: ::core::option::Option, -} 
-#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct DeleteTriggerResponse { -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct ArtifactProducer { - /// These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in - /// Admin's domain. - #[prost(message, optional, tag="1")] - pub entity_id: ::core::option::Option, - #[prost(message, optional, tag="2")] - pub outputs: ::core::option::Option, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct RegisterProducerRequest { - #[prost(message, repeated, tag="1")] - pub producers: ::prost::alloc::vec::Vec, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct ArtifactConsumer { - /// These should all be launch plan IDs - #[prost(message, optional, tag="1")] - pub entity_id: ::core::option::Option, - #[prost(message, optional, tag="2")] - pub inputs: ::core::option::Option, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct RegisterConsumerRequest { - #[prost(message, repeated, tag="1")] - pub consumers: ::prost::alloc::vec::Vec, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct RegisterResponse { -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct ExecutionInputsRequest { - #[prost(message, optional, tag="1")] - pub execution_id: ::core::option::Option, - /// can make this a map in the future, currently no need. 
- #[prost(message, repeated, tag="2")] - pub inputs: ::prost::alloc::vec::Vec, -} -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct ExecutionInputsResponse { -} -// @@protoc_insertion_point(module) diff --git a/flyteidl/gen/pb_rust/flyteidl.core.rs b/flyteidl/gen/pb_rust/flyteidl.core.rs index 75bb7f2f64..64fb26d7d7 100644 --- a/flyteidl/gen/pb_rust/flyteidl.core.rs +++ b/flyteidl/gen/pb_rust/flyteidl.core.rs @@ -621,6 +621,9 @@ pub struct Identifier { /// Specific version of the resource. #[prost(string, tag="5")] pub version: ::prost::alloc::string::String, + /// Optional, org key applied to the resource. + #[prost(string, tag="6")] + pub org: ::prost::alloc::string::String, } /// Encapsulation of fields that uniquely identifies a Flyte workflow execution #[allow(clippy::derive_partial_eq_without_eq)] @@ -636,6 +639,9 @@ pub struct WorkflowExecutionIdentifier { /// User or system provided value for the resource. #[prost(string, tag="4")] pub name: ::prost::alloc::string::String, + /// Optional, org key applied to the resource. + #[prost(string, tag="5")] + pub org: ::prost::alloc::string::String, } /// Encapsulation of fields that identify a Flyte node execution entity. #[allow(clippy::derive_partial_eq_without_eq)] @@ -724,11 +730,24 @@ pub struct ArtifactKey { pub struct ArtifactBindingData { #[prost(uint32, tag="1")] pub index: u32, - /// These two fields are only relevant in the partition value case - #[prost(string, tag="2")] - pub partition_key: ::prost::alloc::string::String, - #[prost(string, tag="3")] + /// This is only relevant in the time partition case + #[prost(string, tag="4")] pub transform: ::prost::alloc::string::String, + /// These two fields are only relevant in the partition value case + #[prost(oneof="artifact_binding_data::PartitionData", tags="2, 3")] + pub partition_data: ::core::option::Option, +} +/// Nested message and enum types in `ArtifactBindingData`. 
+pub mod artifact_binding_data { + /// These two fields are only relevant in the partition value case + #[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum PartitionData { + #[prost(string, tag="2")] + PartitionKey(::prost::alloc::string::String), + #[prost(bool, tag="3")] + BindToTimePartition(bool), + } } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -739,7 +758,7 @@ pub struct InputBindingData { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LabelValue { - #[prost(oneof="label_value::Value", tags="1, 2, 3")] + #[prost(oneof="label_value::Value", tags="1, 2, 3, 4")] pub value: ::core::option::Option, } /// Nested message and enum types in `LabelValue`. @@ -747,11 +766,15 @@ pub mod label_value { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Value { + /// The string static value is for use in the Partitions object #[prost(string, tag="1")] StaticValue(::prost::alloc::string::String), + /// The time value is for use in the TimePartition case #[prost(message, tag="2")] - TriggeredBinding(super::ArtifactBindingData), + TimeValue(::prost_types::Timestamp), #[prost(message, tag="3")] + TriggeredBinding(super::ArtifactBindingData), + #[prost(message, tag="4")] InputBinding(super::InputBindingData), } } @@ -763,6 +786,12 @@ pub struct Partitions { } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] +pub struct TimePartition { + #[prost(message, optional, tag="1")] + pub value: ::core::option::Option, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ArtifactId { #[prost(message, optional, tag="1")] pub artifact_key: ::core::option::Option, @@ -770,27 +799,11 @@ pub struct ArtifactId { pub version: ::prost::alloc::string::String, /// Think of a partition as a tag 
on an Artifact, except it's a key-value pair. /// Different partitions naturally have different versions (execution ids). - /// This is a oneof because of partition querying... we need a way to distinguish between - /// a user not-specifying partitions when searching, and specifically searching for an Artifact - /// that is not partitioned (this can happen if an Artifact goes from partitioned to non- - /// partitioned across executions). - #[prost(oneof="artifact_id::Dimensions", tags="3")] - pub dimensions: ::core::option::Option, -} -/// Nested message and enum types in `ArtifactID`. -pub mod artifact_id { - /// Think of a partition as a tag on an Artifact, except it's a key-value pair. - /// Different partitions naturally have different versions (execution ids). - /// This is a oneof because of partition querying... we need a way to distinguish between - /// a user not-specifying partitions when searching, and specifically searching for an Artifact - /// that is not partitioned (this can happen if an Artifact goes from partitioned to non- - /// partitioned across executions). - #[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Oneof)] - pub enum Dimensions { - #[prost(message, tag="3")] - Partitions(super::Partitions), - } + #[prost(message, optional, tag="3")] + pub partitions: ::core::option::Option, + /// There is no such thing as an empty time partition - if it's not set, then there is no time partition. + #[prost(message, optional, tag="4")] + pub time_partition: ::core::option::Option, } #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -828,17 +841,6 @@ pub mod artifact_query { Binding(super::ArtifactBindingData), } } -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct Trigger { - /// This will be set to a launch plan type, but note that this is different than the actual launch plan type. 
- #[prost(message, optional, tag="1")] - pub trigger_id: ::core::option::Option, - /// These are partial artifact IDs that will be triggered on - /// Consider making these ArtifactQuery instead. - #[prost(message, repeated, tag="2")] - pub triggers: ::prost::alloc::vec::Vec, -} /// Defines a strongly typed variable. #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] @@ -2028,6 +2030,58 @@ pub mod quality_of_service { Spec(super::QualityOfServiceSpec), } } +/// Span represents a duration trace of Flyte execution. The id field denotes a Flyte execution entity or an operation +/// which uniquely identifies the Span. The spans attribute allows this Span to be further broken down into more +/// precise definitions. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Span { + /// start_time defines the instance this span began. + #[prost(message, optional, tag="1")] + pub start_time: ::core::option::Option<::prost_types::Timestamp>, + /// end_time defines the instance this span completed. + #[prost(message, optional, tag="2")] + pub end_time: ::core::option::Option<::prost_types::Timestamp>, + /// spans defines a collection of Spans that breakdown this execution. + #[prost(message, repeated, tag="7")] + pub spans: ::prost::alloc::vec::Vec, + #[prost(oneof="span::Id", tags="3, 4, 5, 6")] + pub id: ::core::option::Option, +} +/// Nested message and enum types in `Span`. +pub mod span { + #[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Id { + /// workflow_id is the id of the workflow execution this Span represents. + #[prost(message, tag="3")] + WorkflowId(super::WorkflowExecutionIdentifier), + /// node_id is the id of the node execution this Span represents. + #[prost(message, tag="4")] + NodeId(super::NodeExecutionIdentifier), + /// task_id is the id of the task execution this Span represents. 
+ #[prost(message, tag="5")] + TaskId(super::TaskExecutionIdentifier), + /// operation_id is the id of a unique operation that this Span represents. + #[prost(string, tag="6")] + OperationId(::prost::alloc::string::String), + } +} +/// ExecutionMetrics is a collection of metrics that are collected during the execution of a Flyte task. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ExecutionMetricResult { + /// The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG. + #[prost(string, tag="1")] + pub metric: ::prost::alloc::string::String, + /// The result data in prometheus range query result format + /// + /// This may include multiple time series, differentiated by their metric labels. + /// Start time is greater of (execution attempt start, 48h ago) + /// End time is lesser of (execution attempt end, now) + #[prost(message, optional, tag="2")] + pub data: ::core::option::Option<::prost_types::Struct>, +} /// Defines a 2-level tree where the root is a comparison operator and Operands are primitives or known variables. /// Each expression results in a boolean result. #[allow(clippy::derive_partial_eq_without_eq)] @@ -2711,6 +2765,8 @@ pub enum CatalogCacheStatus { CachePutFailure = 5, /// Used to indicate the cache lookup was skipped CacheSkipped = 6, + /// Used to indicate that the cache was evicted + CacheEvicted = 7, } impl CatalogCacheStatus { /// String value of the enum field names used in the ProtoBuf definition. @@ -2726,6 +2782,7 @@ impl CatalogCacheStatus { CatalogCacheStatus::CacheLookupFailure => "CACHE_LOOKUP_FAILURE", CatalogCacheStatus::CachePutFailure => "CACHE_PUT_FAILURE", CatalogCacheStatus::CacheSkipped => "CACHE_SKIPPED", + CatalogCacheStatus::CacheEvicted => "CACHE_EVICTED", } } /// Creates an enum from field names used in the ProtoBuf definition. 
@@ -2738,47 +2795,11 @@ impl CatalogCacheStatus { "CACHE_LOOKUP_FAILURE" => Some(Self::CacheLookupFailure), "CACHE_PUT_FAILURE" => Some(Self::CachePutFailure), "CACHE_SKIPPED" => Some(Self::CacheSkipped), + "CACHE_EVICTED" => Some(Self::CacheEvicted), _ => None, } } } -/// Span represents a duration trace of Flyte execution. The id field denotes a Flyte execution entity or an operation -/// which uniquely identifies the Span. The spans attribute allows this Span to be further broken down into more -/// precise definitions. -#[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Message)] -pub struct Span { - /// start_time defines the instance this span began. - #[prost(message, optional, tag="1")] - pub start_time: ::core::option::Option<::prost_types::Timestamp>, - /// end_time defines the instance this span completed. - #[prost(message, optional, tag="2")] - pub end_time: ::core::option::Option<::prost_types::Timestamp>, - /// spans defines a collection of Spans that breakdown this execution. - #[prost(message, repeated, tag="7")] - pub spans: ::prost::alloc::vec::Vec, - #[prost(oneof="span::Id", tags="3, 4, 5, 6")] - pub id: ::core::option::Option, -} -/// Nested message and enum types in `Span`. -pub mod span { - #[allow(clippy::derive_partial_eq_without_eq)] -#[derive(Clone, PartialEq, ::prost::Oneof)] - pub enum Id { - /// workflow_id is the id of the workflow execution this Span represents. - #[prost(message, tag="3")] - WorkflowId(super::WorkflowExecutionIdentifier), - /// node_id is the id of the node execution this Span represents. - #[prost(message, tag="4")] - NodeId(super::NodeExecutionIdentifier), - /// task_id is the id of the task execution this Span represents. - #[prost(message, tag="5")] - TaskId(super::TaskExecutionIdentifier), - /// operation_id is the id of a unique operation that this Span represents. 
- #[prost(string, tag="6")] - OperationId(::prost::alloc::string::String), - } -} /// Describes a set of tasks to execute and how the final outputs are produced. #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] diff --git a/flyteidl/gen/pb_rust/flyteidl.event.rs b/flyteidl/gen/pb_rust/flyteidl.event.rs index b7dd7ac9f3..703e5c9e9c 100644 --- a/flyteidl/gen/pb_rust/flyteidl.event.rs +++ b/flyteidl/gen/pb_rust/flyteidl.event.rs @@ -393,23 +393,19 @@ pub struct CloudEventWorkflowExecution { #[prost(message, optional, tag="1")] pub raw_event: ::core::option::Option, #[prost(message, optional, tag="2")] - pub output_data: ::core::option::Option, - #[prost(message, optional, tag="3")] pub output_interface: ::core::option::Option, - #[prost(message, optional, tag="4")] - pub input_data: ::core::option::Option, /// The following are ExecutionMetadata fields /// We can't have the ExecutionMetadata object directly because of import cycle - #[prost(message, repeated, tag="5")] + #[prost(message, repeated, tag="3")] pub artifact_ids: ::prost::alloc::vec::Vec, - #[prost(message, optional, tag="6")] + #[prost(message, optional, tag="4")] pub reference_execution: ::core::option::Option, - #[prost(string, tag="7")] + #[prost(string, tag="5")] pub principal: ::prost::alloc::string::String, /// The ID of the LP that generated the execution that generated the Artifact. /// Here for provenance information. /// Launch plan IDs are easier to get than workflow IDs so we'll use these for now. 
- #[prost(message, optional, tag="8")] + #[prost(message, optional, tag="6")] pub launch_plan_id: ::core::option::Option, } #[allow(clippy::derive_partial_eq_without_eq)] @@ -420,24 +416,19 @@ pub struct CloudEventNodeExecution { /// The relevant task execution if applicable #[prost(message, optional, tag="2")] pub task_exec_id: ::core::option::Option, - /// Hydrated output - #[prost(message, optional, tag="3")] - pub output_data: ::core::option::Option, /// The typed interface for the task that produced the event. - #[prost(message, optional, tag="4")] + #[prost(message, optional, tag="3")] pub output_interface: ::core::option::Option, - #[prost(message, optional, tag="5")] - pub input_data: ::core::option::Option, /// The following are ExecutionMetadata fields /// We can't have the ExecutionMetadata object directly because of import cycle - #[prost(message, repeated, tag="6")] + #[prost(message, repeated, tag="4")] pub artifact_ids: ::prost::alloc::vec::Vec, - #[prost(string, tag="7")] + #[prost(string, tag="5")] pub principal: ::prost::alloc::string::String, /// The ID of the LP that generated the execution that generated the Artifact. /// Here for provenance information. /// Launch plan IDs are easier to get than workflow IDs so we'll use these for now. - #[prost(message, optional, tag="8")] + #[prost(message, optional, tag="6")] pub launch_plan_id: ::core::option::Option, } #[allow(clippy::derive_partial_eq_without_eq)] @@ -458,12 +449,12 @@ pub struct CloudEventExecutionStart { pub launch_plan_id: ::core::option::Option, #[prost(message, optional, tag="3")] pub workflow_id: ::core::option::Option, - /// Artifact IDs found + /// Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run. #[prost(message, repeated, tag="4")] pub artifact_ids: ::prost::alloc::vec::Vec, - /// Artifact keys found. 
+ /// Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service. #[prost(string, repeated, tag="5")] - pub artifact_keys: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + pub artifact_trackers: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, #[prost(string, tag="6")] pub principal: ::prost::alloc::string::String, } diff --git a/flyteidl/generate_protos.sh b/flyteidl/generate_protos.sh index 7955536737..38d663e276 100755 --- a/flyteidl/generate_protos.sh +++ b/flyteidl/generate_protos.sh @@ -12,14 +12,13 @@ export LC_ALL=C.UTF-8 docker run --rm -u $(id -u):$(id -g) -v $DIR:/defs $LYFT_IMAGE -i ./protos -d protos/flyteidl/service --with_gateway -l go --go_source_relative docker run --rm -u $(id -u):$(id -g) -v $DIR:/defs $LYFT_IMAGE -i ./protos -d protos/flyteidl/admin --with_gateway -l go --go_source_relative -docker run --rm -u $(id -u):$(id -g) -v $DIR:/defs $LYFT_IMAGE -i ./protos -d protos/flyteidl/artifact --with_gateway -l go --go_source_relative docker run --rm -u $(id -u):$(id -g) -v $DIR:/defs $LYFT_IMAGE -i ./protos -d protos/flyteidl/core --with_gateway -l go --go_source_relative docker run --rm -u $(id -u):$(id -g) -v $DIR:/defs $LYFT_IMAGE -i ./protos -d protos/flyteidl/event --with_gateway -l go --go_source_relative docker run --rm -u $(id -u):$(id -g) -v $DIR:/defs $LYFT_IMAGE -i ./protos -d protos/flyteidl/plugins -l go --go_source_relative docker run --rm -u $(id -u):$(id -g) -v $DIR:/defs $LYFT_IMAGE -i ./protos -d protos/flyteidl/datacatalog -l go --go_source_relative languages=("cpp" "java") -idlfolders=("service" "admin" "core" "event" "plugins" "datacatalog" "artifact") +idlfolders=("service" "admin" "core" "event" "plugins" "datacatalog") for lang in "${languages[@]}" do diff --git a/flyteidl/go.mod b/flyteidl/go.mod index f410e79d6b..34466ad69e 100644 --- a/flyteidl/go.mod +++ b/flyteidl/go.mod @@ -2,6 +2,8 @@ module 
github.com/flyteorg/flyte/flyteidl go 1.21 +toolchain go1.21.3 + require ( github.com/antihax/optional v1.0.0 github.com/flyteorg/flyte/flytestdlib v0.0.0-00010101000000-000000000000 diff --git a/flyteidl/protos/flyteidl/admin/agent.proto b/flyteidl/protos/flyteidl/admin/agent.proto index 778fcc99d0..5b7043e86f 100644 --- a/flyteidl/protos/flyteidl/admin/agent.proto +++ b/flyteidl/protos/flyteidl/admin/agent.proto @@ -5,9 +5,12 @@ option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin import "flyteidl/core/literals.proto"; import "flyteidl/core/tasks.proto"; -import "flyteidl/core/interface.proto"; import "flyteidl/core/identifier.proto"; import "flyteidl/core/execution.proto"; +import "flyteidl/core/metrics.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/timestamp.proto"; + // The state of the execution is used to control its visibility in the UI/CLI. enum State { @@ -75,8 +78,8 @@ message GetTaskResponse { } message Resource { - // The state of the execution is used to control its visibility in the UI/CLI. - State state = 1; + // DEPRECATED. The state of the execution is used to control its visibility in the UI/CLI. + State state = 1 [deprecated = true]; // The outputs of the execution. It's typically used by sql task. Agent service will create a // Structured dataset pointing to the query result table. // +optional @@ -85,6 +88,8 @@ message Resource { string message = 3; // log information for the task execution. repeated core.TaskLog log_links = 4; + // The phase of the execution is used to determine the phase of the plugin's execution. + core.TaskExecution.Phase phase = 5; } // A message used to delete a task. @@ -125,3 +130,48 @@ message ListAgentsRequest {} message ListAgentsResponse { repeated Agent agents = 1; } + +// A request to get the metrics from a task execution. +message GetTaskMetricsRequest { + // A predefined yet extensible Task type identifier. 
+ string task_type = 1; + // Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). + bytes resource_meta = 2; + // The metrics to query. If empty, will return a default set of metrics. + // e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG + repeated string queries = 3; + // Start timestamp, inclusive. + google.protobuf.Timestamp start_time = 4; + // End timestamp, inclusive.. + google.protobuf.Timestamp end_time = 5; + // Query resolution step width in duration format or float number of seconds. + google.protobuf.Duration step = 6; +} + +// A response containing a list of metrics for a task execution. +message GetTaskMetricsResponse { + // The execution metric results. + repeated core.ExecutionMetricResult results = 1; +} + +// A request to get the log from a task execution. +message GetTaskLogsRequest { + // A predefined yet extensible Task type identifier. + string task_type = 1; + // Metadata is created by the agent. It could be a string (jobId) or a dict (more complex metadata). + bytes resource_meta = 2; + // Number of lines to return. + uint64 lines = 3; + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. + string token = 4; +} + +// A response containing the logs for a task execution. +message GetTaskLogsResponse { + // The execution log results. + repeated string results = 1; + // In the case of multiple pages of results, the server-provided token can be used to fetch the next page + // in a query. If there are no more results, this value will be empty. 
+ string token = 2; +} \ No newline at end of file diff --git a/flyteidl/protos/flyteidl/admin/common.proto b/flyteidl/protos/flyteidl/admin/common.proto index 93bf9b0e03..6c04b0531a 100644 --- a/flyteidl/protos/flyteidl/admin/common.proto +++ b/flyteidl/protos/flyteidl/admin/common.proto @@ -22,6 +22,9 @@ message NamedEntityIdentifier { // The combination of project + domain + name uniquely identifies the resource. // +optional - in certain contexts - like 'List API', 'Launch plans' string name = 3; + + // Optional, org key applied to the resource. + string org = 4; } // The status of the named entity is used to control its visibility in the UI. @@ -102,6 +105,9 @@ message NamedEntityIdentifierListRequest { // Indicates a list of filters passed as string. // +optional string filters = 6; + + // Optional, org key applied to the resource. + string org = 7; } // Represents a request structure to list NamedEntity objects @@ -129,6 +135,8 @@ message NamedEntityListRequest { // +optional string filters = 7; + // Optional, org key applied to the resource. + string org = 8; } // Represents a list of NamedEntityIdentifiers. diff --git a/flyteidl/protos/flyteidl/admin/execution.proto b/flyteidl/protos/flyteidl/admin/execution.proto index 7ddda0f233..f3dbad3fb3 100644 --- a/flyteidl/protos/flyteidl/admin/execution.proto +++ b/flyteidl/protos/flyteidl/admin/execution.proto @@ -39,6 +39,9 @@ message ExecutionCreateRequest { // included in this map. If not required and not provided, defaults apply. // +optional core.LiteralMap inputs = 5; + + // Optional, org key applied to the resource. + string org = 6; } // Request to relaunch the referenced execution. 
diff --git a/flyteidl/protos/flyteidl/admin/launch_plan.proto b/flyteidl/protos/flyteidl/admin/launch_plan.proto index 252d926c5d..a13429f751 100644 --- a/flyteidl/protos/flyteidl/admin/launch_plan.proto +++ b/flyteidl/protos/flyteidl/admin/launch_plan.proto @@ -193,7 +193,7 @@ message ActiveLaunchPlanRequest { NamedEntityIdentifier id = 1; } -// Represents a request structure to list active launch plans within a project/domain. +// Represents a request structure to list active launch plans within a project/domain and optional org. // See :ref:`ref_flyteidl.admin.LaunchPlan` for more details message ActiveLaunchPlanListRequest { // Name of the project that contains the identifiers. @@ -216,4 +216,7 @@ message ActiveLaunchPlanListRequest { // Sort ordering. // +optional Sort sort_by = 5; + + // Optional, org key applied to the resource. + string org = 6; } diff --git a/flyteidl/protos/flyteidl/admin/matchable_resource.proto b/flyteidl/protos/flyteidl/admin/matchable_resource.proto index bf93d0bd7e..cca6dbcc41 100644 --- a/flyteidl/protos/flyteidl/admin/matchable_resource.proto +++ b/flyteidl/protos/flyteidl/admin/matchable_resource.proto @@ -154,8 +154,8 @@ message MatchingAttributes { } } -// Represents a custom set of attributes applied for either a domain; a domain and project; or -// domain, project and workflow name. +// Represents a custom set of attributes applied for either a domain (and optional org); a domain and project (and optional org); +// or domain, project and workflow name (and optional org). // These are used to override system level defaults for kubernetes cluster resource management, // default execution values, and more all across different levels of specificity. message MatchableAttributesConfiguration { @@ -168,6 +168,9 @@ message MatchableAttributesConfiguration { string workflow = 4; string launch_plan = 5; + + // Optional, org key applied to the resource. + string org = 6; } // Request all matching resource attributes for a resource type. 
@@ -175,6 +178,9 @@ message MatchableAttributesConfiguration { message ListMatchableAttributesRequest { // +required MatchableResource resource_type = 1; + + // Optional, org filter applied to list project requests. + string org = 2; } // Response for a request for all matching resource attributes for a resource type. diff --git a/flyteidl/protos/flyteidl/admin/node_execution.proto b/flyteidl/protos/flyteidl/admin/node_execution.proto index 9c80e22efe..411201ea45 100644 --- a/flyteidl/protos/flyteidl/admin/node_execution.proto +++ b/flyteidl/protos/flyteidl/admin/node_execution.proto @@ -235,3 +235,11 @@ message NodeExecutionGetDataResponse { FlyteURLs flyte_urls = 17; } + +message GetDynamicNodeWorkflowRequest { + core.NodeExecutionIdentifier id = 1; +} + +message DynamicNodeWorkflowResponse { + core.CompiledWorkflowClosure compiled_workflow = 1; +} diff --git a/flyteidl/protos/flyteidl/admin/project.proto b/flyteidl/protos/flyteidl/admin/project.proto index 761ae8da05..7a1e57b764 100644 --- a/flyteidl/protos/flyteidl/admin/project.proto +++ b/flyteidl/protos/flyteidl/admin/project.proto @@ -45,6 +45,9 @@ message Project { SYSTEM_GENERATED = 2; } ProjectState state = 6; + + // Optional, org key applied to the resource. + string org = 7; } // Represents a list of projects. @@ -77,6 +80,9 @@ message ProjectListRequest { // Sort ordering. // +optional Sort sort_by = 4; + + // Optional, org filter applied to list project requests. + string org = 5; } // Adds a new user-project within the Flyte deployment. diff --git a/flyteidl/protos/flyteidl/admin/project_attributes.proto b/flyteidl/protos/flyteidl/admin/project_attributes.proto index e61515b0c7..2656ab25f5 100644 --- a/flyteidl/protos/flyteidl/admin/project_attributes.proto +++ b/flyteidl/protos/flyteidl/admin/project_attributes.proto @@ -12,6 +12,9 @@ message ProjectAttributes { string project = 1; MatchingAttributes matching_attributes = 2; + + // Optional, org key applied to the project. 
+ string org = 3; } // Sets custom attributes for a project @@ -35,6 +38,9 @@ message ProjectAttributesGetRequest { // Which type of matchable attributes to return. // +required MatchableResource resource_type = 2; + + // Optional, org key applied to the project. + string org = 3; } // Response to get an individual project level attribute override. @@ -53,6 +59,9 @@ message ProjectAttributesDeleteRequest { // Which type of matchable attributes to delete. // +required MatchableResource resource_type = 2; + + // Optional, org key applied to the project. + string org = 3; } // Purposefully empty, may be populated in the future. diff --git a/flyteidl/protos/flyteidl/admin/project_domain_attributes.proto b/flyteidl/protos/flyteidl/admin/project_domain_attributes.proto index d25ea92324..b493ae1178 100644 --- a/flyteidl/protos/flyteidl/admin/project_domain_attributes.proto +++ b/flyteidl/protos/flyteidl/admin/project_domain_attributes.proto @@ -14,7 +14,10 @@ message ProjectDomainAttributes { // Unique domain id for which this set of attributes will be applied. string domain = 2; - MatchingAttributes matching_attributes = 3; + MatchingAttributes matching_attributes = 3; + + // Optional, org key applied to the attributes. + string org = 4; } // Sets custom attributes for a project-domain combination. @@ -42,6 +45,9 @@ message ProjectDomainAttributesGetRequest { // Which type of matchable attributes to return. // +required MatchableResource resource_type = 3; + + // Optional, org key applied to the attributes. + string org = 4; } // Response to get an individual project domain attribute override. @@ -64,6 +70,9 @@ message ProjectDomainAttributesDeleteRequest { // Which type of matchable attributes to delete. // +required MatchableResource resource_type = 3; + + // Optional, org key applied to the attributes. + string org = 4; } // Purposefully empty, may be populated in the future. 
diff --git a/flyteidl/protos/flyteidl/admin/workflow_attributes.proto b/flyteidl/protos/flyteidl/admin/workflow_attributes.proto index fed42205a7..9767f00df7 100644 --- a/flyteidl/protos/flyteidl/admin/workflow_attributes.proto +++ b/flyteidl/protos/flyteidl/admin/workflow_attributes.proto @@ -18,6 +18,9 @@ message WorkflowAttributes { string workflow = 3; MatchingAttributes matching_attributes = 4; + + // Optional, org key applied to the attributes. + string org = 5; } // Sets custom attributes for a project, domain and workflow combination. @@ -48,6 +51,9 @@ message WorkflowAttributesGetRequest { // Which type of matchable attributes to return. // +required MatchableResource resource_type = 4; + + // Optional, org key applied to the attributes. + string org = 5; } // Response to get an individual workflow attribute override. @@ -73,6 +79,9 @@ message WorkflowAttributesDeleteRequest { // Which type of matchable attributes to delete. // +required MatchableResource resource_type = 4; + + // Optional, org key applied to the attributes. + string org = 5; } // Purposefully empty, may be populated in the future. 
diff --git a/flyteidl/protos/flyteidl/artifact/artifacts.proto b/flyteidl/protos/flyteidl/artifact/artifacts.proto deleted file mode 100644 index 43cbddf9c5..0000000000 --- a/flyteidl/protos/flyteidl/artifact/artifacts.proto +++ /dev/null @@ -1,224 +0,0 @@ -syntax = "proto3"; -package flyteidl.artifact; - -option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/artifact"; - -import "google/protobuf/any.proto"; -import "google/api/annotations.proto"; - -import "flyteidl/admin/launch_plan.proto"; -import "flyteidl/core/literals.proto"; -import "flyteidl/core/types.proto"; -import "flyteidl/core/identifier.proto"; -import "flyteidl/core/artifact_id.proto"; -import "flyteidl/core/interface.proto"; -import "flyteidl/event/cloudevents.proto"; - -message Artifact { - core.ArtifactID artifact_id = 1; - - ArtifactSpec spec = 2; - - // references the tag field in ArtifactTag - repeated string tags = 3; - - ArtifactSource source = 4; -} - -message CreateArtifactRequest { - // Specify just project/domain on creation - core.ArtifactKey artifact_key = 1; - - string version = 3; - - ArtifactSpec spec = 2; - - map partitions = 4; - - string tag = 5; - - ArtifactSource source = 6; -} - -message ArtifactSource { - core.WorkflowExecutionIdentifier workflow_execution = 1; - string node_id = 2; - core.Identifier task_id = 3; - uint32 retry_attempt = 4; - - // Uploads, either from the UI or from the CLI, or FlyteRemote, will have this. - string principal = 5; -} - -message ArtifactSpec { - core.Literal value = 1; - - // This type will not form part of the artifact key, so for user-named artifacts, if the user changes the type, but - // forgets to change the name, that is okay. And the reason why this is a separate field is because adding the - // type to all Literals is a lot of work. 
- core.LiteralType type = 2; - - string short_description = 3; - - // Additional user metadata - google.protobuf.Any user_metadata = 4; - - string metadata_type = 5; -} - - -message CreateArtifactResponse { - Artifact artifact = 1; -} - -message GetArtifactRequest { - core.ArtifactQuery query = 1; - - // If false, then long_description is not returned. - bool details = 2; -} - -message GetArtifactResponse { - Artifact artifact = 1; -} - -message SearchOptions { - // If true, this means a strict partition search. meaning if you don't specify the partition - // field, that will mean, non-partitioned, rather than any partition. - bool strict_partitions = 1; - - // If true, only one artifact per key will be returned. It will be the latest one by creation time. - bool latest_by_key = 2; -} - -message SearchArtifactsRequest { - core.ArtifactKey artifact_key = 1; - - core.Partitions partitions = 2; - - string principal = 3; - string version = 4; - - SearchOptions options = 5; - - string token = 6; - int32 limit = 7; -} - -message SearchArtifactsResponse { - // If artifact specs are not requested, the resultant artifacts may be empty. - repeated Artifact artifacts = 1; - - // continuation token if relevant. - string token = 2; -} - -message FindByWorkflowExecRequest { - core.WorkflowExecutionIdentifier exec_id = 1; - - enum Direction { - INPUTS = 0; - OUTPUTS = 1; - } - - Direction direction = 2; -} - -// Aliases identify a particular version of an artifact. They are different than tags in that they -// have to be unique for a given artifact project/domain/name. That is, for a given project/domain/name/kind, -// at most one version can have any given value at any point. 
-message AddTagRequest { - core.ArtifactID artifact_id = 1; - - string value = 2; - - // If true, and another version already has the specified kind/value, set this version instead - bool overwrite = 3; -} - -message AddTagResponse {} - -message CreateTriggerRequest { - admin.LaunchPlan trigger_launch_plan = 1; -} - -message CreateTriggerResponse {} - -message DeleteTriggerRequest { - core.Identifier trigger_id = 1; -} - -message DeleteTriggerResponse {} - -message ArtifactProducer { - // These can be tasks, and workflows. Keeping track of the launch plans that a given workflow has is purely in - // Admin's domain. - core.Identifier entity_id = 1; - - core.VariableMap outputs = 2; -} - -message RegisterProducerRequest { - repeated ArtifactProducer producers = 1; -} - -message ArtifactConsumer { - // These should all be launch plan IDs - core.Identifier entity_id = 1; - - core.ParameterMap inputs = 2; -} - -message RegisterConsumerRequest { - repeated ArtifactConsumer consumers = 1; -} - -message RegisterResponse {} - -message ExecutionInputsRequest { - core.WorkflowExecutionIdentifier execution_id = 1; - - // can make this a map in the future, currently no need. 
- repeated core.ArtifactID inputs = 2; -} - -message ExecutionInputsResponse {} - -service ArtifactRegistry { - rpc CreateArtifact (CreateArtifactRequest) returns (CreateArtifactResponse) {} - - rpc GetArtifact (GetArtifactRequest) returns (GetArtifactResponse) { - option (google.api.http) = { - get: "/artifacts/api/v1/data/artifacts" - additional_bindings {get: "/artifacts/api/v1/data/artifact/id/{query.artifact_id.artifact_key.project}/{query.artifact_id.artifact_key.domain}/{query.artifact_id.artifact_key.name}/{query.artifact_id.version}"} - additional_bindings {get: "/artifacts/api/v1/data/artifact/id/{query.artifact_id.artifact_key.project}/{query.artifact_id.artifact_key.domain}/{query.artifact_id.artifact_key.name}"} - additional_bindings {get: "/artifacts/api/v1/data/artifact/tag/{query.artifact_tag.artifact_key.project}/{query.artifact_tag.artifact_key.domain}/{query.artifact_tag.artifact_key.name}"} - }; - } - - rpc SearchArtifacts (SearchArtifactsRequest) returns (SearchArtifactsResponse) { - option (google.api.http) = { - get: "/artifacts/api/v1/data/query/s/{artifact_key.project}/{artifact_key.domain}/{artifact_key.name}" - additional_bindings {get: "/artifacts/api/v1/data/query/{artifact_key.project}/{artifact_key.domain}"} - }; - } - - rpc CreateTrigger (CreateTriggerRequest) returns (CreateTriggerResponse) {} - - rpc DeleteTrigger (DeleteTriggerRequest) returns (DeleteTriggerResponse) {} - - rpc AddTag(AddTagRequest) returns (AddTagResponse) {} - - rpc RegisterProducer(RegisterProducerRequest) returns (RegisterResponse) {} - - rpc RegisterConsumer(RegisterConsumerRequest) returns (RegisterResponse) {} - - rpc SetExecutionInputs(ExecutionInputsRequest) returns (ExecutionInputsResponse) {} - - rpc FindByWorkflowExec (FindByWorkflowExecRequest) returns (SearchArtifactsResponse) { - option (google.api.http) = { - get: "/artifacts/api/v1/data/query/e/{exec_id.project}/{exec_id.domain}/{exec_id.name}/{direction}" - }; - } - -} diff --git 
a/flyteidl/protos/flyteidl/core/artifact_id.proto b/flyteidl/protos/flyteidl/core/artifact_id.proto index e7719cf010..ab0fa86623 100644 --- a/flyteidl/protos/flyteidl/core/artifact_id.proto +++ b/flyteidl/protos/flyteidl/core/artifact_id.proto @@ -4,6 +4,7 @@ package flyteidl.core; option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core"; +import "google/protobuf/timestamp.proto"; import "flyteidl/core/identifier.proto"; @@ -19,8 +20,13 @@ message ArtifactBindingData { uint32 index = 1; // These two fields are only relevant in the partition value case - string partition_key = 2; - string transform = 3; + oneof partition_data { + string partition_key = 2; + bool bind_to_time_partition = 3; + } + + // This is only relevant in the time partition case + string transform = 4; } message InputBindingData { @@ -29,15 +35,24 @@ message InputBindingData { message LabelValue { oneof value { + // The string static value is for use in the Partitions object string static_value = 1; - ArtifactBindingData triggered_binding = 2; - InputBindingData input_binding = 3; + + // The time value is for use in the TimePartition case + google.protobuf.Timestamp time_value = 2; + ArtifactBindingData triggered_binding = 3; + InputBindingData input_binding = 4; } } + message Partitions { map value = 1; } +message TimePartition { + LabelValue value = 1; +} + message ArtifactID { ArtifactKey artifact_key = 1; @@ -45,13 +60,10 @@ message ArtifactID { // Think of a partition as a tag on an Artifact, except it's a key-value pair. // Different partitions naturally have different versions (execution ids). - // This is a oneof because of partition querying... we need a way to distinguish between - // a user not-specifying partitions when searching, and specifically searching for an Artifact - // that is not partitioned (this can happen if an Artifact goes from partitioned to non- - // partitioned across executions). 
- oneof dimensions { - Partitions partitions = 3; - } + Partitions partitions = 3; + + // There is no such thing as an empty time partition - if it's not set, then there is no time partition. + TimePartition time_partition = 4; } message ArtifactTag { @@ -76,12 +88,3 @@ message ArtifactQuery { ArtifactBindingData binding = 4; } } - -message Trigger { - // This will be set to a launch plan type, but note that this is different than the actual launch plan type. - Identifier trigger_id = 1; - - // These are partial artifact IDs that will be triggered on - // Consider making these ArtifactQuery instead. - repeated core.ArtifactID triggers = 2; -} diff --git a/flyteidl/protos/flyteidl/core/catalog.proto b/flyteidl/protos/flyteidl/core/catalog.proto index fc86f0b975..4d98c28d7e 100644 --- a/flyteidl/protos/flyteidl/core/catalog.proto +++ b/flyteidl/protos/flyteidl/core/catalog.proto @@ -22,6 +22,8 @@ enum CatalogCacheStatus { CACHE_PUT_FAILURE = 5; // Used to indicate the cache lookup was skipped CACHE_SKIPPED = 6; + // Used to indicate that the cache was evicted + CACHE_EVICTED = 7; }; message CatalogArtifactTag { diff --git a/flyteidl/protos/flyteidl/core/identifier.proto b/flyteidl/protos/flyteidl/core/identifier.proto index d50e836099..50bf22429c 100644 --- a/flyteidl/protos/flyteidl/core/identifier.proto +++ b/flyteidl/protos/flyteidl/core/identifier.proto @@ -33,6 +33,9 @@ message Identifier { // Specific version of the resource. string version = 5; + + // Optional, org key applied to the resource. + string org = 6; } // Encapsulation of fields that uniquely identifies a Flyte workflow execution @@ -46,6 +49,9 @@ message WorkflowExecutionIdentifier { // User or system provided value for the resource. string name = 4; + + // Optional, org key applied to the resource. + string org = 5; } // Encapsulation of fields that identify a Flyte node execution entity. 
diff --git a/flyteidl/protos/flyteidl/core/metrics.proto b/flyteidl/protos/flyteidl/core/metrics.proto index 13823545e4..5244ff4873 100644 --- a/flyteidl/protos/flyteidl/core/metrics.proto +++ b/flyteidl/protos/flyteidl/core/metrics.proto @@ -6,6 +6,7 @@ option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" import "flyteidl/core/identifier.proto"; import "google/protobuf/timestamp.proto"; +import "google/protobuf/struct.proto"; // Span represents a duration trace of Flyte execution. The id field denotes a Flyte execution entity or an operation // which uniquely identifies the Span. The spans attribute allows this Span to be further broken down into more @@ -34,3 +35,16 @@ message Span { // spans defines a collection of Spans that breakdown this execution. repeated Span spans = 7; } + +// ExecutionMetrics is a collection of metrics that are collected during the execution of a Flyte task. +message ExecutionMetricResult { + // The metric this data represents. e.g. EXECUTION_METRIC_USED_CPU_AVG or EXECUTION_METRIC_USED_MEMORY_BYTES_AVG. + string metric = 1; + + // The result data in prometheus range query result format + // https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats. + // This may include multiple time series, differentiated by their metric labels. + // Start time is greater of (execution attempt start, 48h ago) + // End time is lesser of (execution attempt end, now) + google.protobuf.Struct data = 2; +} \ No newline at end of file diff --git a/flyteidl/protos/flyteidl/datacatalog/datacatalog.proto b/flyteidl/protos/flyteidl/datacatalog/datacatalog.proto index 1ddac9fccf..e296603113 100644 --- a/flyteidl/protos/flyteidl/datacatalog/datacatalog.proto +++ b/flyteidl/protos/flyteidl/datacatalog/datacatalog.proto @@ -289,6 +289,9 @@ message DatasetID { string domain = 3; // The domain (eg. 
environment) string version = 4; // Version of the data schema string UUID = 5; // UUID for the dataset (if set the above fields are optional) + + // Optional, org key applied to the resource. + string org = 6; } /* @@ -386,6 +389,8 @@ message DatasetPropertyFilter { string name = 2; string domain = 3; string version = 4; + // Optional, org key applied to the dataset. + string org = 5; } } diff --git a/flyteidl/protos/flyteidl/event/cloudevents.proto b/flyteidl/protos/flyteidl/event/cloudevents.proto index 0c628d52d4..d02c5ff516 100644 --- a/flyteidl/protos/flyteidl/event/cloudevents.proto +++ b/flyteidl/protos/flyteidl/event/cloudevents.proto @@ -16,22 +16,18 @@ import "google/protobuf/timestamp.proto"; message CloudEventWorkflowExecution { event.WorkflowExecutionEvent raw_event = 1; - core.LiteralMap output_data = 2; - - core.TypedInterface output_interface = 3; - - core.LiteralMap input_data = 4; + core.TypedInterface output_interface = 2; // The following are ExecutionMetadata fields // We can't have the ExecutionMetadata object directly because of import cycle - repeated core.ArtifactID artifact_ids = 5; - core.WorkflowExecutionIdentifier reference_execution = 6; - string principal = 7; + repeated core.ArtifactID artifact_ids = 3; + core.WorkflowExecutionIdentifier reference_execution = 4; + string principal = 5; // The ID of the LP that generated the execution that generated the Artifact. // Here for provenance information. // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. - core.Identifier launch_plan_id = 8; + core.Identifier launch_plan_id = 6; } message CloudEventNodeExecution { @@ -40,23 +36,18 @@ message CloudEventNodeExecution { // The relevant task execution if applicable core.TaskExecutionIdentifier task_exec_id = 2; - // Hydrated output - core.LiteralMap output_data = 3; - // The typed interface for the task that produced the event. 
- core.TypedInterface output_interface = 4; - - core.LiteralMap input_data = 5; + core.TypedInterface output_interface = 3; // The following are ExecutionMetadata fields // We can't have the ExecutionMetadata object directly because of import cycle - repeated core.ArtifactID artifact_ids = 6; - string principal = 7; + repeated core.ArtifactID artifact_ids = 4; + string principal = 5; // The ID of the LP that generated the execution that generated the Artifact. // Here for provenance information. // Launch plan IDs are easier to get than workflow IDs so we'll use these for now. - core.Identifier launch_plan_id = 8; + core.Identifier launch_plan_id = 6; } message CloudEventTaskExecution { @@ -72,11 +63,11 @@ message CloudEventExecutionStart { core.Identifier workflow_id = 3; - // Artifact IDs found + // Artifact inputs to the workflow execution for which we have the full Artifact ID. These are likely the result of artifact queries that are run. repeated core.ArtifactID artifact_ids = 4; - // Artifact keys found. - repeated string artifact_keys = 5; + // Artifact inputs to the workflow execution for which we only have the tracking bit that's installed into the Literal's metadata by the Artifact service. + repeated string artifact_trackers = 5; string principal = 6; } diff --git a/flyteidl/protos/flyteidl/service/admin.proto b/flyteidl/protos/flyteidl/service/admin.proto index 95ad07686f..37e0b1ecb5 100644 --- a/flyteidl/protos/flyteidl/service/admin.proto +++ b/flyteidl/protos/flyteidl/service/admin.proto @@ -29,6 +29,10 @@ service AdminService { option (google.api.http) = { post: "/api/v1/tasks" body: "*" + additional_bindings { + post: "/api/v1/tasks/org/{id.org}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Create and register a task definition." 
@@ -51,16 +55,22 @@ service AdminService { rpc GetTask (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.Task) { option (google.api.http) = { get: "/api/v1/tasks/{id.project}/{id.domain}/{id.name}/{id.version}" + additional_bindings { + get: "/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve an existing task definition." // }; } - // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. + // Fetch a list of :ref:`ref_flyteidl.admin.NamedEntityIdentifier` of task objects. rpc ListTaskIds (flyteidl.admin.NamedEntityIdentifierListRequest) returns (flyteidl.admin.NamedEntityIdentifierList) { option (google.api.http) = { get: "/api/v1/task_ids/{project}/{domain}" + additional_bindings { + get: "/api/v1/tasks/org/{org}/{project}/{domain}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch existing task definition identifiers matching input filters." @@ -72,7 +82,13 @@ service AdminService { option (google.api.http) = { get: "/api/v1/tasks/{id.project}/{id.domain}/{id.name}" additional_bindings { - get: "/api/v1/tasks/{id.project}/{id.domain}" + get: "/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}/{id.name}", + } + additional_bindings { + get: "/api/v1/tasks/{id.project}/{id.domain}", + } + additional_bindings { + get: "/api/v1/tasks/org/{id.org}/{id.project}/{id.domain}", } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { @@ -85,6 +101,10 @@ service AdminService { option (google.api.http) = { post: "/api/v1/workflows" body: "*" + additional_bindings { + post: "/api/v1/workflows/org/{id.org}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Create and register a workflow definition." 
@@ -107,6 +127,9 @@ service AdminService { rpc GetWorkflow (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.Workflow) { option (google.api.http) = { get: "/api/v1/workflows/{id.project}/{id.domain}/{id.name}/{id.version}" + additional_bindings { + get: "/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve an existing workflow definition." @@ -117,6 +140,9 @@ service AdminService { rpc ListWorkflowIds (flyteidl.admin.NamedEntityIdentifierListRequest) returns (flyteidl.admin.NamedEntityIdentifierList) { option (google.api.http) = { get: "/api/v1/workflow_ids/{project}/{domain}" + additional_bindings { + get: "/api/v1/workflows/org/{org}/{project}/{domain}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch an existing workflow definition identifiers matching input filters." @@ -128,7 +154,13 @@ service AdminService { option (google.api.http) = { get: "/api/v1/workflows/{id.project}/{id.domain}/{id.name}" additional_bindings { - get: "/api/v1/workflows/{id.project}/{id.domain}" + get: "/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}/{id.name}", + } + additional_bindings { + get: "/api/v1/workflows/{id.project}/{id.domain}", + } + additional_bindings { + get: "/api/v1/workflows/org/{id.org}/{id.project}/{id.domain}", } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { @@ -141,6 +173,10 @@ service AdminService { option (google.api.http) = { post: "/api/v1/launch_plans" body: "*" + additional_bindings { + post: "/api/v1/launch_plans/org/{id.org}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Create and register a launch plan definition." 
@@ -163,6 +199,9 @@ service AdminService { rpc GetLaunchPlan (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.LaunchPlan) { option (google.api.http) = { get: "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}" + additional_bindings { + get: "/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve an existing launch plan definition." @@ -173,6 +212,9 @@ service AdminService { rpc GetActiveLaunchPlan (flyteidl.admin.ActiveLaunchPlanRequest) returns (flyteidl.admin.LaunchPlan) { option (google.api.http) = { get: "/api/v1/active_launch_plans/{id.project}/{id.domain}/{id.name}" + additional_bindings { + get: "/api/v1/active_launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve the active launch plan version specified by input request filters." @@ -183,6 +225,9 @@ service AdminService { rpc ListActiveLaunchPlans (flyteidl.admin.ActiveLaunchPlanListRequest) returns (flyteidl.admin.LaunchPlanList) { option (google.api.http) = { get: "/api/v1/active_launch_plans/{project}/{domain}" + additional_bindings { + get: "/api/v1/active_launch_plans/org/{org}/{project}/{domain}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch the active launch plan versions specified by input request filters." 
@@ -193,6 +238,9 @@ service AdminService { rpc ListLaunchPlanIds (flyteidl.admin.NamedEntityIdentifierListRequest) returns (flyteidl.admin.NamedEntityIdentifierList) { option (google.api.http) = { get: "/api/v1/launch_plan_ids/{project}/{domain}" + additional_bindings { + get: "/api/v1/launch_plan_ids/org/{org}/{project}/{domain}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch existing launch plan definition identifiers matching input filters." @@ -203,9 +251,15 @@ service AdminService { rpc ListLaunchPlans (flyteidl.admin.ResourceListRequest) returns (flyteidl.admin.LaunchPlanList) { option (google.api.http) = { get: "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}" + additional_bindings { + get: "/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}" + } additional_bindings { get: "/api/v1/launch_plans/{id.project}/{id.domain}" } + additional_bindings { + get: "/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch existing launch plan definitions matching input filters." @@ -216,6 +270,9 @@ service AdminService { rpc UpdateLaunchPlan (flyteidl.admin.LaunchPlanUpdateRequest) returns (flyteidl.admin.LaunchPlanUpdateResponse) { option (google.api.http) = { put: "/api/v1/launch_plans/{id.project}/{id.domain}/{id.name}/{id.version}" + additional_bindings { + put: "/api/v1/launch_plans/org/{id.org}/{id.project}/{id.domain}/{id.name}/{id.version}" + } body: "*" }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { @@ -233,6 +290,10 @@ service AdminService { option (google.api.http) = { post: "/api/v1/executions" body: "*" + additional_bindings { + put: "/api/v1/executions/org/{org}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Create a workflow execution." 
@@ -244,6 +305,10 @@ service AdminService { option (google.api.http) = { post: "/api/v1/executions/relaunch" body: "*" + additional_bindings { + post: "/api/v1/executions/org/{id.org}/relaunch" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Relaunch a workflow execution." @@ -259,6 +324,10 @@ service AdminService { option (google.api.http) = { post: "/api/v1/executions/recover" body: "*" + additional_bindings { + post: "/api/v1/executions/org/{id.org}/recover" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Recreates a previously-run workflow execution that will only start executing from the last known failure point. " @@ -273,6 +342,9 @@ service AdminService { rpc GetExecution (flyteidl.admin.WorkflowExecutionGetRequest) returns (flyteidl.admin.Execution) { option (google.api.http) = { get: "/api/v1/executions/{id.project}/{id.domain}/{id.name}" + additional_bindings { + get: "/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve an existing workflow execution." @@ -284,6 +356,10 @@ service AdminService { option (google.api.http) = { put: "/api/v1/executions/{id.project}/{id.domain}/{id.name}" body: "*" + additional_bindings { + put: "/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Update execution belonging to project domain." 
@@ -294,6 +370,9 @@ service AdminService { rpc GetExecutionData (flyteidl.admin.WorkflowExecutionGetDataRequest) returns (flyteidl.admin.WorkflowExecutionGetDataResponse) { option (google.api.http) = { get: "/api/v1/data/executions/{id.project}/{id.domain}/{id.name}" + additional_bindings { + get: "/api/v1/data/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve input and output data from an existing workflow execution." @@ -304,6 +383,9 @@ service AdminService { rpc ListExecutions (flyteidl.admin.ResourceListRequest) returns (flyteidl.admin.ExecutionList) { option (google.api.http) = { get: "/api/v1/executions/{id.project}/{id.domain}" + additional_bindings { + get: "/api/v1/executions/org/{id.org}/{id.project}/{id.domain}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch existing workflow executions matching input filters." @@ -315,6 +397,10 @@ service AdminService { option (google.api.http) = { delete: "/api/v1/executions/{id.project}/{id.domain}/{id.name}" body: "*" + additional_bindings { + delete: "/api/v1/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Terminate the active workflow execution specified in the request." 
@@ -325,16 +411,35 @@ service AdminService { rpc GetNodeExecution (flyteidl.admin.NodeExecutionGetRequest) returns (flyteidl.admin.NodeExecution) { option (google.api.http) = { get: "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" + additional_bindings { + get: "/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve an existing node execution." // }; } + // Fetches a :ref:`ref_flyteidl.admin.DynamicNodeWorkflowResponse`. + rpc GetDynamicNodeWorkflow (flyteidl.admin.GetDynamicNodeWorkflowRequest) returns (flyteidl.admin.DynamicNodeWorkflowResponse) { + option (google.api.http) = { + get: "/api/v1/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow" + additional_bindings { + get: "/api/v1/node_executions/org/{id.execution_id.org}/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}/dynamic_workflow" + } + }; + // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { + // description: "Retrieve a workflow closure from a dynamic node execution." + // }; + } + // Fetch a list of :ref:`ref_flyteidl.admin.NodeExecution`. 
rpc ListNodeExecutions (flyteidl.admin.NodeExecutionListRequest) returns (flyteidl.admin.NodeExecutionList) { option (google.api.http) = { get: "/api/v1/node_executions/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}" + additional_bindings { + get: "/api/v1/node_executions/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch existing node executions matching input filters." @@ -345,6 +450,9 @@ service AdminService { rpc ListNodeExecutionsForTask (flyteidl.admin.NodeExecutionForTaskListRequest) returns (flyteidl.admin.NodeExecutionList) { option (google.api.http) = { get: "/api/v1/children/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}" + additional_bindings { + get: "/api/v1/children/org/{task_execution_id.node_execution_id.execution_id.org}/task_executions/{task_execution_id.node_execution_id.execution_id.project}/{task_execution_id.node_execution_id.execution_id.domain}/{task_execution_id.node_execution_id.execution_id.name}/{task_execution_id.node_execution_id.node_id}/{task_execution_id.task_id.project}/{task_execution_id.task_id.domain}/{task_execution_id.task_id.name}/{task_execution_id.task_id.version}/{task_execution_id.retry_attempt}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch child node executions launched by the specified task execution." 
@@ -355,6 +463,9 @@ service AdminService { rpc GetNodeExecutionData (flyteidl.admin.NodeExecutionGetDataRequest) returns (flyteidl.admin.NodeExecutionGetDataResponse) { option (google.api.http) = { get: "/api/v1/data/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" + additional_bindings { + get: "/api/v1/data/org/{id.execution_id.org}/node_executions/{id.execution_id.project}/{id.execution_id.domain}/{id.execution_id.name}/{id.node_id}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve input and output data from an existing node execution." @@ -366,29 +477,40 @@ service AdminService { option (google.api.http) = { post: "/api/v1/projects" body: "*" + additional_bindings { + post: "/api/v1/projects/org/{project.org}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Register a project." // }; } - // Updates an existing :ref:`ref_flyteidl.admin.Project` + // Updates an existing :ref:`ref_flyteidl.admin.Project` // flyteidl.admin.Project should be passed but the domains property should be empty; // it will be ignored in the handler as domains cannot be updated via this API. rpc UpdateProject (flyteidl.admin.Project) returns (flyteidl.admin.ProjectUpdateResponse) { option (google.api.http) = { put: "/api/v1/projects/{id}" body: "*" + additional_bindings { + put: "/api/v1/projects/org/{org}/{id}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Update a project." 
// }; } - // Fetches a list of :ref:`ref_flyteidl.admin.Project` + // Fetches a list of :ref:`ref_flyteidl.admin.Project` rpc ListProjects (flyteidl.admin.ProjectListRequest) returns (flyteidl.admin.Projects) { option (google.api.http) = { get: "/api/v1/projects" + additional_bindings { + get: "/api/v1/projects/org/{org}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch registered projects." @@ -400,6 +522,10 @@ service AdminService { option (google.api.http) = { post: "/api/v1/events/workflows" body: "*" + additional_bindings { + post: "/api/v1/events/org/{event.execution_id.org}/workflows" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Create a workflow execution event recording a phase transition." @@ -411,6 +537,10 @@ service AdminService { option (google.api.http) = { post: "/api/v1/events/nodes" body: "*" + additional_bindings { + post: "/api/v1/events/org/{event.id.execution_id.org}/nodes" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Create a node execution event recording a phase transition." @@ -422,6 +552,10 @@ service AdminService { option (google.api.http) = { post: "/api/v1/events/tasks" body: "*" + additional_bindings { + post: "/api/v1/events/org/{event.parent_node_execution_id.execution_id.org}/tasks" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Create a task execution event recording a phase transition." @@ -431,7 +565,10 @@ service AdminService { // Fetches a :ref:`ref_flyteidl.admin.TaskExecution`. 
rpc GetTaskExecution (flyteidl.admin.TaskExecutionGetRequest) returns (flyteidl.admin.TaskExecution) { option (google.api.http) = { - get: "/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + get: "/api/v1/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + additional_bindings { + get: "/api/v1/task_executions/org/{id.node_execution_id.execution_id.org}/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve an existing task execution." @@ -441,7 +578,10 @@ service AdminService { // Fetches a list of :ref:`ref_flyteidl.admin.TaskExecution`. 
rpc ListTaskExecutions (flyteidl.admin.TaskExecutionListRequest) returns (flyteidl.admin.TaskExecutionList) { option (google.api.http) = { - get: "/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}" + get: "/api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}" + additional_bindings { + get: "/api/v1/task_executions/org/{node_execution_id.execution_id.org}/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch existing task executions matching input filters." @@ -452,7 +592,10 @@ service AdminService { // Fetches input and output data for a :ref:`ref_flyteidl.admin.TaskExecution`. 
rpc GetTaskExecutionData (flyteidl.admin.TaskExecutionGetDataRequest) returns (flyteidl.admin.TaskExecutionGetDataResponse) { option (google.api.http) = { - get: "/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + get: "/api/v1/data/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + additional_bindings { + get: "/api/v1/data/org/{id.node_execution_id.execution_id.org}/task_executions/{id.node_execution_id.execution_id.project}/{id.node_execution_id.execution_id.domain}/{id.node_execution_id.execution_id.name}/{id.node_execution_id.node_id}/{id.task_id.project}/{id.task_id.domain}/{id.task_id.name}/{id.task_id.version}/{id.retry_attempt}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve input and output data from an existing task execution." 
@@ -464,6 +607,10 @@ service AdminService { option (google.api.http) = { put: "/api/v1/project_domain_attributes/{attributes.project}/{attributes.domain}" body: "*" + additional_bindings { + put: "/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Update the customized resource attributes associated with a project-domain combination" @@ -474,6 +621,9 @@ service AdminService { rpc GetProjectDomainAttributes (flyteidl.admin.ProjectDomainAttributesGetRequest) returns (flyteidl.admin.ProjectDomainAttributesGetResponse) { option (google.api.http) = { get: "/api/v1/project_domain_attributes/{project}/{domain}" + additional_bindings { + get: "/api/v1/project_domain_attributes/org/{org}/{project}/{domain}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve the customized resource attributes associated with a project-domain combination" @@ -485,6 +635,10 @@ service AdminService { option (google.api.http) = { delete: "/api/v1/project_domain_attributes/{project}/{domain}" body: "*" + additional_bindings { + delete: "/api/v1/project_domain_attributes/org/{org}/{project}/{domain}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Delete the customized resource attributes associated with a project-domain combination" @@ -496,6 +650,10 @@ service AdminService { option (google.api.http) = { put: "/api/v1/project_attributes/{attributes.project}" body: "*" + additional_bindings { + put: "/api/v1/project_domain_attributes/org/{attributes.org}/{attributes.project}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Update the customized resource attributes associated with a project" @@ -506,6 +664,9 @@ service AdminService { rpc GetProjectAttributes 
(flyteidl.admin.ProjectAttributesGetRequest) returns (flyteidl.admin.ProjectAttributesGetResponse) { option (google.api.http) = { get: "/api/v1/project_attributes/{project}" + additional_bindings { + get: "/api/v1/project_domain_attributes/org/{org}/{project}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve the customized resource attributes associated with a project" @@ -517,6 +678,10 @@ service AdminService { option (google.api.http) = { delete: "/api/v1/project_attributes/{project}" body: "*" + additional_bindings { + delete: "/api/v1/project_domain_attributes/org/{org}/{project}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Delete the customized resource attributes associated with a project" @@ -527,6 +692,10 @@ service AdminService { option (google.api.http) = { put: "/api/v1/workflow_attributes/{attributes.project}/{attributes.domain}/{attributes.workflow}" body: "*" + additional_bindings { + put: "/api/v1/workflow_attributes/org/{attributes.org}/{attributes.project}/{attributes.domain}/{attributes.workflow}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Update the customized resource attributes associated with a project, domain and workflow combination" @@ -537,6 +706,9 @@ service AdminService { rpc GetWorkflowAttributes (flyteidl.admin.WorkflowAttributesGetRequest) returns (flyteidl.admin.WorkflowAttributesGetResponse) { option (google.api.http) = { get: "/api/v1/workflow_attributes/{project}/{domain}/{workflow}" + additional_bindings { + get: "/api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve the customized resource attributes associated with a project, domain and workflow combination" @@ -548,6 +720,10 @@ service AdminService { option 
(google.api.http) = { delete: "/api/v1/workflow_attributes/{project}/{domain}/{workflow}" body: "*" + additional_bindings { + delete: "/api/v1/workflow_attributes/org/{org}/{project}/{domain}/{workflow}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Delete the customized resource attributes associated with a project, domain and workflow combination" @@ -558,6 +734,9 @@ service AdminService { rpc ListMatchableAttributes (flyteidl.admin.ListMatchableAttributesRequest) returns (flyteidl.admin.ListMatchableAttributesResponse) { option (google.api.http) = { get: "/api/v1/matchable_attributes" + additional_bindings { + get: "/api/v1/matchable_attributes/org/{org}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve a list of MatchableAttributesConfiguration objects." @@ -568,6 +747,9 @@ service AdminService { rpc ListNamedEntities (flyteidl.admin.NamedEntityListRequest) returns (flyteidl.admin.NamedEntityList) { option (google.api.http) = { get: "/api/v1/named_entities/{resource_type}/{project}/{domain}" + additional_bindings { + get: "/api/v1/named_entities/org/{org}/{resource_type}/{project}/{domain}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve a list of NamedEntity objects sharing a common resource type, project, and domain." @@ -578,6 +760,9 @@ service AdminService { rpc GetNamedEntity (flyteidl.admin.NamedEntityGetRequest) returns (flyteidl.admin.NamedEntity) { option (google.api.http) = { get: "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" + additional_bindings { + get: "/api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve a NamedEntity object." 
@@ -589,6 +774,10 @@ service AdminService { option (google.api.http) = { put: "/api/v1/named_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" body: "*" + additional_bindings { + put: "/api/v1/named_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Update the fields associated with a NamedEntity" @@ -596,18 +785,21 @@ service AdminService { } rpc GetVersion (flyteidl.admin.GetVersionRequest) returns (flyteidl.admin.GetVersionResponse) { - option (google.api.http) = { - get: "/api/v1/version" - }; - // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { - // description: "Retrieve the Version (including the Build information) for FlyteAdmin service" - // }; + option (google.api.http) = { + get: "/api/v1/version" + }; + // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { + // description: "Retrieve the Version (including the Build information) for FlyteAdmin service" + // }; } // Fetch a :ref:`ref_flyteidl.admin.DescriptionEntity` object. rpc GetDescriptionEntity (flyteidl.admin.ObjectGetRequest) returns (flyteidl.admin.DescriptionEntity) { option (google.api.http) = { get: "/api/v1/description_entities/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}" + additional_bindings { + get: "/api/v1/description_entities/org/{id.org}/{id.resource_type}/{id.project}/{id.domain}/{id.name}/{id.version}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve an existing description entity description." 
@@ -618,9 +810,15 @@ service AdminService { rpc ListDescriptionEntities (flyteidl.admin.DescriptionEntityListRequest) returns (flyteidl.admin.DescriptionEntityList) { option (google.api.http) = { get: "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}/{id.name}" + additional_bindings { + get: "/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}/{id.name}" + } additional_bindings { get: "/api/v1/description_entities/{resource_type}/{id.project}/{id.domain}" } + additional_bindings { + get: "/api/v1/description_entities/org/{id.org}/{resource_type}/{id.project}/{id.domain}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch existing description entity definitions matching input filters." @@ -631,6 +829,9 @@ service AdminService { rpc GetExecutionMetrics (flyteidl.admin.WorkflowExecutionGetMetricsRequest) returns (flyteidl.admin.WorkflowExecutionGetMetricsResponse) { option (google.api.http) = { get: "/api/v1/metrics/executions/{id.project}/{id.domain}/{id.name}" + additional_bindings { + get: "/api/v1/metrics/executions/org/{id.org}/{id.project}/{id.domain}/{id.name}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Retrieve metrics from an existing workflow execution." diff --git a/flyteidl/protos/flyteidl/service/agent.proto b/flyteidl/protos/flyteidl/service/agent.proto index eba19e496c..7538c0b819 100644 --- a/flyteidl/protos/flyteidl/service/agent.proto +++ b/flyteidl/protos/flyteidl/service/agent.proto @@ -14,6 +14,16 @@ service AsyncAgentService { rpc GetTask (flyteidl.admin.GetTaskRequest) returns (flyteidl.admin.GetTaskResponse){}; // Delete the task resource. rpc DeleteTask (flyteidl.admin.DeleteTaskRequest) returns (flyteidl.admin.DeleteTaskResponse){}; + + // GetTaskMetrics returns one or more task execution metrics, if available. 
+ // + // Errors include + // * OutOfRange if metrics are not available for the specified task time range + // * various other errors + rpc GetTaskMetrics(flyteidl.admin.GetTaskMetricsRequest) returns (flyteidl.admin.GetTaskMetricsResponse){}; + + // GetTaskLogs returns task execution logs, if available. + rpc GetTaskLogs(flyteidl.admin.GetTaskLogsRequest) returns (flyteidl.admin.GetTaskLogsResponse){}; } // AgentMetadataService defines an RPC service that is also served over HTTP via grpc-gateway. diff --git a/flyteidl/protos/flyteidl/service/external_plugin_service.proto b/flyteidl/protos/flyteidl/service/external_plugin_service.proto index ce890cf8f2..a3035290e2 100644 --- a/flyteidl/protos/flyteidl/service/external_plugin_service.proto +++ b/flyteidl/protos/flyteidl/service/external_plugin_service.proto @@ -4,7 +4,6 @@ package flyteidl.service; option go_package = "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service"; import "flyteidl/core/literals.proto"; import "flyteidl/core/tasks.proto"; -import "flyteidl/core/interface.proto"; // ExternalPluginService defines an RPC Service that allows propeller to send the request to the backend plugin server. 
service ExternalPluginService { diff --git a/flyteidl/protos/flyteidl/service/signal.proto b/flyteidl/protos/flyteidl/service/signal.proto index 91030d226c..07389774d0 100644 --- a/flyteidl/protos/flyteidl/service/signal.proto +++ b/flyteidl/protos/flyteidl/service/signal.proto @@ -24,6 +24,9 @@ service SignalService { rpc ListSignals (flyteidl.admin.SignalListRequest) returns (flyteidl.admin.SignalList) { option (google.api.http) = { get: "/api/v1/signals/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}" + additional_bindings: { + get: "/api/v1/signals/org/{workflow_execution_id.org}/{workflow_execution_id.project}/{workflow_execution_id.domain}/{workflow_execution_id.name}" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Fetch existing signal definitions matching the input signal id filters." @@ -35,6 +38,10 @@ service SignalService { option (google.api.http) = { post: "/api/v1/signals" body: "*" + additional_bindings: { + post: "/api/v1/signals/org/{id.execution_id.org}" + body: "*" + } }; // option (grpc.gateway.protoc_gen_swagger.options.openapiv2_operation) = { // description: "Set a signal value." diff --git a/flyteidl/pull_request_template.md b/flyteidl/pull_request_template.md deleted file mode 100644 index 9cdab99b46..0000000000 --- a/flyteidl/pull_request_template.md +++ /dev/null @@ -1,35 +0,0 @@ -## _Read then delete this section_ - -_- Make sure to use a concise title for the pull-request._ - -_- Use #patch, #minor or #major in the pull-request title to bump the corresponding version. Otherwise, the patch version -will be bumped. [More details](https://github.com/marketplace/actions/github-tag-bump)_ - -# TL;DR -_Please replace this text with a description of what this PR accomplishes._ - -## Type - - [ ] Bug Fix - - [ ] Feature - - [ ] Plugin - -## Are all requirements met? 
- - - [ ] Code completed - - [ ] Smoke tested - - [ ] Unit tests added - - [ ] Code documentation added - - [ ] Any pending items have an associated Issue - -## Complete description - _How did you fix the bug, make the feature etc. Link to any design docs etc_ - -## Tracking Issue -_Remove the '*fixes*' keyword if there will be multiple PRs to fix the linked issue_ - -fixes https://github.com/flyteorg/flyte/issues/ - -## Follow-up issue -_NA_ -OR -_https://github.com/flyteorg/flyte/issues/_ diff --git a/flyteidl/setup.py b/flyteidl/setup.py index b82355362e..2f439ad360 100644 --- a/flyteidl/setup.py +++ b/flyteidl/setup.py @@ -41,4 +41,5 @@ extras_require={ ':python_version=="2.7"': ['typing>=3.6'], # allow typehinting PY2 }, + license='Apache-2.0', ) diff --git a/flyteidl/validate/__init__.py b/flyteidl/validate/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/flyteidl/validate/validate_pb2.py b/flyteidl/validate/validate_pb2.py deleted file mode 100644 index 73988e7c38..0000000000 --- a/flyteidl/validate/validate_pb2.py +++ /dev/null @@ -1,2366 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: validate/validate.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='validate/validate.proto', - package='validate', - syntax='proto2', - serialized_options=_b('\n\032io.envoyproxy.pgv.validateZ2github.com/envoyproxy/protoc-gen-validate/validate'), - serialized_pb=_b('\n\x17validate/validate.proto\x12\x08validate\x1a google/protobuf/descriptor.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x98\x07\n\nFieldRules\x12\'\n\x07message\x18\x11 \x01(\x0b\x32\x16.validate.MessageRules\x12%\n\x05\x66loat\x18\x01 \x01(\x0b\x32\x14.validate.FloatRulesH\x00\x12\'\n\x06\x64ouble\x18\x02 \x01(\x0b\x32\x15.validate.DoubleRulesH\x00\x12%\n\x05int32\x18\x03 \x01(\x0b\x32\x14.validate.Int32RulesH\x00\x12%\n\x05int64\x18\x04 \x01(\x0b\x32\x14.validate.Int64RulesH\x00\x12\'\n\x06uint32\x18\x05 \x01(\x0b\x32\x15.validate.UInt32RulesH\x00\x12\'\n\x06uint64\x18\x06 \x01(\x0b\x32\x15.validate.UInt64RulesH\x00\x12\'\n\x06sint32\x18\x07 \x01(\x0b\x32\x15.validate.SInt32RulesH\x00\x12\'\n\x06sint64\x18\x08 \x01(\x0b\x32\x15.validate.SInt64RulesH\x00\x12)\n\x07\x66ixed32\x18\t \x01(\x0b\x32\x16.validate.Fixed32RulesH\x00\x12)\n\x07\x66ixed64\x18\n \x01(\x0b\x32\x16.validate.Fixed64RulesH\x00\x12+\n\x08sfixed32\x18\x0b 
\x01(\x0b\x32\x17.validate.SFixed32RulesH\x00\x12+\n\x08sfixed64\x18\x0c \x01(\x0b\x32\x17.validate.SFixed64RulesH\x00\x12#\n\x04\x62ool\x18\r \x01(\x0b\x32\x13.validate.BoolRulesH\x00\x12\'\n\x06string\x18\x0e \x01(\x0b\x32\x15.validate.StringRulesH\x00\x12%\n\x05\x62ytes\x18\x0f \x01(\x0b\x32\x14.validate.BytesRulesH\x00\x12#\n\x04\x65num\x18\x10 \x01(\x0b\x32\x13.validate.EnumRulesH\x00\x12+\n\x08repeated\x18\x12 \x01(\x0b\x32\x17.validate.RepeatedRulesH\x00\x12!\n\x03map\x18\x13 \x01(\x0b\x32\x12.validate.MapRulesH\x00\x12!\n\x03\x61ny\x18\x14 \x01(\x0b\x32\x12.validate.AnyRulesH\x00\x12+\n\x08\x64uration\x18\x15 \x01(\x0b\x32\x17.validate.DurationRulesH\x00\x12-\n\ttimestamp\x18\x16 \x01(\x0b\x32\x18.validate.TimestampRulesH\x00\x42\x06\n\x04type\"\x7f\n\nFloatRules\x12\r\n\x05\x63onst\x18\x01 \x01(\x02\x12\n\n\x02lt\x18\x02 \x01(\x02\x12\x0b\n\x03lte\x18\x03 \x01(\x02\x12\n\n\x02gt\x18\x04 \x01(\x02\x12\x0b\n\x03gte\x18\x05 \x01(\x02\x12\n\n\x02in\x18\x06 \x03(\x02\x12\x0e\n\x06not_in\x18\x07 \x03(\x02\x12\x14\n\x0cignore_empty\x18\x08 \x01(\x08\"\x80\x01\n\x0b\x44oubleRules\x12\r\n\x05\x63onst\x18\x01 \x01(\x01\x12\n\n\x02lt\x18\x02 \x01(\x01\x12\x0b\n\x03lte\x18\x03 \x01(\x01\x12\n\n\x02gt\x18\x04 \x01(\x01\x12\x0b\n\x03gte\x18\x05 \x01(\x01\x12\n\n\x02in\x18\x06 \x03(\x01\x12\x0e\n\x06not_in\x18\x07 \x03(\x01\x12\x14\n\x0cignore_empty\x18\x08 \x01(\x08\"\x7f\n\nInt32Rules\x12\r\n\x05\x63onst\x18\x01 \x01(\x05\x12\n\n\x02lt\x18\x02 \x01(\x05\x12\x0b\n\x03lte\x18\x03 \x01(\x05\x12\n\n\x02gt\x18\x04 \x01(\x05\x12\x0b\n\x03gte\x18\x05 \x01(\x05\x12\n\n\x02in\x18\x06 \x03(\x05\x12\x0e\n\x06not_in\x18\x07 \x03(\x05\x12\x14\n\x0cignore_empty\x18\x08 \x01(\x08\"\x7f\n\nInt64Rules\x12\r\n\x05\x63onst\x18\x01 \x01(\x03\x12\n\n\x02lt\x18\x02 \x01(\x03\x12\x0b\n\x03lte\x18\x03 \x01(\x03\x12\n\n\x02gt\x18\x04 \x01(\x03\x12\x0b\n\x03gte\x18\x05 \x01(\x03\x12\n\n\x02in\x18\x06 \x03(\x03\x12\x0e\n\x06not_in\x18\x07 \x03(\x03\x12\x14\n\x0cignore_empty\x18\x08 
\x01(\x08\"\x80\x01\n\x0bUInt32Rules\x12\r\n\x05\x63onst\x18\x01 \x01(\r\x12\n\n\x02lt\x18\x02 \x01(\r\x12\x0b\n\x03lte\x18\x03 \x01(\r\x12\n\n\x02gt\x18\x04 \x01(\r\x12\x0b\n\x03gte\x18\x05 \x01(\r\x12\n\n\x02in\x18\x06 \x03(\r\x12\x0e\n\x06not_in\x18\x07 \x03(\r\x12\x14\n\x0cignore_empty\x18\x08 \x01(\x08\"\x80\x01\n\x0bUInt64Rules\x12\r\n\x05\x63onst\x18\x01 \x01(\x04\x12\n\n\x02lt\x18\x02 \x01(\x04\x12\x0b\n\x03lte\x18\x03 \x01(\x04\x12\n\n\x02gt\x18\x04 \x01(\x04\x12\x0b\n\x03gte\x18\x05 \x01(\x04\x12\n\n\x02in\x18\x06 \x03(\x04\x12\x0e\n\x06not_in\x18\x07 \x03(\x04\x12\x14\n\x0cignore_empty\x18\x08 \x01(\x08\"\x80\x01\n\x0bSInt32Rules\x12\r\n\x05\x63onst\x18\x01 \x01(\x11\x12\n\n\x02lt\x18\x02 \x01(\x11\x12\x0b\n\x03lte\x18\x03 \x01(\x11\x12\n\n\x02gt\x18\x04 \x01(\x11\x12\x0b\n\x03gte\x18\x05 \x01(\x11\x12\n\n\x02in\x18\x06 \x03(\x11\x12\x0e\n\x06not_in\x18\x07 \x03(\x11\x12\x14\n\x0cignore_empty\x18\x08 \x01(\x08\"\x80\x01\n\x0bSInt64Rules\x12\r\n\x05\x63onst\x18\x01 \x01(\x12\x12\n\n\x02lt\x18\x02 \x01(\x12\x12\x0b\n\x03lte\x18\x03 \x01(\x12\x12\n\n\x02gt\x18\x04 \x01(\x12\x12\x0b\n\x03gte\x18\x05 \x01(\x12\x12\n\n\x02in\x18\x06 \x03(\x12\x12\x0e\n\x06not_in\x18\x07 \x03(\x12\x12\x14\n\x0cignore_empty\x18\x08 \x01(\x08\"\x81\x01\n\x0c\x46ixed32Rules\x12\r\n\x05\x63onst\x18\x01 \x01(\x07\x12\n\n\x02lt\x18\x02 \x01(\x07\x12\x0b\n\x03lte\x18\x03 \x01(\x07\x12\n\n\x02gt\x18\x04 \x01(\x07\x12\x0b\n\x03gte\x18\x05 \x01(\x07\x12\n\n\x02in\x18\x06 \x03(\x07\x12\x0e\n\x06not_in\x18\x07 \x03(\x07\x12\x14\n\x0cignore_empty\x18\x08 \x01(\x08\"\x81\x01\n\x0c\x46ixed64Rules\x12\r\n\x05\x63onst\x18\x01 \x01(\x06\x12\n\n\x02lt\x18\x02 \x01(\x06\x12\x0b\n\x03lte\x18\x03 \x01(\x06\x12\n\n\x02gt\x18\x04 \x01(\x06\x12\x0b\n\x03gte\x18\x05 \x01(\x06\x12\n\n\x02in\x18\x06 \x03(\x06\x12\x0e\n\x06not_in\x18\x07 \x03(\x06\x12\x14\n\x0cignore_empty\x18\x08 \x01(\x08\"\x82\x01\n\rSFixed32Rules\x12\r\n\x05\x63onst\x18\x01 \x01(\x0f\x12\n\n\x02lt\x18\x02 
\x01(\x0f\x12\x0b\n\x03lte\x18\x03 \x01(\x0f\x12\n\n\x02gt\x18\x04 \x01(\x0f\x12\x0b\n\x03gte\x18\x05 \x01(\x0f\x12\n\n\x02in\x18\x06 \x03(\x0f\x12\x0e\n\x06not_in\x18\x07 \x03(\x0f\x12\x14\n\x0cignore_empty\x18\x08 \x01(\x08\"\x82\x01\n\rSFixed64Rules\x12\r\n\x05\x63onst\x18\x01 \x01(\x10\x12\n\n\x02lt\x18\x02 \x01(\x10\x12\x0b\n\x03lte\x18\x03 \x01(\x10\x12\n\n\x02gt\x18\x04 \x01(\x10\x12\x0b\n\x03gte\x18\x05 \x01(\x10\x12\n\n\x02in\x18\x06 \x03(\x10\x12\x0e\n\x06not_in\x18\x07 \x03(\x10\x12\x14\n\x0cignore_empty\x18\x08 \x01(\x08\"\x1a\n\tBoolRules\x12\r\n\x05\x63onst\x18\x01 \x01(\x08\"\xfd\x03\n\x0bStringRules\x12\r\n\x05\x63onst\x18\x01 \x01(\t\x12\x0b\n\x03len\x18\x13 \x01(\x04\x12\x0f\n\x07min_len\x18\x02 \x01(\x04\x12\x0f\n\x07max_len\x18\x03 \x01(\x04\x12\x11\n\tlen_bytes\x18\x14 \x01(\x04\x12\x11\n\tmin_bytes\x18\x04 \x01(\x04\x12\x11\n\tmax_bytes\x18\x05 \x01(\x04\x12\x0f\n\x07pattern\x18\x06 \x01(\t\x12\x0e\n\x06prefix\x18\x07 \x01(\t\x12\x0e\n\x06suffix\x18\x08 \x01(\t\x12\x10\n\x08\x63ontains\x18\t \x01(\t\x12\x14\n\x0cnot_contains\x18\x17 \x01(\t\x12\n\n\x02in\x18\n \x03(\t\x12\x0e\n\x06not_in\x18\x0b \x03(\t\x12\x0f\n\x05\x65mail\x18\x0c \x01(\x08H\x00\x12\x12\n\x08hostname\x18\r \x01(\x08H\x00\x12\x0c\n\x02ip\x18\x0e \x01(\x08H\x00\x12\x0e\n\x04ipv4\x18\x0f \x01(\x08H\x00\x12\x0e\n\x04ipv6\x18\x10 \x01(\x08H\x00\x12\r\n\x03uri\x18\x11 \x01(\x08H\x00\x12\x11\n\x07uri_ref\x18\x12 \x01(\x08H\x00\x12\x11\n\x07\x61\x64\x64ress\x18\x15 \x01(\x08H\x00\x12\x0e\n\x04uuid\x18\x16 \x01(\x08H\x00\x12\x30\n\x10well_known_regex\x18\x18 \x01(\x0e\x32\x14.validate.KnownRegexH\x00\x12\x14\n\x06strict\x18\x19 \x01(\x08:\x04true\x12\x14\n\x0cignore_empty\x18\x1a \x01(\x08\x42\x0c\n\nwell_known\"\xfb\x01\n\nBytesRules\x12\r\n\x05\x63onst\x18\x01 \x01(\x0c\x12\x0b\n\x03len\x18\r \x01(\x04\x12\x0f\n\x07min_len\x18\x02 \x01(\x04\x12\x0f\n\x07max_len\x18\x03 \x01(\x04\x12\x0f\n\x07pattern\x18\x04 \x01(\t\x12\x0e\n\x06prefix\x18\x05 \x01(\x0c\x12\x0e\n\x06suffix\x18\x06 
\x01(\x0c\x12\x10\n\x08\x63ontains\x18\x07 \x01(\x0c\x12\n\n\x02in\x18\x08 \x03(\x0c\x12\x0e\n\x06not_in\x18\t \x03(\x0c\x12\x0c\n\x02ip\x18\n \x01(\x08H\x00\x12\x0e\n\x04ipv4\x18\x0b \x01(\x08H\x00\x12\x0e\n\x04ipv6\x18\x0c \x01(\x08H\x00\x12\x14\n\x0cignore_empty\x18\x0e \x01(\x08\x42\x0c\n\nwell_known\"L\n\tEnumRules\x12\r\n\x05\x63onst\x18\x01 \x01(\x05\x12\x14\n\x0c\x64\x65\x66ined_only\x18\x02 \x01(\x08\x12\n\n\x02in\x18\x03 \x03(\x05\x12\x0e\n\x06not_in\x18\x04 \x03(\x05\".\n\x0cMessageRules\x12\x0c\n\x04skip\x18\x01 \x01(\x08\x12\x10\n\x08required\x18\x02 \x01(\x08\"\x80\x01\n\rRepeatedRules\x12\x11\n\tmin_items\x18\x01 \x01(\x04\x12\x11\n\tmax_items\x18\x02 \x01(\x04\x12\x0e\n\x06unique\x18\x03 \x01(\x08\x12#\n\x05items\x18\x04 \x01(\x0b\x32\x14.validate.FieldRules\x12\x14\n\x0cignore_empty\x18\x05 \x01(\x08\"\xa3\x01\n\x08MapRules\x12\x11\n\tmin_pairs\x18\x01 \x01(\x04\x12\x11\n\tmax_pairs\x18\x02 \x01(\x04\x12\x11\n\tno_sparse\x18\x03 \x01(\x08\x12\"\n\x04keys\x18\x04 \x01(\x0b\x32\x14.validate.FieldRules\x12$\n\x06values\x18\x05 \x01(\x0b\x32\x14.validate.FieldRules\x12\x14\n\x0cignore_empty\x18\x06 \x01(\x08\"8\n\x08\x41nyRules\x12\x10\n\x08required\x18\x01 \x01(\x08\x12\n\n\x02in\x18\x02 \x03(\t\x12\x0e\n\x06not_in\x18\x03 \x03(\t\"\xbb\x02\n\rDurationRules\x12\x10\n\x08required\x18\x01 \x01(\x08\x12(\n\x05\x63onst\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12%\n\x02lt\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12&\n\x03lte\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12%\n\x02gt\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12&\n\x03gte\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12%\n\x02in\x18\x07 \x03(\x0b\x32\x19.google.protobuf.Duration\x12)\n\x06not_in\x18\x08 \x03(\x0b\x32\x19.google.protobuf.Duration\"\xba\x02\n\x0eTimestampRules\x12\x10\n\x08required\x18\x01 \x01(\x08\x12)\n\x05\x63onst\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12&\n\x02lt\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\x03lte\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12&\n\x02gt\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\x03gte\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0e\n\x06lt_now\x18\x07 \x01(\x08\x12\x0e\n\x06gt_now\x18\x08 \x01(\x08\x12)\n\x06within\x18\t \x01(\x0b\x32\x19.google.protobuf.Duration*F\n\nKnownRegex\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x14\n\x10HTTP_HEADER_NAME\x10\x01\x12\x15\n\x11HTTP_HEADER_VALUE\x10\x02:2\n\x08\x64isabled\x12\x1f.google.protobuf.MessageOptions\x18\xaf\x08 \x01(\x08:1\n\x07ignored\x12\x1f.google.protobuf.MessageOptions\x18\xb0\x08 \x01(\x08:0\n\x08required\x12\x1d.google.protobuf.OneofOptions\x18\xaf\x08 \x01(\x08:C\n\x05rules\x12\x1d.google.protobuf.FieldOptions\x18\xaf\x08 \x01(\x0b\x32\x14.validate.FieldRulesBP\n\x1aio.envoyproxy.pgv.validateZ2github.com/envoyproxy/protoc-gen-validate/validate') - , - dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - -_KNOWNREGEX = _descriptor.EnumDescriptor( - name='KnownRegex', - full_name='validate.KnownRegex', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='UNKNOWN', index=0, number=0, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='HTTP_HEADER_NAME', index=1, number=1, - serialized_options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='HTTP_HEADER_VALUE', index=2, number=2, - serialized_options=None, - type=None), - ], - containing_type=None, - serialized_options=None, - serialized_start=4541, - serialized_end=4611, -) -_sym_db.RegisterEnumDescriptor(_KNOWNREGEX) - -KnownRegex = enum_type_wrapper.EnumTypeWrapper(_KNOWNREGEX) -UNKNOWN = 0 -HTTP_HEADER_NAME = 1 -HTTP_HEADER_VALUE = 2 - -DISABLED_FIELD_NUMBER = 1071 -disabled = _descriptor.FieldDescriptor( - name='disabled', 
full_name='validate.disabled', index=0, - number=1071, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - serialized_options=None, file=DESCRIPTOR) -IGNORED_FIELD_NUMBER = 1072 -ignored = _descriptor.FieldDescriptor( - name='ignored', full_name='validate.ignored', index=1, - number=1072, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - serialized_options=None, file=DESCRIPTOR) -REQUIRED_FIELD_NUMBER = 1071 -required = _descriptor.FieldDescriptor( - name='required', full_name='validate.required', index=2, - number=1071, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - serialized_options=None, file=DESCRIPTOR) -RULES_FIELD_NUMBER = 1071 -rules = _descriptor.FieldDescriptor( - name='rules', full_name='validate.rules', index=3, - number=1071, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - serialized_options=None, file=DESCRIPTOR) - - -_FIELDRULES = _descriptor.Descriptor( - name='FieldRules', - full_name='validate.FieldRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='message', full_name='validate.FieldRules.message', index=0, - number=17, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='float', full_name='validate.FieldRules.float', index=1, - number=1, type=11, 
cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='double', full_name='validate.FieldRules.double', index=2, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='int32', full_name='validate.FieldRules.int32', index=3, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='int64', full_name='validate.FieldRules.int64', index=4, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='uint32', full_name='validate.FieldRules.uint32', index=5, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='uint64', full_name='validate.FieldRules.uint64', index=6, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='sint32', full_name='validate.FieldRules.sint32', index=7, - number=7, 
type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='sint64', full_name='validate.FieldRules.sint64', index=8, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='fixed32', full_name='validate.FieldRules.fixed32', index=9, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='fixed64', full_name='validate.FieldRules.fixed64', index=10, - number=10, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='sfixed32', full_name='validate.FieldRules.sfixed32', index=11, - number=11, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='sfixed64', full_name='validate.FieldRules.sfixed64', index=12, - number=12, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bool', 
full_name='validate.FieldRules.bool', index=13, - number=13, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='string', full_name='validate.FieldRules.string', index=14, - number=14, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bytes', full_name='validate.FieldRules.bytes', index=15, - number=15, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='enum', full_name='validate.FieldRules.enum', index=16, - number=16, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='repeated', full_name='validate.FieldRules.repeated', index=17, - number=18, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='map', full_name='validate.FieldRules.map', index=18, - number=19, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - 
name='any', full_name='validate.FieldRules.any', index=19, - number=20, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='duration', full_name='validate.FieldRules.duration', index=20, - number=21, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='timestamp', full_name='validate.FieldRules.timestamp', index=21, - number=22, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='type', full_name='validate.FieldRules.type', - index=0, containing_type=None, fields=[]), - ], - serialized_start=137, - serialized_end=1057, -) - - -_FLOATRULES = _descriptor.Descriptor( - name='FloatRules', - full_name='validate.FloatRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.FloatRules.const', index=0, - number=1, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.FloatRules.lt', index=1, - number=2, type=2, cpp_type=6, label=1, - has_default_value=False, 
default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.FloatRules.lte', index=2, - number=3, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.FloatRules.gt', index=3, - number=4, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.FloatRules.gte', index=4, - number=5, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.FloatRules.in', index=5, - number=6, type=2, cpp_type=6, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.FloatRules.not_in', index=6, - number=7, type=2, cpp_type=6, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.FloatRules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, 
default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1059, - serialized_end=1186, -) - - -_DOUBLERULES = _descriptor.Descriptor( - name='DoubleRules', - full_name='validate.DoubleRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.DoubleRules.const', index=0, - number=1, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.DoubleRules.lt', index=1, - number=2, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.DoubleRules.lte', index=2, - number=3, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.DoubleRules.gt', index=3, - number=4, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.DoubleRules.gte', index=4, - number=5, 
type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.DoubleRules.in', index=5, - number=6, type=1, cpp_type=5, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.DoubleRules.not_in', index=6, - number=7, type=1, cpp_type=5, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.DoubleRules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1189, - serialized_end=1317, -) - - -_INT32RULES = _descriptor.Descriptor( - name='Int32Rules', - full_name='validate.Int32Rules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.Int32Rules.const', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', 
full_name='validate.Int32Rules.lt', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.Int32Rules.lte', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.Int32Rules.gt', index=3, - number=4, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.Int32Rules.gte', index=4, - number=5, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.Int32Rules.in', index=5, - number=6, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.Int32Rules.not_in', index=6, - number=7, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', 
full_name='validate.Int32Rules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1319, - serialized_end=1446, -) - - -_INT64RULES = _descriptor.Descriptor( - name='Int64Rules', - full_name='validate.Int64Rules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.Int64Rules.const', index=0, - number=1, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.Int64Rules.lt', index=1, - number=2, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.Int64Rules.lte', index=2, - number=3, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.Int64Rules.gt', index=3, - number=4, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - 
_descriptor.FieldDescriptor( - name='gte', full_name='validate.Int64Rules.gte', index=4, - number=5, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.Int64Rules.in', index=5, - number=6, type=3, cpp_type=2, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.Int64Rules.not_in', index=6, - number=7, type=3, cpp_type=2, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.Int64Rules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1448, - serialized_end=1575, -) - - -_UINT32RULES = _descriptor.Descriptor( - name='UInt32Rules', - full_name='validate.UInt32Rules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.UInt32Rules.const', index=0, - number=1, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.UInt32Rules.lt', index=1, - number=2, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.UInt32Rules.lte', index=2, - number=3, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.UInt32Rules.gt', index=3, - number=4, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.UInt32Rules.gte', index=4, - number=5, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.UInt32Rules.in', index=5, - number=6, type=13, cpp_type=3, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.UInt32Rules.not_in', index=6, - number=7, type=13, cpp_type=3, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.UInt32Rules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1578, - serialized_end=1706, -) - - -_UINT64RULES = _descriptor.Descriptor( - name='UInt64Rules', - full_name='validate.UInt64Rules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.UInt64Rules.const', index=0, - number=1, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.UInt64Rules.lt', index=1, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.UInt64Rules.lte', index=2, - number=3, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.UInt64Rules.gt', index=3, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.UInt64Rules.gte', index=4, - number=5, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.UInt64Rules.in', index=5, - number=6, type=4, cpp_type=4, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.UInt64Rules.not_in', index=6, - number=7, type=4, cpp_type=4, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.UInt64Rules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1709, - serialized_end=1837, -) - - -_SINT32RULES = _descriptor.Descriptor( - name='SInt32Rules', - full_name='validate.SInt32Rules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.SInt32Rules.const', index=0, - number=1, type=17, cpp_type=1, label=1, - has_default_value=False, default_value=0, - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.SInt32Rules.lt', index=1, - number=2, type=17, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.SInt32Rules.lte', index=2, - number=3, type=17, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.SInt32Rules.gt', index=3, - number=4, type=17, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.SInt32Rules.gte', index=4, - number=5, type=17, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.SInt32Rules.in', index=5, - number=6, type=17, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.SInt32Rules.not_in', index=6, - number=7, type=17, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.SInt32Rules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1840, - serialized_end=1968, -) - - -_SINT64RULES = _descriptor.Descriptor( - name='SInt64Rules', - full_name='validate.SInt64Rules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.SInt64Rules.const', index=0, - number=1, type=18, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.SInt64Rules.lt', index=1, - number=2, type=18, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.SInt64Rules.lte', index=2, - number=3, type=18, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.SInt64Rules.gt', index=3, - number=4, type=18, cpp_type=2, label=1, - has_default_value=False, 
default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.SInt64Rules.gte', index=4, - number=5, type=18, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.SInt64Rules.in', index=5, - number=6, type=18, cpp_type=2, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.SInt64Rules.not_in', index=6, - number=7, type=18, cpp_type=2, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.SInt64Rules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1971, - serialized_end=2099, -) - - -_FIXED32RULES = _descriptor.Descriptor( - name='Fixed32Rules', - full_name='validate.Fixed32Rules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.Fixed32Rules.const', index=0, - 
number=1, type=7, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.Fixed32Rules.lt', index=1, - number=2, type=7, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.Fixed32Rules.lte', index=2, - number=3, type=7, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.Fixed32Rules.gt', index=3, - number=4, type=7, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.Fixed32Rules.gte', index=4, - number=5, type=7, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.Fixed32Rules.in', index=5, - number=6, type=7, cpp_type=3, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.Fixed32Rules.not_in', index=6, - number=7, type=7, cpp_type=3, label=3, - 
has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.Fixed32Rules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2102, - serialized_end=2231, -) - - -_FIXED64RULES = _descriptor.Descriptor( - name='Fixed64Rules', - full_name='validate.Fixed64Rules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.Fixed64Rules.const', index=0, - number=1, type=6, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.Fixed64Rules.lt', index=1, - number=2, type=6, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.Fixed64Rules.lte', index=2, - number=3, type=6, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.Fixed64Rules.gt', 
index=3, - number=4, type=6, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.Fixed64Rules.gte', index=4, - number=5, type=6, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.Fixed64Rules.in', index=5, - number=6, type=6, cpp_type=4, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.Fixed64Rules.not_in', index=6, - number=7, type=6, cpp_type=4, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.Fixed64Rules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2234, - serialized_end=2363, -) - - -_SFIXED32RULES = _descriptor.Descriptor( - name='SFixed32Rules', - full_name='validate.SFixed32Rules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name='const', full_name='validate.SFixed32Rules.const', index=0, - number=1, type=15, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.SFixed32Rules.lt', index=1, - number=2, type=15, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.SFixed32Rules.lte', index=2, - number=3, type=15, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.SFixed32Rules.gt', index=3, - number=4, type=15, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.SFixed32Rules.gte', index=4, - number=5, type=15, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.SFixed32Rules.in', index=5, - number=6, type=15, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - 
name='not_in', full_name='validate.SFixed32Rules.not_in', index=6, - number=7, type=15, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.SFixed32Rules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2366, - serialized_end=2496, -) - - -_SFIXED64RULES = _descriptor.Descriptor( - name='SFixed64Rules', - full_name='validate.SFixed64Rules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.SFixed64Rules.const', index=0, - number=1, type=16, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.SFixed64Rules.lt', index=1, - number=2, type=16, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.SFixed64Rules.lte', index=2, - number=3, type=16, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.SFixed64Rules.gt', index=3, - number=4, type=16, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.SFixed64Rules.gte', index=4, - number=5, type=16, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.SFixed64Rules.in', index=5, - number=6, type=16, cpp_type=2, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.SFixed64Rules.not_in', index=6, - number=7, type=16, cpp_type=2, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.SFixed64Rules.ignore_empty', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2499, - serialized_end=2629, -) - - -_BOOLRULES = _descriptor.Descriptor( - name='BoolRules', - 
full_name='validate.BoolRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.BoolRules.const', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2631, - serialized_end=2657, -) - - -_STRINGRULES = _descriptor.Descriptor( - name='StringRules', - full_name='validate.StringRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.StringRules.const', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='len', full_name='validate.StringRules.len', index=1, - number=19, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='min_len', full_name='validate.StringRules.min_len', index=2, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='max_len', full_name='validate.StringRules.max_len', index=3, - number=3, type=4, cpp_type=4, label=1, - 
has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='len_bytes', full_name='validate.StringRules.len_bytes', index=4, - number=20, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='min_bytes', full_name='validate.StringRules.min_bytes', index=5, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='max_bytes', full_name='validate.StringRules.max_bytes', index=6, - number=5, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='pattern', full_name='validate.StringRules.pattern', index=7, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='prefix', full_name='validate.StringRules.prefix', index=8, - number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='suffix', full_name='validate.StringRules.suffix', 
index=9, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='contains', full_name='validate.StringRules.contains', index=10, - number=9, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_contains', full_name='validate.StringRules.not_contains', index=11, - number=23, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.StringRules.in', index=12, - number=10, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.StringRules.not_in', index=13, - number=11, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='email', full_name='validate.StringRules.email', index=14, - number=12, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - 
_descriptor.FieldDescriptor( - name='hostname', full_name='validate.StringRules.hostname', index=15, - number=13, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ip', full_name='validate.StringRules.ip', index=16, - number=14, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ipv4', full_name='validate.StringRules.ipv4', index=17, - number=15, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ipv6', full_name='validate.StringRules.ipv6', index=18, - number=16, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='uri', full_name='validate.StringRules.uri', index=19, - number=17, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='uri_ref', full_name='validate.StringRules.uri_ref', index=20, - number=18, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, 
file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='address', full_name='validate.StringRules.address', index=21, - number=21, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='uuid', full_name='validate.StringRules.uuid', index=22, - number=22, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='well_known_regex', full_name='validate.StringRules.well_known_regex', index=23, - number=24, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='strict', full_name='validate.StringRules.strict', index=24, - number=25, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=True, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.StringRules.ignore_empty', index=25, - number=26, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='well_known', full_name='validate.StringRules.well_known', - 
index=0, containing_type=None, fields=[]), - ], - serialized_start=2660, - serialized_end=3169, -) - - -_BYTESRULES = _descriptor.Descriptor( - name='BytesRules', - full_name='validate.BytesRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.BytesRules.const', index=0, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='len', full_name='validate.BytesRules.len', index=1, - number=13, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='min_len', full_name='validate.BytesRules.min_len', index=2, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='max_len', full_name='validate.BytesRules.max_len', index=3, - number=3, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='pattern', full_name='validate.BytesRules.pattern', index=4, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='prefix', 
full_name='validate.BytesRules.prefix', index=5, - number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='suffix', full_name='validate.BytesRules.suffix', index=6, - number=6, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='contains', full_name='validate.BytesRules.contains', index=7, - number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.BytesRules.in', index=8, - number=8, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.BytesRules.not_in', index=9, - number=9, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ip', full_name='validate.BytesRules.ip', index=10, - number=10, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ipv4', 
full_name='validate.BytesRules.ipv4', index=11, - number=11, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ipv6', full_name='validate.BytesRules.ipv6', index=12, - number=12, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.BytesRules.ignore_empty', index=13, - number=14, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='well_known', full_name='validate.BytesRules.well_known', - index=0, containing_type=None, fields=[]), - ], - serialized_start=3172, - serialized_end=3423, -) - - -_ENUMRULES = _descriptor.Descriptor( - name='EnumRules', - full_name='validate.EnumRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='const', full_name='validate.EnumRules.const', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='defined_only', full_name='validate.EnumRules.defined_only', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, 
default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.EnumRules.in', index=2, - number=3, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.EnumRules.not_in', index=3, - number=4, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3425, - serialized_end=3501, -) - - -_MESSAGERULES = _descriptor.Descriptor( - name='MessageRules', - full_name='validate.MessageRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='skip', full_name='validate.MessageRules.skip', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='required', full_name='validate.MessageRules.required', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - 
is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3503, - serialized_end=3549, -) - - -_REPEATEDRULES = _descriptor.Descriptor( - name='RepeatedRules', - full_name='validate.RepeatedRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='min_items', full_name='validate.RepeatedRules.min_items', index=0, - number=1, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='max_items', full_name='validate.RepeatedRules.max_items', index=1, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='unique', full_name='validate.RepeatedRules.unique', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='items', full_name='validate.RepeatedRules.items', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.RepeatedRules.ignore_empty', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, 
file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3552, - serialized_end=3680, -) - - -_MAPRULES = _descriptor.Descriptor( - name='MapRules', - full_name='validate.MapRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='min_pairs', full_name='validate.MapRules.min_pairs', index=0, - number=1, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='max_pairs', full_name='validate.MapRules.max_pairs', index=1, - number=2, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='no_sparse', full_name='validate.MapRules.no_sparse', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='keys', full_name='validate.MapRules.keys', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='values', full_name='validate.MapRules.values', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ignore_empty', full_name='validate.MapRules.ignore_empty', index=5, - number=6, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3683, - serialized_end=3846, -) - - -_ANYRULES = _descriptor.Descriptor( - name='AnyRules', - full_name='validate.AnyRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='required', full_name='validate.AnyRules.required', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.AnyRules.in', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.AnyRules.not_in', index=2, - number=3, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - 
serialized_start=3848, - serialized_end=3904, -) - - -_DURATIONRULES = _descriptor.Descriptor( - name='DurationRules', - full_name='validate.DurationRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='required', full_name='validate.DurationRules.required', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='const', full_name='validate.DurationRules.const', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt', full_name='validate.DurationRules.lt', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.DurationRules.lte', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.DurationRules.gt', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.DurationRules.gte', index=5, - 
number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='in', full_name='validate.DurationRules.in', index=6, - number=7, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='not_in', full_name='validate.DurationRules.not_in', index=7, - number=8, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3907, - serialized_end=4222, -) - - -_TIMESTAMPRULES = _descriptor.Descriptor( - name='TimestampRules', - full_name='validate.TimestampRules', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='required', full_name='validate.TimestampRules.required', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='const', full_name='validate.TimestampRules.const', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - 
_descriptor.FieldDescriptor( - name='lt', full_name='validate.TimestampRules.lt', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lte', full_name='validate.TimestampRules.lte', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt', full_name='validate.TimestampRules.gt', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gte', full_name='validate.TimestampRules.gte', index=5, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lt_now', full_name='validate.TimestampRules.lt_now', index=6, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='gt_now', full_name='validate.TimestampRules.gt_now', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, 
file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='within', full_name='validate.TimestampRules.within', index=8, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - serialized_start=4225, - serialized_end=4539, -) - -_FIELDRULES.fields_by_name['message'].message_type = _MESSAGERULES -_FIELDRULES.fields_by_name['float'].message_type = _FLOATRULES -_FIELDRULES.fields_by_name['double'].message_type = _DOUBLERULES -_FIELDRULES.fields_by_name['int32'].message_type = _INT32RULES -_FIELDRULES.fields_by_name['int64'].message_type = _INT64RULES -_FIELDRULES.fields_by_name['uint32'].message_type = _UINT32RULES -_FIELDRULES.fields_by_name['uint64'].message_type = _UINT64RULES -_FIELDRULES.fields_by_name['sint32'].message_type = _SINT32RULES -_FIELDRULES.fields_by_name['sint64'].message_type = _SINT64RULES -_FIELDRULES.fields_by_name['fixed32'].message_type = _FIXED32RULES -_FIELDRULES.fields_by_name['fixed64'].message_type = _FIXED64RULES -_FIELDRULES.fields_by_name['sfixed32'].message_type = _SFIXED32RULES -_FIELDRULES.fields_by_name['sfixed64'].message_type = _SFIXED64RULES -_FIELDRULES.fields_by_name['bool'].message_type = _BOOLRULES -_FIELDRULES.fields_by_name['string'].message_type = _STRINGRULES -_FIELDRULES.fields_by_name['bytes'].message_type = _BYTESRULES -_FIELDRULES.fields_by_name['enum'].message_type = _ENUMRULES -_FIELDRULES.fields_by_name['repeated'].message_type = _REPEATEDRULES -_FIELDRULES.fields_by_name['map'].message_type = _MAPRULES -_FIELDRULES.fields_by_name['any'].message_type = _ANYRULES -_FIELDRULES.fields_by_name['duration'].message_type = _DURATIONRULES 
-_FIELDRULES.fields_by_name['timestamp'].message_type = _TIMESTAMPRULES -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['float']) -_FIELDRULES.fields_by_name['float'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['double']) -_FIELDRULES.fields_by_name['double'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['int32']) -_FIELDRULES.fields_by_name['int32'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['int64']) -_FIELDRULES.fields_by_name['int64'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['uint32']) -_FIELDRULES.fields_by_name['uint32'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['uint64']) -_FIELDRULES.fields_by_name['uint64'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['sint32']) -_FIELDRULES.fields_by_name['sint32'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['sint64']) -_FIELDRULES.fields_by_name['sint64'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['fixed32']) -_FIELDRULES.fields_by_name['fixed32'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['fixed64']) -_FIELDRULES.fields_by_name['fixed64'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['sfixed32']) 
-_FIELDRULES.fields_by_name['sfixed32'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['sfixed64']) -_FIELDRULES.fields_by_name['sfixed64'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['bool']) -_FIELDRULES.fields_by_name['bool'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['string']) -_FIELDRULES.fields_by_name['string'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['bytes']) -_FIELDRULES.fields_by_name['bytes'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['enum']) -_FIELDRULES.fields_by_name['enum'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['repeated']) -_FIELDRULES.fields_by_name['repeated'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['map']) -_FIELDRULES.fields_by_name['map'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['any']) -_FIELDRULES.fields_by_name['any'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['duration']) -_FIELDRULES.fields_by_name['duration'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_FIELDRULES.oneofs_by_name['type'].fields.append( - _FIELDRULES.fields_by_name['timestamp']) -_FIELDRULES.fields_by_name['timestamp'].containing_oneof = _FIELDRULES.oneofs_by_name['type'] -_STRINGRULES.fields_by_name['well_known_regex'].enum_type = _KNOWNREGEX 
-_STRINGRULES.oneofs_by_name['well_known'].fields.append( - _STRINGRULES.fields_by_name['email']) -_STRINGRULES.fields_by_name['email'].containing_oneof = _STRINGRULES.oneofs_by_name['well_known'] -_STRINGRULES.oneofs_by_name['well_known'].fields.append( - _STRINGRULES.fields_by_name['hostname']) -_STRINGRULES.fields_by_name['hostname'].containing_oneof = _STRINGRULES.oneofs_by_name['well_known'] -_STRINGRULES.oneofs_by_name['well_known'].fields.append( - _STRINGRULES.fields_by_name['ip']) -_STRINGRULES.fields_by_name['ip'].containing_oneof = _STRINGRULES.oneofs_by_name['well_known'] -_STRINGRULES.oneofs_by_name['well_known'].fields.append( - _STRINGRULES.fields_by_name['ipv4']) -_STRINGRULES.fields_by_name['ipv4'].containing_oneof = _STRINGRULES.oneofs_by_name['well_known'] -_STRINGRULES.oneofs_by_name['well_known'].fields.append( - _STRINGRULES.fields_by_name['ipv6']) -_STRINGRULES.fields_by_name['ipv6'].containing_oneof = _STRINGRULES.oneofs_by_name['well_known'] -_STRINGRULES.oneofs_by_name['well_known'].fields.append( - _STRINGRULES.fields_by_name['uri']) -_STRINGRULES.fields_by_name['uri'].containing_oneof = _STRINGRULES.oneofs_by_name['well_known'] -_STRINGRULES.oneofs_by_name['well_known'].fields.append( - _STRINGRULES.fields_by_name['uri_ref']) -_STRINGRULES.fields_by_name['uri_ref'].containing_oneof = _STRINGRULES.oneofs_by_name['well_known'] -_STRINGRULES.oneofs_by_name['well_known'].fields.append( - _STRINGRULES.fields_by_name['address']) -_STRINGRULES.fields_by_name['address'].containing_oneof = _STRINGRULES.oneofs_by_name['well_known'] -_STRINGRULES.oneofs_by_name['well_known'].fields.append( - _STRINGRULES.fields_by_name['uuid']) -_STRINGRULES.fields_by_name['uuid'].containing_oneof = _STRINGRULES.oneofs_by_name['well_known'] -_STRINGRULES.oneofs_by_name['well_known'].fields.append( - _STRINGRULES.fields_by_name['well_known_regex']) -_STRINGRULES.fields_by_name['well_known_regex'].containing_oneof = _STRINGRULES.oneofs_by_name['well_known'] 
-_BYTESRULES.oneofs_by_name['well_known'].fields.append( - _BYTESRULES.fields_by_name['ip']) -_BYTESRULES.fields_by_name['ip'].containing_oneof = _BYTESRULES.oneofs_by_name['well_known'] -_BYTESRULES.oneofs_by_name['well_known'].fields.append( - _BYTESRULES.fields_by_name['ipv4']) -_BYTESRULES.fields_by_name['ipv4'].containing_oneof = _BYTESRULES.oneofs_by_name['well_known'] -_BYTESRULES.oneofs_by_name['well_known'].fields.append( - _BYTESRULES.fields_by_name['ipv6']) -_BYTESRULES.fields_by_name['ipv6'].containing_oneof = _BYTESRULES.oneofs_by_name['well_known'] -_REPEATEDRULES.fields_by_name['items'].message_type = _FIELDRULES -_MAPRULES.fields_by_name['keys'].message_type = _FIELDRULES -_MAPRULES.fields_by_name['values'].message_type = _FIELDRULES -_DURATIONRULES.fields_by_name['const'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_DURATIONRULES.fields_by_name['lt'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_DURATIONRULES.fields_by_name['lte'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_DURATIONRULES.fields_by_name['gt'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_DURATIONRULES.fields_by_name['gte'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_DURATIONRULES.fields_by_name['in'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_DURATIONRULES.fields_by_name['not_in'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_TIMESTAMPRULES.fields_by_name['const'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TIMESTAMPRULES.fields_by_name['lt'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TIMESTAMPRULES.fields_by_name['lte'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TIMESTAMPRULES.fields_by_name['gt'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_TIMESTAMPRULES.fields_by_name['gte'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP 
-_TIMESTAMPRULES.fields_by_name['within'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -DESCRIPTOR.message_types_by_name['FieldRules'] = _FIELDRULES -DESCRIPTOR.message_types_by_name['FloatRules'] = _FLOATRULES -DESCRIPTOR.message_types_by_name['DoubleRules'] = _DOUBLERULES -DESCRIPTOR.message_types_by_name['Int32Rules'] = _INT32RULES -DESCRIPTOR.message_types_by_name['Int64Rules'] = _INT64RULES -DESCRIPTOR.message_types_by_name['UInt32Rules'] = _UINT32RULES -DESCRIPTOR.message_types_by_name['UInt64Rules'] = _UINT64RULES -DESCRIPTOR.message_types_by_name['SInt32Rules'] = _SINT32RULES -DESCRIPTOR.message_types_by_name['SInt64Rules'] = _SINT64RULES -DESCRIPTOR.message_types_by_name['Fixed32Rules'] = _FIXED32RULES -DESCRIPTOR.message_types_by_name['Fixed64Rules'] = _FIXED64RULES -DESCRIPTOR.message_types_by_name['SFixed32Rules'] = _SFIXED32RULES -DESCRIPTOR.message_types_by_name['SFixed64Rules'] = _SFIXED64RULES -DESCRIPTOR.message_types_by_name['BoolRules'] = _BOOLRULES -DESCRIPTOR.message_types_by_name['StringRules'] = _STRINGRULES -DESCRIPTOR.message_types_by_name['BytesRules'] = _BYTESRULES -DESCRIPTOR.message_types_by_name['EnumRules'] = _ENUMRULES -DESCRIPTOR.message_types_by_name['MessageRules'] = _MESSAGERULES -DESCRIPTOR.message_types_by_name['RepeatedRules'] = _REPEATEDRULES -DESCRIPTOR.message_types_by_name['MapRules'] = _MAPRULES -DESCRIPTOR.message_types_by_name['AnyRules'] = _ANYRULES -DESCRIPTOR.message_types_by_name['DurationRules'] = _DURATIONRULES -DESCRIPTOR.message_types_by_name['TimestampRules'] = _TIMESTAMPRULES -DESCRIPTOR.enum_types_by_name['KnownRegex'] = _KNOWNREGEX -DESCRIPTOR.extensions_by_name['disabled'] = disabled -DESCRIPTOR.extensions_by_name['ignored'] = ignored -DESCRIPTOR.extensions_by_name['required'] = required -DESCRIPTOR.extensions_by_name['rules'] = rules -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -FieldRules = _reflection.GeneratedProtocolMessageType('FieldRules', (_message.Message,), dict( - DESCRIPTOR = 
_FIELDRULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.FieldRules) - )) -_sym_db.RegisterMessage(FieldRules) - -FloatRules = _reflection.GeneratedProtocolMessageType('FloatRules', (_message.Message,), dict( - DESCRIPTOR = _FLOATRULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.FloatRules) - )) -_sym_db.RegisterMessage(FloatRules) - -DoubleRules = _reflection.GeneratedProtocolMessageType('DoubleRules', (_message.Message,), dict( - DESCRIPTOR = _DOUBLERULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.DoubleRules) - )) -_sym_db.RegisterMessage(DoubleRules) - -Int32Rules = _reflection.GeneratedProtocolMessageType('Int32Rules', (_message.Message,), dict( - DESCRIPTOR = _INT32RULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.Int32Rules) - )) -_sym_db.RegisterMessage(Int32Rules) - -Int64Rules = _reflection.GeneratedProtocolMessageType('Int64Rules', (_message.Message,), dict( - DESCRIPTOR = _INT64RULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.Int64Rules) - )) -_sym_db.RegisterMessage(Int64Rules) - -UInt32Rules = _reflection.GeneratedProtocolMessageType('UInt32Rules', (_message.Message,), dict( - DESCRIPTOR = _UINT32RULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.UInt32Rules) - )) -_sym_db.RegisterMessage(UInt32Rules) - -UInt64Rules = _reflection.GeneratedProtocolMessageType('UInt64Rules', (_message.Message,), dict( - DESCRIPTOR = _UINT64RULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.UInt64Rules) - )) -_sym_db.RegisterMessage(UInt64Rules) - -SInt32Rules = _reflection.GeneratedProtocolMessageType('SInt32Rules', (_message.Message,), dict( - DESCRIPTOR = _SINT32RULES, - __module__ = 'validate.validate_pb2' - # 
@@protoc_insertion_point(class_scope:validate.SInt32Rules) - )) -_sym_db.RegisterMessage(SInt32Rules) - -SInt64Rules = _reflection.GeneratedProtocolMessageType('SInt64Rules', (_message.Message,), dict( - DESCRIPTOR = _SINT64RULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.SInt64Rules) - )) -_sym_db.RegisterMessage(SInt64Rules) - -Fixed32Rules = _reflection.GeneratedProtocolMessageType('Fixed32Rules', (_message.Message,), dict( - DESCRIPTOR = _FIXED32RULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.Fixed32Rules) - )) -_sym_db.RegisterMessage(Fixed32Rules) - -Fixed64Rules = _reflection.GeneratedProtocolMessageType('Fixed64Rules', (_message.Message,), dict( - DESCRIPTOR = _FIXED64RULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.Fixed64Rules) - )) -_sym_db.RegisterMessage(Fixed64Rules) - -SFixed32Rules = _reflection.GeneratedProtocolMessageType('SFixed32Rules', (_message.Message,), dict( - DESCRIPTOR = _SFIXED32RULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.SFixed32Rules) - )) -_sym_db.RegisterMessage(SFixed32Rules) - -SFixed64Rules = _reflection.GeneratedProtocolMessageType('SFixed64Rules', (_message.Message,), dict( - DESCRIPTOR = _SFIXED64RULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.SFixed64Rules) - )) -_sym_db.RegisterMessage(SFixed64Rules) - -BoolRules = _reflection.GeneratedProtocolMessageType('BoolRules', (_message.Message,), dict( - DESCRIPTOR = _BOOLRULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.BoolRules) - )) -_sym_db.RegisterMessage(BoolRules) - -StringRules = _reflection.GeneratedProtocolMessageType('StringRules', (_message.Message,), dict( - DESCRIPTOR = _STRINGRULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.StringRules) - 
)) -_sym_db.RegisterMessage(StringRules) - -BytesRules = _reflection.GeneratedProtocolMessageType('BytesRules', (_message.Message,), dict( - DESCRIPTOR = _BYTESRULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.BytesRules) - )) -_sym_db.RegisterMessage(BytesRules) - -EnumRules = _reflection.GeneratedProtocolMessageType('EnumRules', (_message.Message,), dict( - DESCRIPTOR = _ENUMRULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.EnumRules) - )) -_sym_db.RegisterMessage(EnumRules) - -MessageRules = _reflection.GeneratedProtocolMessageType('MessageRules', (_message.Message,), dict( - DESCRIPTOR = _MESSAGERULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.MessageRules) - )) -_sym_db.RegisterMessage(MessageRules) - -RepeatedRules = _reflection.GeneratedProtocolMessageType('RepeatedRules', (_message.Message,), dict( - DESCRIPTOR = _REPEATEDRULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.RepeatedRules) - )) -_sym_db.RegisterMessage(RepeatedRules) - -MapRules = _reflection.GeneratedProtocolMessageType('MapRules', (_message.Message,), dict( - DESCRIPTOR = _MAPRULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.MapRules) - )) -_sym_db.RegisterMessage(MapRules) - -AnyRules = _reflection.GeneratedProtocolMessageType('AnyRules', (_message.Message,), dict( - DESCRIPTOR = _ANYRULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.AnyRules) - )) -_sym_db.RegisterMessage(AnyRules) - -DurationRules = _reflection.GeneratedProtocolMessageType('DurationRules', (_message.Message,), dict( - DESCRIPTOR = _DURATIONRULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.DurationRules) - )) -_sym_db.RegisterMessage(DurationRules) - -TimestampRules = 
_reflection.GeneratedProtocolMessageType('TimestampRules', (_message.Message,), dict( - DESCRIPTOR = _TIMESTAMPRULES, - __module__ = 'validate.validate_pb2' - # @@protoc_insertion_point(class_scope:validate.TimestampRules) - )) -_sym_db.RegisterMessage(TimestampRules) - -google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(disabled) -google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(ignored) -google_dot_protobuf_dot_descriptor__pb2.OneofOptions.RegisterExtension(required) -rules.message_type = _FIELDRULES -google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(rules) - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/flyteplugins/go/tasks/config_load_test.go b/flyteplugins/go/tasks/config_load_test.go index 6e02277742..9d27afb26a 100644 --- a/flyteplugins/go/tasks/config_load_test.go +++ b/flyteplugins/go/tasks/config_load_test.go @@ -44,7 +44,6 @@ func TestLoadConfig(t *testing.T) { }, k8sConfig.DefaultEnvVars) assert.NotNil(t, k8sConfig.ResourceTolerations) assert.Contains(t, k8sConfig.ResourceTolerations, v1.ResourceName("nvidia.com/gpu")) - assert.Contains(t, k8sConfig.ResourceTolerations, v1.ResourceStorage) tolGPU := v1.Toleration{ Key: "flyte/gpu", Value: "dedicated", @@ -52,15 +51,7 @@ func TestLoadConfig(t *testing.T) { Effect: v1.TaintEffectNoSchedule, } - tolStorage := v1.Toleration{ - Key: "storage", - Value: "special", - Operator: v1.TolerationOpEqual, - Effect: v1.TaintEffectPreferNoSchedule, - } - assert.Equal(t, []v1.Toleration{tolGPU}, k8sConfig.ResourceTolerations[v1.ResourceName("nvidia.com/gpu")]) - assert.Equal(t, []v1.Toleration{tolStorage}, k8sConfig.ResourceTolerations[v1.ResourceStorage]) expectedCPU := resource.MustParse("1000m") assert.True(t, expectedCPU.Equal(k8sConfig.DefaultCPURequest)) expectedMemory := resource.MustParse("1024Mi") diff --git a/flyteplugins/go/tasks/logs/config.go b/flyteplugins/go/tasks/logs/config.go index 
b802844a4a..c8eb293035 100644 --- a/flyteplugins/go/tasks/logs/config.go +++ b/flyteplugins/go/tasks/logs/config.go @@ -28,8 +28,7 @@ type LogConfig struct { StackdriverLogResourceName string `json:"stackdriver-logresourcename" pflag:",Name of the logresource in stackdriver"` StackDriverTemplateURI tasklog.TemplateURI `json:"stackdriver-template-uri" pflag:",Template Uri to use when building stackdriver log links"` - IsFlyinEnabled bool `json:"flyin-enabled" pflag:",Enable Log-links to flyin logs"` - FlyinTemplateURI tasklog.TemplateURI `json:"flyin-template-uri" pflag:",Template Uri to use when building flyin log links"` + DynamicLogLinks map[string]tasklog.TemplateLogPlugin `json:"dynamic-log-links" pflag:"-,Map of dynamic log links"` Templates []tasklog.TemplateLogPlugin `json:"templates" pflag:"-,"` } diff --git a/flyteplugins/go/tasks/logs/logconfig_flags.go b/flyteplugins/go/tasks/logs/logconfig_flags.go index de8ba022dc..00c08a8a58 100755 --- a/flyteplugins/go/tasks/logs/logconfig_flags.go +++ b/flyteplugins/go/tasks/logs/logconfig_flags.go @@ -61,7 +61,5 @@ func (cfg LogConfig) GetPFlagSet(prefix string) *pflag.FlagSet { cmdFlags.String(fmt.Sprintf("%v%v", prefix, "gcp-project"), DefaultConfig.GCPProjectName, "Name of the project in GCP") cmdFlags.String(fmt.Sprintf("%v%v", prefix, "stackdriver-logresourcename"), DefaultConfig.StackdriverLogResourceName, "Name of the logresource in stackdriver") cmdFlags.String(fmt.Sprintf("%v%v", prefix, "stackdriver-template-uri"), DefaultConfig.StackDriverTemplateURI, "Template Uri to use when building stackdriver log links") - cmdFlags.Bool(fmt.Sprintf("%v%v", prefix, "flyin-enabled"), DefaultConfig.IsFlyinEnabled, "Enable Log-links to flyin logs") - cmdFlags.String(fmt.Sprintf("%v%v", prefix, "flyin-template-uri"), DefaultConfig.FlyinTemplateURI, "Template Uri to use when building flyin log links") return cmdFlags } diff --git a/flyteplugins/go/tasks/logs/logconfig_flags_test.go 
b/flyteplugins/go/tasks/logs/logconfig_flags_test.go index dfbee43c69..8bb775df1f 100755 --- a/flyteplugins/go/tasks/logs/logconfig_flags_test.go +++ b/flyteplugins/go/tasks/logs/logconfig_flags_test.go @@ -253,32 +253,4 @@ func TestLogConfig_SetFlags(t *testing.T) { } }) }) - t.Run("Test_flyin-enabled", func(t *testing.T) { - - t.Run("Override", func(t *testing.T) { - testValue := "1" - - cmdFlags.Set("flyin-enabled", testValue) - if vBool, err := cmdFlags.GetBool("flyin-enabled"); err == nil { - testDecodeJson_LogConfig(t, fmt.Sprintf("%v", vBool), &actual.IsFlyinEnabled) - - } else { - assert.FailNow(t, err.Error()) - } - }) - }) - t.Run("Test_flyin-template-uri", func(t *testing.T) { - - t.Run("Override", func(t *testing.T) { - testValue := "1" - - cmdFlags.Set("flyin-template-uri", testValue) - if vString, err := cmdFlags.GetString("flyin-template-uri"); err == nil { - testDecodeJson_LogConfig(t, fmt.Sprintf("%v", vString), &actual.FlyinTemplateURI) - - } else { - assert.FailNow(t, err.Error()) - } - }) - }) } diff --git a/flyteplugins/go/tasks/logs/logging_utils.go b/flyteplugins/go/tasks/logs/logging_utils.go index 20f3522e27..04d23f4ec8 100644 --- a/flyteplugins/go/tasks/logs/logging_utils.go +++ b/flyteplugins/go/tasks/logs/logging_utils.go @@ -14,7 +14,7 @@ import ( ) // Internal -func GetLogsForContainerInPod(ctx context.Context, logPlugin tasklog.Plugin, taskExecID pluginsCore.TaskExecutionID, pod *v1.Pod, index uint32, nameSuffix string, extraLogTemplateVarsByScheme *tasklog.TemplateVarsByScheme, taskTemplate *core.TaskTemplate) ([]*core.TaskLog, error) { +func GetLogsForContainerInPod(ctx context.Context, logPlugin tasklog.Plugin, taskExecID pluginsCore.TaskExecutionID, pod *v1.Pod, index uint32, nameSuffix string, extraLogTemplateVars []tasklog.TemplateVar, taskTemplate *core.TaskTemplate) ([]*core.TaskLog, error) { if logPlugin == nil { return nil, nil } @@ -39,19 +39,19 @@ func GetLogsForContainerInPod(ctx context.Context, logPlugin tasklog.Plugin, 
tas logs, err := logPlugin.GetTaskLogs( tasklog.Input{ - PodName: pod.Name, - PodUID: string(pod.GetUID()), - Namespace: pod.Namespace, - ContainerName: pod.Spec.Containers[index].Name, - ContainerID: pod.Status.ContainerStatuses[index].ContainerID, - LogName: nameSuffix, - PodRFC3339StartTime: time.Unix(startTime, 0).Format(time.RFC3339), - PodRFC3339FinishTime: time.Unix(finishTime, 0).Format(time.RFC3339), - PodUnixStartTime: startTime, - PodUnixFinishTime: finishTime, - TaskExecutionID: taskExecID, - ExtraTemplateVarsByScheme: extraLogTemplateVarsByScheme, - TaskTemplate: taskTemplate, + PodName: pod.Name, + PodUID: string(pod.GetUID()), + Namespace: pod.Namespace, + ContainerName: pod.Spec.Containers[index].Name, + ContainerID: pod.Status.ContainerStatuses[index].ContainerID, + LogName: nameSuffix, + PodRFC3339StartTime: time.Unix(startTime, 0).Format(time.RFC3339), + PodRFC3339FinishTime: time.Unix(finishTime, 0).Format(time.RFC3339), + PodUnixStartTime: startTime, + PodUnixFinishTime: finishTime, + TaskExecutionID: taskExecID, + ExtraTemplateVars: extraLogTemplateVars, + TaskTemplate: taskTemplate, }, ) @@ -63,13 +63,14 @@ func GetLogsForContainerInPod(ctx context.Context, logPlugin tasklog.Plugin, tas } type templateLogPluginCollection struct { - plugins []tasklog.TemplateLogPlugin + plugins []tasklog.TemplateLogPlugin + dynamicPlugins []tasklog.TemplateLogPlugin } func (t templateLogPluginCollection) GetTaskLogs(input tasklog.Input) (tasklog.Output, error) { var taskLogs []*core.TaskLog - for _, plugin := range t.plugins { + for _, plugin := range append(t.plugins, t.dynamicPlugins...) { o, err := plugin.GetTaskLogs(input) if err != nil { return tasklog.Output{}, err @@ -84,39 +85,43 @@ func (t templateLogPluginCollection) GetTaskLogs(input tasklog.Input) (tasklog.O func InitializeLogPlugins(cfg *LogConfig) (tasklog.Plugin, error) { // Use a list to maintain order. 
var plugins []tasklog.TemplateLogPlugin + var dynamicPlugins []tasklog.TemplateLogPlugin if cfg.IsKubernetesEnabled { if len(cfg.KubernetesTemplateURI) > 0 { - plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Kubernetes Logs", Scheme: tasklog.TemplateSchemePod, TemplateURIs: []tasklog.TemplateURI{cfg.KubernetesTemplateURI}, MessageFormat: core.TaskLog_JSON}) + plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Kubernetes Logs", TemplateURIs: []tasklog.TemplateURI{cfg.KubernetesTemplateURI}, MessageFormat: core.TaskLog_JSON}) } else { - plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Kubernetes Logs", Scheme: tasklog.TemplateSchemePod, TemplateURIs: []tasklog.TemplateURI{fmt.Sprintf("%s/#!/log/{{ .namespace }}/{{ .podName }}/pod?namespace={{ .namespace }}", cfg.KubernetesURL)}, MessageFormat: core.TaskLog_JSON}) + plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Kubernetes Logs", TemplateURIs: []tasklog.TemplateURI{fmt.Sprintf("%s/#!/log/{{ .namespace }}/{{ .podName }}/pod?namespace={{ .namespace }}", cfg.KubernetesURL)}, MessageFormat: core.TaskLog_JSON}) } } if cfg.IsCloudwatchEnabled { if len(cfg.CloudwatchTemplateURI) > 0 { - plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Cloudwatch Logs", Scheme: tasklog.TemplateSchemePod, TemplateURIs: []tasklog.TemplateURI{cfg.CloudwatchTemplateURI}, MessageFormat: core.TaskLog_JSON}) + plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Cloudwatch Logs", TemplateURIs: []tasklog.TemplateURI{cfg.CloudwatchTemplateURI}, MessageFormat: core.TaskLog_JSON}) } else { - plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Cloudwatch Logs", Scheme: tasklog.TemplateSchemePod, TemplateURIs: []tasklog.TemplateURI{fmt.Sprintf("https://console.aws.amazon.com/cloudwatch/home?region=%s#logEventViewer:group=%s;stream=var.log.containers.{{ .podName }}_{{ .namespace }}_{{ .containerName }}-{{ .containerId }}.log", 
cfg.CloudwatchRegion, cfg.CloudwatchLogGroup)}, MessageFormat: core.TaskLog_JSON}) + plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Cloudwatch Logs", TemplateURIs: []tasklog.TemplateURI{fmt.Sprintf("https://console.aws.amazon.com/cloudwatch/home?region=%s#logEventViewer:group=%s;stream=var.log.containers.{{ .podName }}_{{ .namespace }}_{{ .containerName }}-{{ .containerId }}.log", cfg.CloudwatchRegion, cfg.CloudwatchLogGroup)}, MessageFormat: core.TaskLog_JSON}) } } if cfg.IsStackDriverEnabled { if len(cfg.StackDriverTemplateURI) > 0 { - plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Stackdriver Logs", Scheme: tasklog.TemplateSchemePod, TemplateURIs: []tasklog.TemplateURI{cfg.StackDriverTemplateURI}, MessageFormat: core.TaskLog_JSON}) + plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Stackdriver Logs", TemplateURIs: []tasklog.TemplateURI{cfg.StackDriverTemplateURI}, MessageFormat: core.TaskLog_JSON}) } else { - plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Stackdriver Logs", Scheme: tasklog.TemplateSchemePod, TemplateURIs: []tasklog.TemplateURI{fmt.Sprintf("https://console.cloud.google.com/logs/viewer?project=%s&angularJsUrl=%%2Flogs%%2Fviewer%%3Fproject%%3D%s&resource=%s&advancedFilter=resource.labels.pod_name%%3D{{ .podName }}", cfg.GCPProjectName, cfg.GCPProjectName, cfg.StackdriverLogResourceName)}, MessageFormat: core.TaskLog_JSON}) + plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Stackdriver Logs", TemplateURIs: []tasklog.TemplateURI{fmt.Sprintf("https://console.cloud.google.com/logs/viewer?project=%s&angularJsUrl=%%2Flogs%%2Fviewer%%3Fproject%%3D%s&resource=%s&advancedFilter=resource.labels.pod_name%%3D{{ .podName }}", cfg.GCPProjectName, cfg.GCPProjectName, cfg.StackdriverLogResourceName)}, MessageFormat: core.TaskLog_JSON}) } } - if cfg.IsFlyinEnabled { - if len(cfg.FlyinTemplateURI) > 0 { - plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Flyin 
Logs", Scheme: tasklog.TemplateSchemeFlyin, TemplateURIs: []tasklog.TemplateURI{cfg.FlyinTemplateURI}, MessageFormat: core.TaskLog_JSON}) - } else { - plugins = append(plugins, tasklog.TemplateLogPlugin{DisplayName: "Flyin Logs", Scheme: tasklog.TemplateSchemeFlyin, TemplateURIs: []tasklog.TemplateURI{fmt.Sprintf("https://flyin.%s/logs/{{ .namespace }}/{{ .podName }}/{{ .containerName }}/{{ .containerId }}", cfg.GCPProjectName)}, MessageFormat: core.TaskLog_JSON}) - } + for logLinkType, dynamicLogLink := range cfg.DynamicLogLinks { + dynamicPlugins = append( + dynamicPlugins, + tasklog.TemplateLogPlugin{ + Name: logLinkType, + DisplayName: dynamicLogLink.DisplayName, + DynamicTemplateURIs: dynamicLogLink.TemplateURIs, + MessageFormat: core.TaskLog_JSON, + }) } plugins = append(plugins, cfg.Templates...) - return templateLogPluginCollection{plugins: plugins}, nil + return templateLogPluginCollection{plugins: plugins, dynamicPlugins: dynamicPlugins}, nil } diff --git a/flyteplugins/go/tasks/logs/logging_utils_test.go b/flyteplugins/go/tasks/logs/logging_utils_test.go index 46eb682201..946d069d50 100644 --- a/flyteplugins/go/tasks/logs/logging_utils_test.go +++ b/flyteplugins/go/tasks/logs/logging_utils_test.go @@ -334,7 +334,6 @@ func TestGetLogsForContainerInPod_Templates(t *testing.T) { "https://flyte.corp.net/console/projects/{{ .executionProject }}/domains/{{ .executionDomain }}/executions/{{ .executionName }}/nodeId/{{ .nodeID }}/taskId/{{ .taskID }}/attempt/{{ .taskRetryAttempt }}/view/logs", }, MessageFormat: core.TaskLog_JSON, - Scheme: tasklog.TemplateSchemeTaskExecution, }, }, }, nil, []*core.TaskLog{ @@ -351,30 +350,164 @@ func TestGetLogsForContainerInPod_Templates(t *testing.T) { }) } -func TestGetLogsForContainerInPod_Flyin(t *testing.T) { - assertTestSucceeded(t, - &LogConfig{ - IsKubernetesEnabled: true, - KubernetesTemplateURI: "https://k8s.com", - IsFlyinEnabled: true, - FlyinTemplateURI: "https://flyin.mydomain.com:{{ .port }}/{{ .namespace }}/{{ 
.podName }}/{{ .containerName }}/{{ .containerId }}", +func TestGetLogsForContainerInPod_Flyteinteractive(t *testing.T) { + tests := []struct { + name string + config *LogConfig + template *core.TaskTemplate + expectedTaskLogs []*core.TaskLog + }{ + { + "Flyteinteractive enabled but no task template", + &LogConfig{ + DynamicLogLinks: map[string]tasklog.TemplateLogPlugin{ + "vscode": tasklog.TemplateLogPlugin{ + DisplayName: "vscode link", + TemplateURIs: []tasklog.TemplateURI{ + "https://flyteinteractive.mydomain.com:{{ .taskConfig.port }}/{{ .namespace }}/{{ .podName }}/{{ .containerName }}/{{ .containerId }}", + }, + }, + }, + }, + nil, + nil, }, - &core.TaskTemplate{ - Config: map[string]string{ - "link_type": "vscode", - "port": "65535", + { + "Flyteinteractive enabled but config not found in task template", + &LogConfig{ + DynamicLogLinks: map[string]tasklog.TemplateLogPlugin{ + "vscode": tasklog.TemplateLogPlugin{ + DisplayName: "vscode link", + TemplateURIs: []tasklog.TemplateURI{ + "https://flyteinteractive.mydomain.com:{{ .taskConfig.port }}/{{ .namespace }}/{{ .podName }}/{{ .containerName }}/{{ .containerId }}", + }, + }, + }, }, + &core.TaskTemplate{}, + nil, }, - []*core.TaskLog{ - { - Uri: "https://k8s.com", - MessageFormat: core.TaskLog_JSON, - Name: "Kubernetes Logs my-Suffix", + { + "Flyteinteractive disabled but config present in TaskTemplate", + &LogConfig{}, + &core.TaskTemplate{ + Config: map[string]string{ + "link_type": "vscode", + "port": "65535", + }, }, - { - Uri: "https://flyin.mydomain.com:65535/my-namespace/my-pod/ContainerName/ContainerID", - MessageFormat: core.TaskLog_JSON, - Name: "Flyin Logs my-Suffix", + nil, + }, + { + "Flyteinteractive - multiple dynamic options", + &LogConfig{ + DynamicLogLinks: map[string]tasklog.TemplateLogPlugin{ + "vscode": tasklog.TemplateLogPlugin{ + DisplayName: "vscode link", + TemplateURIs: []tasklog.TemplateURI{ + "https://abc.com:{{ .taskConfig.port }}/{{ .taskConfig.route }}", + }, + }, + }, + }, + 
&core.TaskTemplate{ + Config: map[string]string{ + "link_type": "vscode", + "port": "65535", + "route": "a-route", + }, + }, + []*core.TaskLog{ + { + Uri: "https://abc.com:65535/a-route", + MessageFormat: core.TaskLog_JSON, + Name: "vscode link my-Suffix", + }, + }, + }, + { + "Flyteinteractive - multiple uses of the template (invalid use of ports in a URI)", + &LogConfig{ + DynamicLogLinks: map[string]tasklog.TemplateLogPlugin{ + "vscode": tasklog.TemplateLogPlugin{ + DisplayName: "vscode link", + TemplateURIs: []tasklog.TemplateURI{ + "https://abc.com:{{ .taskConfig.port }}:{{ .taskConfig.port}}", + }, + }, + }, + }, + &core.TaskTemplate{ + Config: map[string]string{ + "link_type": "vscode", + "port": "65535", + }, + }, + []*core.TaskLog{ + { + Uri: "https://abc.com:65535:65535", + MessageFormat: core.TaskLog_JSON, + Name: "vscode link my-Suffix", + }, + }, + }, + { + "Flyteinteractive disabled and K8s enabled and flyteinteractive config present in TaskTemplate", + &LogConfig{ + IsKubernetesEnabled: true, + KubernetesTemplateURI: "https://k8s.com/{{ .namespace }}/{{ .podName }}/{{ .containerName }}/{{ .containerId }}", + }, + &core.TaskTemplate{ + Config: map[string]string{ + "link_type": "vscode", + "port": "65535", + }, + }, + []*core.TaskLog{ + { + Uri: "https://k8s.com/my-namespace/my-pod/ContainerName/ContainerID", + MessageFormat: core.TaskLog_JSON, + Name: "Kubernetes Logs my-Suffix", + }, + }, + }, + { + "Flyteinteractive and K8s enabled", + &LogConfig{ + IsKubernetesEnabled: true, + KubernetesTemplateURI: "https://k8s.com/{{ .namespace }}/{{ .podName }}/{{ .containerName }}/{{ .containerId }}", + DynamicLogLinks: map[string]tasklog.TemplateLogPlugin{ + "vscode": tasklog.TemplateLogPlugin{ + DisplayName: "vscode link", + TemplateURIs: []tasklog.TemplateURI{ + "https://flyteinteractive.mydomain.com:{{ .taskConfig.port }}/{{ .namespace }}/{{ .podName }}/{{ .containerName }}/{{ .containerId }}", + }, + }, + }, }, + &core.TaskTemplate{ + Config: 
map[string]string{ + "link_type": "vscode", + "port": "65535", + }, + }, + []*core.TaskLog{ + { + Uri: "https://k8s.com/my-namespace/my-pod/ContainerName/ContainerID", + MessageFormat: core.TaskLog_JSON, + Name: "Kubernetes Logs my-Suffix", + }, + { + Uri: "https://flyteinteractive.mydomain.com:65535/my-namespace/my-pod/ContainerName/ContainerID", + MessageFormat: core.TaskLog_JSON, + Name: "vscode link my-Suffix", + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assertTestSucceeded(t, tt.config, tt.template, tt.expectedTaskLogs) }) + } } diff --git a/flyteplugins/go/tasks/pluginmachinery/catalog/async_client_impl_test.go b/flyteplugins/go/tasks/pluginmachinery/catalog/async_client_impl_test.go index 4b8e2efb37..03d2d99d73 100644 --- a/flyteplugins/go/tasks/pluginmachinery/catalog/async_client_impl_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/catalog/async_client_impl_test.go @@ -110,8 +110,8 @@ func TestAsyncClientImpl_Download(t *testing.T) { func TestAsyncClientImpl_Upload(t *testing.T) { ctx := context.Background() - inputHash1 := "{UNSPECIFIED {} [] 0}:-0-DNhkpTTPC5YDtRGb4yT-PFxgMSgHzHrKAQKgQGEfGRY" - inputHash2 := "{UNSPECIFIED {} [] 0}:-1-26M4dwarvBVJqJSUC4JC1GtRYgVBIAmQfsFSdLVMlAc" + inputHash1 := "{UNSPECIFIED {} [] 0}:-0-DNhkpTTPC5YDtRGb4yT-PFxgMSgHzHrKAQKgQGEfGRY" + inputHash2 := "{UNSPECIFIED {} [] 0}:-1-26M4dwarvBVJqJSUC4JC1GtRYgVBIAmQfsFSdLVMlAc" q := &mocks.IndexedWorkQueue{} info := &mocks.WorkItemInfo{} diff --git a/flyteplugins/go/tasks/pluginmachinery/core/phase_enumer.go b/flyteplugins/go/tasks/pluginmachinery/core/phase_enumer.go index bd26c4d560..caa4d86250 100644 --- a/flyteplugins/go/tasks/pluginmachinery/core/phase_enumer.go +++ b/flyteplugins/go/tasks/pluginmachinery/core/phase_enumer.go @@ -6,9 +6,9 @@ import ( "fmt" ) -const _PhaseName = 
"PhaseUndefinedPhaseNotReadyPhaseWaitingForResourcesPhaseQueuedPhaseInitializingPhaseRunningPhaseSuccessPhaseRetryableFailurePhasePermanentFailurePhaseWaitingForCache" +const _PhaseName = "PhaseUndefinedPhaseNotReadyPhaseWaitingForResourcesPhaseQueuedPhaseInitializingPhaseRunningPhaseSuccessPhaseRetryableFailurePhasePermanentFailurePhaseWaitingForCachePhaseAborted" -var _PhaseIndex = [...]uint8{0, 14, 27, 51, 62, 79, 91, 103, 124, 145, 165} +var _PhaseIndex = [...]uint8{0, 14, 27, 51, 62, 79, 91, 103, 124, 145, 165, 177} func (i Phase) String() string { if i < 0 || i >= Phase(len(_PhaseIndex)-1) { @@ -17,7 +17,7 @@ func (i Phase) String() string { return _PhaseName[_PhaseIndex[i]:_PhaseIndex[i+1]] } -var _PhaseValues = []Phase{0, 1, 2, 3, 4, 5, 6, 7, 8, 9} +var _PhaseValues = []Phase{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10} var _PhaseNameToValueMap = map[string]Phase{ _PhaseName[0:14]: 0, @@ -30,6 +30,7 @@ var _PhaseNameToValueMap = map[string]Phase{ _PhaseName[103:124]: 7, _PhaseName[124:145]: 8, _PhaseName[145:165]: 9, + _PhaseName[165:177]: 10, } // PhaseString retrieves an enum value from the enum constants string name. diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper.go index b2ed99fbdb..cffc57a77d 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper.go @@ -160,8 +160,6 @@ func ApplyResourceOverrides(resources, platformResources v1.ResourceRequirements // TODO: Make configurable. 1/15/2019 Flyte Cluster doesn't support setting storage requests/limits. 
// https://github.com/kubernetes/enhancements/issues/362 - delete(resources.Requests, v1.ResourceStorage) - delete(resources.Limits, v1.ResourceStorage) gpuResourceName := config.GetK8sPluginConfig().GpuResourceName shouldAdjustGPU := false diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper_test.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper_test.go index 4515d6311c..f07db9417d 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/container_helper_test.go @@ -211,13 +211,11 @@ func TestApplyResourceOverrides_RemoveStorage(t *testing.T) { requestedResourceQuantity := resource.MustParse("1") overrides := ApplyResourceOverrides(v1.ResourceRequirements{ Requests: v1.ResourceList{ - v1.ResourceStorage: requestedResourceQuantity, v1.ResourceMemory: requestedResourceQuantity, v1.ResourceCPU: requestedResourceQuantity, v1.ResourceEphemeralStorage: requestedResourceQuantity, }, Limits: v1.ResourceList{ - v1.ResourceStorage: requestedResourceQuantity, v1.ResourceMemory: requestedResourceQuantity, v1.ResourceEphemeralStorage: requestedResourceQuantity, }, @@ -261,7 +259,6 @@ func TestMergeResources_EmptyIn(t *testing.T) { v1.ResourceEphemeralStorage: requestedResourceQuantity, }, Limits: v1.ResourceList{ - v1.ResourceStorage: requestedResourceQuantity, v1.ResourceMemory: requestedResourceQuantity, v1.ResourceEphemeralStorage: requestedResourceQuantity, }, @@ -280,7 +277,6 @@ func TestMergeResources_EmptyOut(t *testing.T) { v1.ResourceEphemeralStorage: requestedResourceQuantity, }, Limits: v1.ResourceList{ - v1.ResourceStorage: requestedResourceQuantity, v1.ResourceMemory: requestedResourceQuantity, v1.ResourceEphemeralStorage: requestedResourceQuantity, }, diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds_test.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds_test.go index 60a3833397..aec1f573a6 
100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/k8s_resource_adds_test.go @@ -33,8 +33,8 @@ func TestGetTolerationsForResources(t *testing.T) { Effect: v12.TaintEffectNoSchedule, } - tolStorage := v12.Toleration{ - Key: "storage", + tolEphemeralStorage := v12.Toleration{ + Key: "ephemeral-storage", Value: "dedicated", Operator: v12.TolerationOpExists, Effect: v12.TaintEffectNoSchedule, @@ -55,8 +55,8 @@ func TestGetTolerationsForResources(t *testing.T) { args{ v12.ResourceRequirements{ Limits: v12.ResourceList{ - v12.ResourceCPU: resource.MustParse("1024m"), - v12.ResourceStorage: resource.MustParse("100M"), + v12.ResourceCPU: resource.MustParse("1024m"), + v12.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, }, @@ -69,8 +69,8 @@ func TestGetTolerationsForResources(t *testing.T) { args{ v12.ResourceRequirements{ Requests: v12.ResourceList{ - v12.ResourceCPU: resource.MustParse("1024m"), - v12.ResourceStorage: resource.MustParse("100M"), + v12.ResourceCPU: resource.MustParse("1024m"), + v12.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, }, @@ -83,12 +83,12 @@ func TestGetTolerationsForResources(t *testing.T) { args{ v12.ResourceRequirements{ Limits: v12.ResourceList{ - v12.ResourceCPU: resource.MustParse("1024m"), - v12.ResourceStorage: resource.MustParse("100M"), + v12.ResourceCPU: resource.MustParse("1024m"), + v12.ResourceEphemeralStorage: resource.MustParse("100M"), }, Requests: v12.ResourceList{ - v12.ResourceCPU: resource.MustParse("1024m"), - v12.ResourceStorage: resource.MustParse("100M"), + v12.ResourceCPU: resource.MustParse("1024m"), + v12.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, }, @@ -101,84 +101,84 @@ func TestGetTolerationsForResources(t *testing.T) { args{ v12.ResourceRequirements{ Limits: v12.ResourceList{ - v12.ResourceCPU: resource.MustParse("1024m"), - v12.ResourceStorage: resource.MustParse("100M"), + 
v12.ResourceCPU: resource.MustParse("1024m"), + v12.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, }, map[v12.ResourceName][]v12.Toleration{ - v12.ResourceStorage: {tolStorage}, - ResourceNvidiaGPU: {tolGPU}, + v12.ResourceEphemeralStorage: {tolEphemeralStorage}, + ResourceNvidiaGPU: {tolGPU}, }, nil, - []v12.Toleration{tolStorage}, + []v12.Toleration{tolEphemeralStorage}, }, { "tolerations-req", args{ v12.ResourceRequirements{ Requests: v12.ResourceList{ - v12.ResourceCPU: resource.MustParse("1024m"), - v12.ResourceStorage: resource.MustParse("100M"), + v12.ResourceCPU: resource.MustParse("1024m"), + v12.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, }, map[v12.ResourceName][]v12.Toleration{ - v12.ResourceStorage: {tolStorage}, - ResourceNvidiaGPU: {tolGPU}, + v12.ResourceEphemeralStorage: {tolEphemeralStorage}, + ResourceNvidiaGPU: {tolGPU}, }, nil, - []v12.Toleration{tolStorage}, + []v12.Toleration{tolEphemeralStorage}, }, { "tolerations-both", args{ v12.ResourceRequirements{ Limits: v12.ResourceList{ - v12.ResourceCPU: resource.MustParse("1024m"), - v12.ResourceStorage: resource.MustParse("100M"), + v12.ResourceCPU: resource.MustParse("1024m"), + v12.ResourceEphemeralStorage: resource.MustParse("100M"), }, Requests: v12.ResourceList{ - v12.ResourceCPU: resource.MustParse("1024m"), - v12.ResourceStorage: resource.MustParse("100M"), + v12.ResourceCPU: resource.MustParse("1024m"), + v12.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, }, map[v12.ResourceName][]v12.Toleration{ - v12.ResourceStorage: {tolStorage}, - ResourceNvidiaGPU: {tolGPU}, + v12.ResourceEphemeralStorage: {tolEphemeralStorage}, + ResourceNvidiaGPU: {tolGPU}, }, nil, - []v12.Toleration{tolStorage}, + []v12.Toleration{tolEphemeralStorage}, }, { "no-tolerations-both", args{ v12.ResourceRequirements{ Limits: v12.ResourceList{ - v12.ResourceCPU: resource.MustParse("1024m"), - v12.ResourceStorage: resource.MustParse("100M"), - ResourceNvidiaGPU: 
resource.MustParse("1"), + v12.ResourceCPU: resource.MustParse("1024m"), + v12.ResourceEphemeralStorage: resource.MustParse("100M"), + ResourceNvidiaGPU: resource.MustParse("1"), }, Requests: v12.ResourceList{ - v12.ResourceCPU: resource.MustParse("1024m"), - v12.ResourceStorage: resource.MustParse("100M"), + v12.ResourceCPU: resource.MustParse("1024m"), + v12.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, }, map[v12.ResourceName][]v12.Toleration{ - v12.ResourceStorage: {tolStorage}, - ResourceNvidiaGPU: {tolGPU}, + v12.ResourceEphemeralStorage: {tolEphemeralStorage}, + ResourceNvidiaGPU: {tolGPU}, }, nil, - []v12.Toleration{tolStorage, tolGPU}, + []v12.Toleration{tolEphemeralStorage, tolGPU}, }, { "default-tolerations", args{}, nil, - []v12.Toleration{tolStorage}, - []v12.Toleration{tolStorage}, + []v12.Toleration{tolEphemeralStorage}, + []v12.Toleration{tolEphemeralStorage}, }, } for _, tt := range tests { diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper.go index 2a17751c62..69111dc030 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper.go @@ -759,7 +759,7 @@ func DemystifySuccess(status v1.PodStatus, info pluginsCore.TaskInfo) (pluginsCo for _, status := range append( append(status.InitContainerStatuses, status.ContainerStatuses...), status.EphemeralContainerStatuses...) 
{ if status.State.Terminated != nil && strings.Contains(status.State.Terminated.Reason, OOMKilled) { - return pluginsCore.PhaseInfoRetryableFailure("OOMKilled", + return pluginsCore.PhaseInfoRetryableFailure(OOMKilled, "Pod reported success despite being OOMKilled", &info), nil } } @@ -777,9 +777,14 @@ func DeterminePrimaryContainerPhase(primaryContainerName string, statuses []v1.C } if s.State.Terminated != nil { - if s.State.Terminated.ExitCode != 0 { + if s.State.Terminated.ExitCode != 0 || strings.Contains(s.State.Terminated.Reason, OOMKilled) { + message := fmt.Sprintf("\r\n[%v] terminated with exit code (%v). Reason [%v]. Message: \n%v.", + s.Name, + s.State.Terminated.ExitCode, + s.State.Terminated.Reason, + s.State.Terminated.Message) return pluginsCore.PhaseInfoRetryableFailure( - s.State.Terminated.Reason, s.State.Terminated.Message, info) + s.State.Terminated.Reason, message, info) } return pluginsCore.PhaseInfoSuccess(info) } diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper_test.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper_test.go index b5a51323d2..b12813b387 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/pod_helper_test.go @@ -693,12 +693,12 @@ func TestApplyGPUNodeSelectors(t *testing.T) { func updatePod(t *testing.T) { taskExecutionMetadata := dummyTaskExecutionMetadata(&v1.ResourceRequirements{ Limits: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), + v1.ResourceCPU: resource.MustParse("1024m"), + v1.ResourceEphemeralStorage: resource.MustParse("100M"), }, Requests: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), + v1.ResourceCPU: resource.MustParse("1024m"), + v1.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, nil) @@ -809,13 +809,13 @@ func toK8sPodInterruptible(t *testing.T) { x := 
dummyExecContext(dummyTaskTemplate(), &v1.ResourceRequirements{ Limits: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), - ResourceNvidiaGPU: resource.MustParse("1"), + v1.ResourceCPU: resource.MustParse("1024m"), + v1.ResourceEphemeralStorage: resource.MustParse("100M"), + ResourceNvidiaGPU: resource.MustParse("1"), }, Requests: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), + v1.ResourceCPU: resource.MustParse("1024m"), + v1.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, nil) @@ -853,8 +853,8 @@ func TestToK8sPod(t *testing.T) { Effect: v1.TaintEffectNoSchedule, } - tolStorage := v1.Toleration{ - Key: "storage", + tolEphemeralStorage := v1.Toleration{ + Key: "ephemeral-storage", Value: "dedicated", Operator: v1.TolerationOpExists, Effect: v1.TaintEffectNoSchedule, @@ -862,8 +862,8 @@ func TestToK8sPod(t *testing.T) { assert.NoError(t, config.SetK8sPluginConfig(&config.K8sPluginConfig{ ResourceTolerations: map[v1.ResourceName][]v1.Toleration{ - v1.ResourceStorage: {tolStorage}, - ResourceNvidiaGPU: {tolGPU}, + v1.ResourceEphemeralStorage: {tolEphemeralStorage}, + ResourceNvidiaGPU: {tolGPU}, }, DefaultCPURequest: resource.MustParse("1024m"), DefaultMemoryRequest: resource.MustParse("1024Mi"), @@ -876,48 +876,48 @@ func TestToK8sPod(t *testing.T) { t.Run("WithGPU", func(t *testing.T) { x := dummyExecContext(dummyTaskTemplate(), &v1.ResourceRequirements{ Limits: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), - ResourceNvidiaGPU: resource.MustParse("1"), + v1.ResourceCPU: resource.MustParse("1024m"), + v1.ResourceEphemeralStorage: resource.MustParse("100M"), + ResourceNvidiaGPU: resource.MustParse("1"), }, Requests: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), + v1.ResourceCPU: 
resource.MustParse("1024m"), + v1.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, nil) p, _, _, err := ToK8sPodSpec(ctx, x) assert.NoError(t, err) - assert.Equal(t, len(p.Tolerations), 1) + assert.Equal(t, len(p.Tolerations), 2) }) t.Run("NoGPU", func(t *testing.T) { x := dummyExecContext(dummyTaskTemplate(), &v1.ResourceRequirements{ Limits: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), + v1.ResourceCPU: resource.MustParse("1024m"), + v1.ResourceEphemeralStorage: resource.MustParse("100M"), }, Requests: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), + v1.ResourceCPU: resource.MustParse("1024m"), + v1.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, nil) p, _, _, err := ToK8sPodSpec(ctx, x) assert.NoError(t, err) - assert.Equal(t, len(p.Tolerations), 0) + assert.Equal(t, len(p.Tolerations), 1) assert.Equal(t, "some-acceptable-name", p.Containers[0].Name) }) t.Run("Default toleration, selector, scheduler", func(t *testing.T) { x := dummyExecContext(dummyTaskTemplate(), &v1.ResourceRequirements{ Limits: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), + v1.ResourceCPU: resource.MustParse("1024m"), + v1.ResourceEphemeralStorage: resource.MustParse("100M"), }, Requests: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), + v1.ResourceCPU: resource.MustParse("1024m"), + v1.ResourceEphemeralStorage: resource.MustParse("100M"), }, }, nil) @@ -1728,7 +1728,7 @@ func TestDeterminePrimaryContainerPhase(t *testing.T) { }, info) assert.Equal(t, pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) assert.Equal(t, "foo", phaseInfo.Err().Code) - assert.Equal(t, "foo failed", phaseInfo.Err().Message) + assert.Equal(t, "\r\n[primary] terminated with exit code (1). Reason [foo]. 
Message: \nfoo failed.", phaseInfo.Err().Message) }) t.Run("primary container succeeded", func(t *testing.T) { phaseInfo := DeterminePrimaryContainerPhase(primaryContainerName, []v1.ContainerStatus{ @@ -1751,6 +1751,23 @@ func TestDeterminePrimaryContainerPhase(t *testing.T) { assert.Equal(t, PrimaryContainerNotFound, phaseInfo.Err().Code) assert.Equal(t, "Primary container [primary] not found in pod's container statuses", phaseInfo.Err().Message) }) + t.Run("primary container failed with OOMKilled", func(t *testing.T) { + phaseInfo := DeterminePrimaryContainerPhase(primaryContainerName, []v1.ContainerStatus{ + secondaryContainer, { + Name: primaryContainerName, + State: v1.ContainerState{ + Terminated: &v1.ContainerStateTerminated{ + ExitCode: 0, + Reason: OOMKilled, + Message: "foo failed", + }, + }, + }, + }, info) + assert.Equal(t, pluginsCore.PhaseRetryableFailure, phaseInfo.Phase()) + assert.Equal(t, OOMKilled, phaseInfo.Err().Code) + assert.Equal(t, "\r\n[primary] terminated with exit code (0). Reason [OOMKilled]. 
Message: \nfoo failed.", phaseInfo.Err().Message) + }) } func TestGetPodTemplate(t *testing.T) { diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils.go index fe626764a2..ef7807aadd 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils.go @@ -36,10 +36,6 @@ func ToK8sResourceList(resources []*core.Resources_ResourceEntry) (v1.ResourceLi if !v.IsZero() { k8sResources[v1.ResourceMemory] = v } - case core.Resources_STORAGE: - if !v.IsZero() { - k8sResources[v1.ResourceStorage] = v - } case core.Resources_GPU: if !v.IsZero() { k8sResources[ResourceNvidiaGPU] = v diff --git a/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils_test.go b/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils_test.go index 8c75f00a90..07e519cc80 100644 --- a/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/flytek8s/utils_test.go @@ -33,7 +33,6 @@ func TestToK8sResourceList(t *testing.T) { {Name: core.Resources_CPU, Value: "250m"}, {Name: core.Resources_GPU, Value: "1"}, {Name: core.Resources_MEMORY, Value: "1024Mi"}, - {Name: core.Resources_STORAGE, Value: "1024Mi"}, {Name: core.Resources_EPHEMERAL_STORAGE, Value: "1024Mi"}, }) @@ -43,7 +42,6 @@ func TestToK8sResourceList(t *testing.T) { assert.Equal(t, resource.MustParse("250m"), r[v1.ResourceCPU]) assert.Equal(t, resource.MustParse("1"), r[ResourceNvidiaGPU]) assert.Equal(t, resource.MustParse("1024Mi"), r[v1.ResourceMemory]) - assert.Equal(t, resource.MustParse("1024Mi"), r[v1.ResourceStorage]) assert.Equal(t, resource.MustParse("1024Mi"), r[v1.ResourceEphemeralStorage]) } { diff --git a/flyteplugins/go/tasks/pluginmachinery/ioutils/paths.go b/flyteplugins/go/tasks/pluginmachinery/ioutils/paths.go index 28bacceaed..e499535e70 100644 --- a/flyteplugins/go/tasks/pluginmachinery/ioutils/paths.go +++ 
b/flyteplugins/go/tasks/pluginmachinery/ioutils/paths.go @@ -43,7 +43,7 @@ func ConstructCheckpointPath(store storage.ReferenceConstructor, rawOutputPrefix func constructPath(store storage.ReferenceConstructor, base storage.DataReference, suffix string) storage.DataReference { res, err := store.ConstructReference(context.Background(), base, suffix) if err != nil { - logger.Error(context.Background(), "Failed to construct path. Base[%v] Error: %v", base, err) + logger.Errorf(context.Background(), "Failed to construct path. Base[%v] Error: %v", base, err) } return res diff --git a/flyteplugins/go/tasks/pluginmachinery/k8s/client.go b/flyteplugins/go/tasks/pluginmachinery/k8s/client.go index f14ae2c8a0..0ab46081e9 100644 --- a/flyteplugins/go/tasks/pluginmachinery/k8s/client.go +++ b/flyteplugins/go/tasks/pluginmachinery/k8s/client.go @@ -69,7 +69,7 @@ func NewKubeClient(config *rest.Config, options Options) (core.KubeClient, error if options.ClientOptions == nil { options.ClientOptions = &client.Options{ HTTPClient: httpClient, - Mapper: mapper, + Mapper: mapper, } } diff --git a/flyteplugins/go/tasks/pluginmachinery/tasklog/plugin.go b/flyteplugins/go/tasks/pluginmachinery/tasklog/plugin.go index da2357a6d9..fa47fa4729 100644 --- a/flyteplugins/go/tasks/pluginmachinery/tasklog/plugin.go +++ b/flyteplugins/go/tasks/pluginmachinery/tasklog/plugin.go @@ -14,7 +14,6 @@ type TemplateScheme int const ( TemplateSchemePod TemplateScheme = iota TemplateSchemeTaskExecution - TemplateSchemeFlyin ) // TemplateURI is a URI that accepts templates. See: go/tasks/pluginmachinery/tasklog/template.go for available templates. @@ -25,32 +24,23 @@ type TemplateVar struct { Value string } -type TemplateVars []TemplateVar - -type TemplateVarsByScheme struct { - Common TemplateVars - Pod TemplateVars - TaskExecution TemplateVars - Flyin TemplateVars -} - // Input contains all available information about task's execution that a log plugin can use to construct task's // log links. 
type Input struct { - HostName string - PodName string - Namespace string - ContainerName string - ContainerID string - LogName string - PodRFC3339StartTime string - PodRFC3339FinishTime string - PodUnixStartTime int64 - PodUnixFinishTime int64 - PodUID string - TaskExecutionID pluginsCore.TaskExecutionID - ExtraTemplateVarsByScheme *TemplateVarsByScheme - TaskTemplate *core.TaskTemplate + HostName string + PodName string + Namespace string + ContainerName string + ContainerID string + LogName string + PodRFC3339StartTime string + PodRFC3339FinishTime string + PodUnixStartTime int64 + PodUnixFinishTime int64 + PodUID string + TaskExecutionID pluginsCore.TaskExecutionID + ExtraTemplateVars []TemplateVar + TaskTemplate *core.TaskTemplate } // Output contains all task logs a plugin generates for a given Input. @@ -65,8 +55,11 @@ type Plugin interface { } type TemplateLogPlugin struct { - DisplayName string `json:"displayName" pflag:",Display name for the generated log when displayed in the console."` - TemplateURIs []TemplateURI `json:"templateUris" pflag:",URI Templates for generating task log links."` - MessageFormat core.TaskLog_MessageFormat `json:"messageFormat" pflag:",Log Message Format."` - Scheme TemplateScheme `json:"scheme" pflag:",Templating scheme to use. Supported values are Pod and TaskExecution."` + Name string `json:"name" pflag:",Name of the plugin."` + DisplayName string `json:"displayName" pflag:",Display name for the generated log when displayed in the console."` + TemplateURIs []TemplateURI `json:"templateUris" pflag:",URI Templates for generating task log links."` + DynamicTemplateURIs []TemplateURI `json:"dynamicTemplateUris" pflag:",URI Templates for generating dynamic task log links."` + MessageFormat core.TaskLog_MessageFormat `json:"messageFormat" pflag:"-,Log Message Format."` + // Deprecated: Please, do not use + DeprecatedScheme TemplateScheme `json:"scheme" pflag:",Templating scheme to use. 
Supported values are Pod and TaskExecution."` } diff --git a/flyteplugins/go/tasks/pluginmachinery/tasklog/template.go b/flyteplugins/go/tasks/pluginmachinery/tasklog/template.go index ea5c5f373c..e5481ecfbd 100644 --- a/flyteplugins/go/tasks/pluginmachinery/tasklog/template.go +++ b/flyteplugins/go/tasks/pluginmachinery/tasklog/template.go @@ -13,6 +13,12 @@ func MustCreateRegex(varName string) *regexp.Regexp { return regexp.MustCompile(fmt.Sprintf(`(?i){{\s*[\.$]%s\s*}}`, varName)) } +var taskConfigVarRegex = regexp.MustCompile(`(?i){{\s*.taskConfig[\.$]([a-zA-Z_]+)\s*}}`) + +func MustCreateDynamicLogRegex(varName string) *regexp.Regexp { + return regexp.MustCompile(fmt.Sprintf(`(?i){{\s*.taskConfig[\.$]%s\s*}}`, varName)) +} + type templateRegexes struct { LogName *regexp.Regexp PodName *regexp.Regexp @@ -35,7 +41,6 @@ type templateRegexes struct { ExecutionProject *regexp.Regexp ExecutionDomain *regexp.Regexp GeneratedName *regexp.Regexp - Port *regexp.Regexp } func initDefaultRegexes() templateRegexes { @@ -61,13 +66,12 @@ func initDefaultRegexes() templateRegexes { MustCreateRegex("executionProject"), MustCreateRegex("executionDomain"), MustCreateRegex("generatedName"), - MustCreateRegex("port"), } } var defaultRegexes = initDefaultRegexes() -func replaceAll(template string, vars TemplateVars) string { +func replaceAll(template string, vars []TemplateVar) string { for _, v := range vars { if len(v.Value) > 0 { template = v.Regex.ReplaceAllLiteralString(template, v.Value) @@ -76,58 +80,33 @@ func replaceAll(template string, vars TemplateVars) string { return template } -func (input Input) templateVarsForScheme(scheme TemplateScheme) TemplateVars { - vars := TemplateVars{ - {defaultRegexes.LogName, input.LogName}, +func (input Input) templateVars() []TemplateVar { + vars := []TemplateVar{ + TemplateVar{defaultRegexes.LogName, input.LogName}, } - gotExtraTemplateVars := input.ExtraTemplateVarsByScheme != nil + gotExtraTemplateVars := input.ExtraTemplateVars != 
nil if gotExtraTemplateVars { - vars = append(vars, input.ExtraTemplateVarsByScheme.Common...) + vars = append(vars, input.ExtraTemplateVars...) } - switch scheme { - case TemplateSchemeFlyin: - port := input.TaskTemplate.GetConfig()["port"] - if port == "" { - port = "8080" - } - vars = append( - vars, - TemplateVar{defaultRegexes.Port, port}, - ) - fallthrough - case TemplateSchemePod: - // Container IDs are prefixed with docker://, cri-o://, etc. which is stripped by fluentd before pushing to a log - // stream. Therefore, we must also strip the prefix. - containerID := input.ContainerID - stripDelimiter := "://" - if split := strings.Split(input.ContainerID, stripDelimiter); len(split) > 1 { - containerID = split[1] - } - vars = append( - vars, - TemplateVar{defaultRegexes.PodName, input.PodName}, - TemplateVar{defaultRegexes.PodUID, input.PodUID}, - TemplateVar{defaultRegexes.Namespace, input.Namespace}, - TemplateVar{defaultRegexes.ContainerName, input.ContainerName}, - TemplateVar{defaultRegexes.ContainerID, containerID}, - TemplateVar{defaultRegexes.Hostname, input.HostName}, - TemplateVar{defaultRegexes.PodRFC3339StartTime, input.PodRFC3339StartTime}, - TemplateVar{defaultRegexes.PodRFC3339FinishTime, input.PodRFC3339FinishTime}, - TemplateVar{ - defaultRegexes.PodUnixStartTime, - strconv.FormatInt(input.PodUnixStartTime, 10), - }, - TemplateVar{ - defaultRegexes.PodUnixFinishTime, - strconv.FormatInt(input.PodUnixFinishTime, 10), - }, - ) - if gotExtraTemplateVars { - vars = append(vars, input.ExtraTemplateVarsByScheme.Pod...) - } - case TemplateSchemeTaskExecution: + // Container IDs are prefixed with docker://, cri-o://, etc. which is stripped by fluentd before pushing to a log + // stream. Therefore, we must also strip the prefix. 
+ containerID := input.ContainerID + stripDelimiter := "://" + if split := strings.Split(input.ContainerID, stripDelimiter); len(split) > 1 { + containerID = split[1] + } + vars = append( + vars, + TemplateVar{defaultRegexes.PodName, input.PodName}, + TemplateVar{defaultRegexes.PodUID, input.PodUID}, + TemplateVar{defaultRegexes.Namespace, input.Namespace}, + TemplateVar{defaultRegexes.ContainerName, input.ContainerName}, + TemplateVar{defaultRegexes.ContainerID, containerID}, + TemplateVar{defaultRegexes.Hostname, input.HostName}, + ) + if input.TaskExecutionID != nil { taskExecutionIdentifier := input.TaskExecutionID.GetID() vars = append( vars, @@ -182,33 +161,44 @@ func (input Input) templateVarsForScheme(scheme TemplateScheme) TemplateVars { }, ) } - if gotExtraTemplateVars { - vars = append(vars, input.ExtraTemplateVarsByScheme.TaskExecution...) - } } + vars = append( + vars, + TemplateVar{defaultRegexes.PodRFC3339StartTime, input.PodRFC3339StartTime}, + TemplateVar{defaultRegexes.PodRFC3339FinishTime, input.PodRFC3339FinishTime}, + TemplateVar{ + defaultRegexes.PodUnixStartTime, + strconv.FormatInt(input.PodUnixStartTime, 10), + }, + TemplateVar{ + defaultRegexes.PodUnixFinishTime, + strconv.FormatInt(input.PodUnixFinishTime, 10), + }, + ) + return vars } +func getDynamicLogLinkTypes(taskTemplate *core.TaskTemplate) []string { + if taskTemplate == nil { + return nil + } + config := taskTemplate.GetConfig() + if config == nil { + return nil + } + linkType := config["link_type"] + if linkType == "" { + return nil + } + return strings.Split(linkType, ",") +} + func (p TemplateLogPlugin) GetTaskLogs(input Input) (Output, error) { - templateVars := input.templateVarsForScheme(p.Scheme) + templateVars := input.templateVars() taskLogs := make([]*core.TaskLog, 0, len(p.TemplateURIs)) - - // Grab metadata from task template and check if key "link_type" is set to "vscode". - // If so, add a vscode link to the task logs. 
- isFlyin := false - if input.TaskTemplate != nil && input.TaskTemplate.GetConfig() != nil { - config := input.TaskTemplate.GetConfig() - if config != nil && config["link_type"] == "vscode" { - isFlyin = true - } - } for _, templateURI := range p.TemplateURIs { - // Skip Flyin logs if plugin is enabled but no metadata is defined in input's task template. - // This is to prevent Flyin logs from being generated for tasks that don't have a Flyin metadata section. - if p.DisplayName == "Flyin Logs" && !isFlyin { - continue - } taskLogs = append(taskLogs, &core.TaskLog{ Uri: replaceAll(templateURI, templateVars), Name: p.DisplayName + input.LogName, @@ -216,5 +206,24 @@ func (p TemplateLogPlugin) GetTaskLogs(input Input) (Output, error) { }) } + for _, dynamicLogLinkType := range getDynamicLogLinkTypes(input.TaskTemplate) { + for _, dynamicTemplateURI := range p.DynamicTemplateURIs { + if p.Name == dynamicLogLinkType { + for _, match := range taskConfigVarRegex.FindAllStringSubmatch(dynamicTemplateURI, -1) { + if len(match) > 1 { + if value, found := input.TaskTemplate.GetConfig()[match[1]]; found { + templateVars = append(templateVars, TemplateVar{MustCreateDynamicLogRegex(match[1]), value}) + } + } + } + taskLogs = append(taskLogs, &core.TaskLog{ + Uri: replaceAll(dynamicTemplateURI, templateVars), + Name: p.DisplayName + input.LogName, + MessageFormat: p.MessageFormat, + }) + } + } + } + return Output{TaskLogs: taskLogs}, nil } diff --git a/flyteplugins/go/tasks/pluginmachinery/tasklog/template_test.go b/flyteplugins/go/tasks/pluginmachinery/tasklog/template_test.go index ad6eef25a3..42226bd7c0 100644 --- a/flyteplugins/go/tasks/pluginmachinery/tasklog/template_test.go +++ b/flyteplugins/go/tasks/pluginmachinery/tasklog/template_test.go @@ -43,19 +43,23 @@ func dummyTaskExecID() pluginCore.TaskExecutionID { return tID } -func Test_Input_templateVarsForScheme(t *testing.T) { +func Test_Input_templateVars(t *testing.T) { testRegexes := struct { - Foo *regexp.Regexp - 
Bar *regexp.Regexp - Baz *regexp.Regexp - Ham *regexp.Regexp - Spam *regexp.Regexp + Foo *regexp.Regexp + Bar *regexp.Regexp + Baz *regexp.Regexp + Ham *regexp.Regexp + Spam *regexp.Regexp + LinkType *regexp.Regexp + Port *regexp.Regexp }{ MustCreateRegex("foo"), MustCreateRegex("bar"), MustCreateRegex("baz"), MustCreateRegex("ham"), MustCreateRegex("spam"), + MustCreateDynamicLogRegex("link_type"), + MustCreateDynamicLogRegex("port"), } podBase := Input{ HostName: "my-host", @@ -71,43 +75,27 @@ func Test_Input_templateVarsForScheme(t *testing.T) { PodUnixFinishTime: 12345, } taskExecutionBase := Input{ - LogName: "main_logs", - TaskExecutionID: dummyTaskExecID(), - } - flyinBase := Input{ - HostName: "my-host", - PodName: "my-pod", - PodUID: "my-pod-uid", - Namespace: "my-namespace", - ContainerName: "my-container", - ContainerID: "docker://containerID", LogName: "main_logs", + TaskExecutionID: dummyTaskExecID(), PodRFC3339StartTime: "1970-01-01T01:02:03+01:00", PodRFC3339FinishTime: "1970-01-01T04:25:45+01:00", PodUnixStartTime: 123, PodUnixFinishTime: 12345, - TaskTemplate: &core.TaskTemplate{ - Config: map[string]string{ - "port": "1234", - }, - }, } tests := []struct { name string - scheme TemplateScheme baseVars Input - extraVars *TemplateVarsByScheme - exact TemplateVars - contains TemplateVars - notContains TemplateVars + extraVars []TemplateVar + exact []TemplateVar + contains []TemplateVar + notContains []TemplateVar }{ { "pod happy path", - TemplateSchemePod, podBase, nil, - TemplateVars{ + []TemplateVar{ {defaultRegexes.LogName, "main_logs"}, {defaultRegexes.PodName, "my-pod"}, {defaultRegexes.PodUID, "my-pod-uid"}, @@ -125,49 +113,32 @@ func Test_Input_templateVarsForScheme(t *testing.T) { }, { "pod with extra vars", - TemplateSchemePod, podBase, - &TemplateVarsByScheme{ - Common: TemplateVars{ - {testRegexes.Foo, "foo"}, - }, - Pod: TemplateVars{ - {testRegexes.Bar, "bar"}, - {testRegexes.Baz, "baz"}, - }, - }, - nil, - TemplateVars{ + []TemplateVar{ 
{testRegexes.Foo, "foo"}, {testRegexes.Bar, "bar"}, {testRegexes.Baz, "baz"}, }, nil, - }, - { - "pod with unused extra vars", - TemplateSchemePod, - podBase, - &TemplateVarsByScheme{ - TaskExecution: TemplateVars{ - {testRegexes.Bar, "bar"}, - {testRegexes.Baz, "baz"}, - }, - }, - nil, - nil, - TemplateVars{ + []TemplateVar{ + {testRegexes.Foo, "foo"}, {testRegexes.Bar, "bar"}, {testRegexes.Baz, "baz"}, }, + nil, }, { "task execution happy path", - TemplateSchemeTaskExecution, taskExecutionBase, nil, - TemplateVars{ + []TemplateVar{ {defaultRegexes.LogName, "main_logs"}, + {defaultRegexes.PodName, ""}, + {defaultRegexes.PodUID, ""}, + {defaultRegexes.Namespace, ""}, + {defaultRegexes.ContainerName, ""}, + {defaultRegexes.ContainerID, ""}, + {defaultRegexes.Hostname, ""}, {defaultRegexes.NodeID, "n0-0-n0"}, {defaultRegexes.GeneratedName, "generated-name"}, {defaultRegexes.TaskRetryAttempt, "1"}, @@ -178,98 +149,46 @@ func Test_Input_templateVarsForScheme(t *testing.T) { {defaultRegexes.ExecutionName, "my-execution-name"}, {defaultRegexes.ExecutionProject, "my-execution-project"}, {defaultRegexes.ExecutionDomain, "my-execution-domain"}, + {defaultRegexes.PodRFC3339StartTime, "1970-01-01T01:02:03+01:00"}, + {defaultRegexes.PodRFC3339FinishTime, "1970-01-01T04:25:45+01:00"}, + {defaultRegexes.PodUnixStartTime, "123"}, + {defaultRegexes.PodUnixFinishTime, "12345"}, }, nil, nil, }, { "task execution with extra vars", - TemplateSchemeTaskExecution, taskExecutionBase, - &TemplateVarsByScheme{ - Common: TemplateVars{ - {testRegexes.Foo, "foo"}, - }, - TaskExecution: TemplateVars{ - {testRegexes.Bar, "bar"}, - {testRegexes.Baz, "baz"}, - }, - }, - nil, - TemplateVars{ + []TemplateVar{ {testRegexes.Foo, "foo"}, {testRegexes.Bar, "bar"}, {testRegexes.Baz, "baz"}, }, nil, - }, - { - "task execution with unused extra vars", - TemplateSchemeTaskExecution, - taskExecutionBase, - &TemplateVarsByScheme{ - Pod: TemplateVars{ - {testRegexes.Bar, "bar"}, - {testRegexes.Baz, "baz"}, - 
}, - }, - nil, - nil, - TemplateVars{ + []TemplateVar{ + {testRegexes.Foo, "foo"}, {testRegexes.Bar, "bar"}, {testRegexes.Baz, "baz"}, }, - }, - { - "flyin happy path", - TemplateSchemeFlyin, - flyinBase, - nil, - nil, - TemplateVars{ - {defaultRegexes.Port, "1234"}, - }, - nil, - }, - { - "flyin and pod happy path", - TemplateSchemeFlyin, - flyinBase, - nil, - TemplateVars{ - {defaultRegexes.LogName, "main_logs"}, - {defaultRegexes.Port, "1234"}, - {defaultRegexes.PodName, "my-pod"}, - {defaultRegexes.PodUID, "my-pod-uid"}, - {defaultRegexes.Namespace, "my-namespace"}, - {defaultRegexes.ContainerName, "my-container"}, - {defaultRegexes.ContainerID, "containerID"}, - {defaultRegexes.Hostname, "my-host"}, - {defaultRegexes.PodRFC3339StartTime, "1970-01-01T01:02:03+01:00"}, - {defaultRegexes.PodRFC3339FinishTime, "1970-01-01T04:25:45+01:00"}, - {defaultRegexes.PodUnixStartTime, "123"}, - {defaultRegexes.PodUnixFinishTime, "12345"}, - }, - nil, nil, }, { "pod with port not affected", - TemplateSchemePod, podBase, nil, nil, nil, - TemplateVars{ - {defaultRegexes.Port, "1234"}, + []TemplateVar{ + {testRegexes.Port, "1234"}, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { base := tt.baseVars - base.ExtraTemplateVarsByScheme = tt.extraVars - got := base.templateVarsForScheme(tt.scheme) + base.ExtraTemplateVars = tt.extraVars + got := base.templateVars() if tt.exact != nil { assert.Equal(t, got, tt.exact) } @@ -468,7 +387,6 @@ func TestTemplateLogPlugin(t *testing.T) { { "task-with-task-execution-identifier", TemplateLogPlugin{ - Scheme: TemplateSchemeTaskExecution, TemplateURIs: []TemplateURI{"https://flyte.corp.net/console/projects/{{ .executionProject }}/domains/{{ .executionDomain }}/executions/{{ .executionName }}/nodeId/{{ .nodeID }}/taskId/{{ .taskID }}/attempt/{{ .taskRetryAttempt }}/view/logs"}, MessageFormat: core.TaskLog_JSON, }, @@ -500,7 +418,6 @@ func TestTemplateLogPlugin(t *testing.T) { { "mapped-task-with-task-execution-identifier", 
TemplateLogPlugin{ - Scheme: TemplateSchemeTaskExecution, TemplateURIs: []TemplateURI{"https://flyte.corp.net/console/projects/{{ .executionProject }}/domains/{{ .executionDomain }}/executions/{{ .executionName }}/nodeId/{{ .nodeID }}/taskId/{{ .taskID }}/attempt/{{ .subtaskParentRetryAttempt }}/mappedIndex/{{ .subtaskExecutionIndex }}/mappedAttempt/{{ .subtaskRetryAttempt }}/view/logs"}, MessageFormat: core.TaskLog_JSON, }, @@ -517,12 +434,10 @@ func TestTemplateLogPlugin(t *testing.T) { PodUnixStartTime: 123, PodUnixFinishTime: 12345, TaskExecutionID: dummyTaskExecID(), - ExtraTemplateVarsByScheme: &TemplateVarsByScheme{ - TaskExecution: TemplateVars{ - {MustCreateRegex("subtaskExecutionIndex"), "1"}, - {MustCreateRegex("subtaskRetryAttempt"), "1"}, - {MustCreateRegex("subtaskParentRetryAttempt"), "0"}, - }, + ExtraTemplateVars: []TemplateVar{ + {MustCreateRegex("subtaskExecutionIndex"), "1"}, + {MustCreateRegex("subtaskRetryAttempt"), "1"}, + {MustCreateRegex("subtaskParentRetryAttempt"), "0"}, }, }, }, @@ -537,11 +452,11 @@ func TestTemplateLogPlugin(t *testing.T) { }, }, { - "flyin", + "flyteinteractive", TemplateLogPlugin{ - Scheme: TemplateSchemeFlyin, - TemplateURIs: []TemplateURI{"vscode://flyin:{{ .port }}/{{ .podName }}"}, - MessageFormat: core.TaskLog_JSON, + Name: "vscode", + DynamicTemplateURIs: []TemplateURI{"vscode://flyteinteractive:{{ .taskConfig.port }}/{{ .podName }}"}, + MessageFormat: core.TaskLog_JSON, }, args{ input: Input{ @@ -557,54 +472,54 @@ func TestTemplateLogPlugin(t *testing.T) { Output{ TaskLogs: []*core.TaskLog{ { - Uri: "vscode://flyin:1234/my-pod-name", + Uri: "vscode://flyteinteractive:1234/my-pod-name", MessageFormat: core.TaskLog_JSON, }, }, }, }, { - "flyin - default port", + "flyteinteractive - no link_type in task template", TemplateLogPlugin{ - Scheme: TemplateSchemeFlyin, - TemplateURIs: []TemplateURI{"vscode://flyin:{{ .port }}/{{ .podName }}"}, - MessageFormat: core.TaskLog_JSON, + Name: "vscode", + DynamicTemplateURIs: 
[]TemplateURI{"vscode://flyteinteractive:{{ .taskConfig.port }}/{{ .podName }}"}, + MessageFormat: core.TaskLog_JSON, + DisplayName: "Flyteinteractive Logs", }, args{ input: Input{ PodName: "my-pod-name", - TaskTemplate: &core.TaskTemplate{ - Config: map[string]string{ - "link_type": "vscode", - }, - }, }, }, Output{ - TaskLogs: []*core.TaskLog{ - { - Uri: "vscode://flyin:8080/my-pod-name", - MessageFormat: core.TaskLog_JSON, - }, - }, + TaskLogs: []*core.TaskLog{}, }, }, { - "flyin - no link_type in task template", + "kubernetes", TemplateLogPlugin{ - Scheme: TemplateSchemeFlyin, - TemplateURIs: []TemplateURI{"vscode://flyin:{{ .port }}/{{ .podName }}"}, + TemplateURIs: []TemplateURI{"https://dashboard.k8s.net/#!/log/{{.namespace}}/{{.podName}}/pod?namespace={{.namespace}}"}, MessageFormat: core.TaskLog_JSON, - DisplayName: "Flyin Logs", }, args{ input: Input{ - PodName: "my-pod-name", + PodName: "flyteexamples-development-task-name", + PodUID: "pod-uid", + Namespace: "flyteexamples-development", + ContainerName: "ignore", + ContainerID: "ignore", + LogName: "main_logs", + PodRFC3339StartTime: "1970-01-01T01:02:03+01:00", + PodRFC3339FinishTime: "1970-01-01T04:25:45+01:00", + PodUnixStartTime: 123, + PodUnixFinishTime: 12345, }, }, - Output{ - TaskLogs: []*core.TaskLog{}, - }, + Output{TaskLogs: []*core.TaskLog{{ + Uri: "https://dashboard.k8s.net/#!/log/flyteexamples-development/flyteexamples-development-task-name/pod?namespace=flyteexamples-development", + MessageFormat: core.TaskLog_JSON, + Name: "main_logs", + }}}, }, } for _, tt := range tests { diff --git a/flyteplugins/go/tasks/pluginmachinery/tasklog/templatescheme_enumer.go b/flyteplugins/go/tasks/pluginmachinery/tasklog/templatescheme_enumer.go index c1f4d668c0..70f15faf01 100644 --- a/flyteplugins/go/tasks/pluginmachinery/tasklog/templatescheme_enumer.go +++ b/flyteplugins/go/tasks/pluginmachinery/tasklog/templatescheme_enumer.go @@ -7,9 +7,9 @@ import ( "fmt" ) -const _TemplateSchemeName = 
"PodTaskExecutionFlyin" +const _TemplateSchemeName = "PodTaskExecution" -var _TemplateSchemeIndex = [...]uint8{0, 3, 16, 21} +var _TemplateSchemeIndex = [...]uint8{0, 3, 16} func (i TemplateScheme) String() string { if i < 0 || i >= TemplateScheme(len(_TemplateSchemeIndex)-1) { @@ -18,12 +18,11 @@ func (i TemplateScheme) String() string { return _TemplateSchemeName[_TemplateSchemeIndex[i]:_TemplateSchemeIndex[i+1]] } -var _TemplateSchemeValues = []TemplateScheme{0, 1, 2} +var _TemplateSchemeValues = []TemplateScheme{0, 1} var _TemplateSchemeNameToValueMap = map[string]TemplateScheme{ - _TemplateSchemeName[0:3]: 0, - _TemplateSchemeName[3:16]: 1, - _TemplateSchemeName[16:21]: 2, + _TemplateSchemeName[0:3]: 0, + _TemplateSchemeName[3:16]: 1, } // TemplateSchemeString retrieves an enum value from the enum constants string name. diff --git a/flyteplugins/go/tasks/pluginmachinery/webapi/mocks/sync_plugin.go b/flyteplugins/go/tasks/pluginmachinery/webapi/mocks/sync_plugin.go new file mode 100644 index 0000000000..cfe5d38090 --- /dev/null +++ b/flyteplugins/go/tasks/pluginmachinery/webapi/mocks/sync_plugin.go @@ -0,0 +1,88 @@ +// Code generated by mockery v1.0.1. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + core "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/core" + mock "github.com/stretchr/testify/mock" + + webapi "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/webapi" +) + +// SyncPlugin is an autogenerated mock type for the SyncPlugin type +type SyncPlugin struct { + mock.Mock +} + +type SyncPlugin_Do struct { + *mock.Call +} + +func (_m SyncPlugin_Do) Return(phase core.PhaseInfo, err error) *SyncPlugin_Do { + return &SyncPlugin_Do{Call: _m.Call.Return(phase, err)} +} + +func (_m *SyncPlugin) OnDo(ctx context.Context, tCtx webapi.TaskExecutionContext) *SyncPlugin_Do { + c_call := _m.On("Do", ctx, tCtx) + return &SyncPlugin_Do{Call: c_call} +} + +func (_m *SyncPlugin) OnDoMatch(matchers ...interface{}) *SyncPlugin_Do { + c_call := _m.On("Do", matchers...) + return &SyncPlugin_Do{Call: c_call} +} + +// Do provides a mock function with given fields: ctx, tCtx +func (_m *SyncPlugin) Do(ctx context.Context, tCtx webapi.TaskExecutionContext) (core.PhaseInfo, error) { + ret := _m.Called(ctx, tCtx) + + var r0 core.PhaseInfo + if rf, ok := ret.Get(0).(func(context.Context, webapi.TaskExecutionContext) core.PhaseInfo); ok { + r0 = rf(ctx, tCtx) + } else { + r0 = ret.Get(0).(core.PhaseInfo) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, webapi.TaskExecutionContext) error); ok { + r1 = rf(ctx, tCtx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +type SyncPlugin_GetConfig struct { + *mock.Call +} + +func (_m SyncPlugin_GetConfig) Return(_a0 webapi.PluginConfig) *SyncPlugin_GetConfig { + return &SyncPlugin_GetConfig{Call: _m.Call.Return(_a0)} +} + +func (_m *SyncPlugin) OnGetConfig() *SyncPlugin_GetConfig { + c_call := _m.On("GetConfig") + return &SyncPlugin_GetConfig{Call: c_call} +} + +func (_m *SyncPlugin) OnGetConfigMatch(matchers ...interface{}) *SyncPlugin_GetConfig { + c_call := _m.On("GetConfig", matchers...) 
+ return &SyncPlugin_GetConfig{Call: c_call} +} + +// GetConfig provides a mock function with given fields: +func (_m *SyncPlugin) GetConfig() webapi.PluginConfig { + ret := _m.Called() + + var r0 webapi.PluginConfig + if rf, ok := ret.Get(0).(func() webapi.PluginConfig); ok { + r0 = rf() + } else { + r0 = ret.Get(0).(webapi.PluginConfig) + } + + return r0 +} diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store.go b/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store.go index cd3bca93c4..16d44b490e 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/jobs_store.go @@ -272,7 +272,7 @@ func syncBatches(_ context.Context, client Client, handler EventHandler, batchCh for _, jobDetail := range response { job, found := jobIDsMap[*jobDetail.JobId] if !found { - logger.Warn(ctx, "Received an update for unrequested job id [%v]", jobDetail.JobId) + logger.Warnf(ctx, "Received an update for unrequested job id [%v]", jobDetail.JobId) continue } diff --git a/flyteplugins/go/tasks/plugins/array/awsbatch/monitor.go b/flyteplugins/go/tasks/plugins/array/awsbatch/monitor.go index 666b1e741a..62bc5103dc 100644 --- a/flyteplugins/go/tasks/plugins/array/awsbatch/monitor.go +++ b/flyteplugins/go/tasks/plugins/array/awsbatch/monitor.go @@ -49,7 +49,7 @@ func CheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionContext, job // If job isn't currently being monitored (recovering from a restart?), add it to the sync-cache and return if job == nil { - logger.Info(ctx, "Job not found in cache, adding it. [%v]", jobName) + logger.Infof(ctx, "Job not found in cache, adding it. 
[%v]", jobName) _, err = jobStore.GetOrCreate(jobName, &Job{ ID: *currentState.ExternalJobID, diff --git a/flyteplugins/go/tasks/plugins/array/core/phase_enumer.go b/flyteplugins/go/tasks/plugins/array/core/phase_enumer.go index c659c7bcfe..a5cc4569bc 100644 --- a/flyteplugins/go/tasks/plugins/array/core/phase_enumer.go +++ b/flyteplugins/go/tasks/plugins/array/core/phase_enumer.go @@ -6,9 +6,9 @@ import ( "fmt" ) -const _PhaseName = "PhaseStartPhasePreLaunchPhaseLaunchPhaseWaitingForResourcesPhaseCheckingSubTaskExecutionsPhaseAssembleFinalOutputPhaseWriteToDiscoveryPhaseWriteToDiscoveryThenFailPhaseSuccessPhaseAssembleFinalErrorPhaseRetryableFailurePhasePermanentFailure" +const _PhaseName = "PhaseStartPhasePreLaunchPhaseLaunchPhaseWaitingForResourcesPhaseCheckingSubTaskExecutionsPhaseAssembleFinalOutputPhaseWriteToDiscoveryPhaseWriteToDiscoveryThenFailPhaseSuccessPhaseAssembleFinalErrorPhaseRetryableFailurePhasePermanentFailurePhaseAbortSubTasks" -var _PhaseIndex = [...]uint8{0, 10, 24, 35, 59, 89, 113, 134, 163, 175, 198, 219, 240} +var _PhaseIndex = [...]uint16{0, 10, 24, 35, 59, 89, 113, 134, 163, 175, 198, 219, 240, 258} func (i Phase) String() string { if i >= Phase(len(_PhaseIndex)-1) { @@ -17,7 +17,7 @@ func (i Phase) String() string { return _PhaseName[_PhaseIndex[i]:_PhaseIndex[i+1]] } -var _PhaseValues = []Phase{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11} +var _PhaseValues = []Phase{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12} var _PhaseNameToValueMap = map[string]Phase{ _PhaseName[0:10]: 0, @@ -32,6 +32,7 @@ var _PhaseNameToValueMap = map[string]Phase{ _PhaseName[175:198]: 9, _PhaseName[198:219]: 10, _PhaseName[219:240]: 11, + _PhaseName[240:258]: 12, } // PhaseString retrieves an enum value from the enum constants string name. 
diff --git a/flyteplugins/go/tasks/plugins/array/k8s/executor.go b/flyteplugins/go/tasks/plugins/array/k8s/executor.go index f664392fd9..3232584674 100644 --- a/flyteplugins/go/tasks/plugins/array/k8s/executor.go +++ b/flyteplugins/go/tasks/plugins/array/k8s/executor.go @@ -103,9 +103,6 @@ func (e Executor) Handle(ctx context.Context, tCtx core.TaskExecutionContext) (c case arrayCore.PhasePreLaunch: nextState = pluginState.SetPhase(arrayCore.PhaseLaunch, version+1).SetReason("Nothing to do in PreLaunch phase.") - case arrayCore.PhaseWaitingForResources: - fallthrough - case arrayCore.PhaseLaunch: // In order to maintain backwards compatibility with the state transitions // in the aws batch plugin. Forward to PhaseCheckingSubTasksExecutions where the launching @@ -113,6 +110,9 @@ func (e Executor) Handle(ctx context.Context, tCtx core.TaskExecutionContext) (c nextState = pluginState.SetPhase(arrayCore.PhaseCheckingSubTaskExecutions, version+1).SetReason("Nothing to do in Launch phase.") err = nil + case arrayCore.PhaseWaitingForResources: + fallthrough + case arrayCore.PhaseCheckingSubTaskExecutions: nextState, externalResources, err = LaunchAndCheckSubTasksState(ctx, tCtx, e.kubeClient, pluginConfig, tCtx.DataStore(), tCtx.OutputWriter().GetOutputPrefixPath(), tCtx.OutputWriter().GetRawOutputPrefix(), pluginState) diff --git a/flyteplugins/go/tasks/plugins/array/k8s/management.go b/flyteplugins/go/tasks/plugins/array/k8s/management.go index d6abaaf74b..12eea118cc 100644 --- a/flyteplugins/go/tasks/plugins/array/k8s/management.go +++ b/flyteplugins/go/tasks/plugins/array/k8s/management.go @@ -306,7 +306,7 @@ func LaunchAndCheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionCon } _, version := currentState.GetPhase() - if phase == arrayCore.PhaseCheckingSubTaskExecutions { + if phase == arrayCore.PhaseCheckingSubTaskExecutions || phase == arrayCore.PhaseWaitingForResources { newSubTaskPhaseHash, err := newState.GetArrayStatus().HashCode() if err != nil { 
return currentState, externalResources, err @@ -316,7 +316,7 @@ func LaunchAndCheckSubTasksState(ctx context.Context, tCtx core.TaskExecutionCon version++ } - newState = newState.SetPhase(phase, version).SetReason("Task is still running") + newState = newState.SetPhase(arrayCore.PhaseCheckingSubTaskExecutions, version).SetReason("Task is still running") } else { newState = newState.SetPhase(phase, version+1) } diff --git a/flyteplugins/go/tasks/plugins/array/k8s/management_test.go b/flyteplugins/go/tasks/plugins/array/k8s/management_test.go index ab26028b09..2bd1d5eefe 100644 --- a/flyteplugins/go/tasks/plugins/array/k8s/management_test.go +++ b/flyteplugins/go/tasks/plugins/array/k8s/management_test.go @@ -67,6 +67,7 @@ func getMockTaskExecutionContext(ctx context.Context, parallelism int) *mocks.Ta tID := &mocks.TaskExecutionID{} tID.OnGetGeneratedName().Return("notfound") + tID.On("GetUniqueNodeID").Return("an-unique-id") tID.OnGetID().Return(core2.TaskExecutionIdentifier{ TaskId: &core2.Identifier{ ResourceType: core2.ResourceType_TASK, @@ -299,7 +300,7 @@ func TestCheckSubTasksState(t *testing.T) { // validate results assert.Nil(t, err) p, _ := newState.GetPhase() - assert.Equal(t, arrayCore.PhaseWaitingForResources.String(), p.String()) + assert.Equal(t, arrayCore.PhaseCheckingSubTaskExecutions.String(), p.String()) resourceManager.AssertNumberOfCalls(t, "AllocateResource", subtaskCount) for _, subtaskPhaseIndex := range newState.GetArrayStatus().Detailed.GetItems() { assert.Equal(t, core.PhaseWaitingForResources, core.Phases[subtaskPhaseIndex]) diff --git a/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context.go b/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context.go index 8ac4d6edc0..77b3ac6501 100644 --- a/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context.go +++ b/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context.go @@ -187,22 +187,20 @@ var logTemplateRegexes = struct { 
tasklog.MustCreateRegex("subtaskParentRetryAttempt"), } -func (s SubTaskExecutionID) TemplateVarsByScheme() *tasklog.TemplateVarsByScheme { - return &tasklog.TemplateVarsByScheme{ - TaskExecution: tasklog.TemplateVars{ - {Regex: logTemplateRegexes.ParentName, Value: s.parentName}, - { - Regex: logTemplateRegexes.ExecutionIndex, - Value: strconv.FormatUint(uint64(s.executionIndex), 10), - }, - { - Regex: logTemplateRegexes.RetryAttempt, - Value: strconv.FormatUint(s.subtaskRetryAttempt, 10), - }, - { - Regex: logTemplateRegexes.ParentRetryAttempt, - Value: strconv.FormatUint(uint64(s.taskRetryAttempt), 10), - }, +func (s SubTaskExecutionID) TemplateVarsByScheme() []tasklog.TemplateVar { + return []tasklog.TemplateVar{ + {Regex: logTemplateRegexes.ParentName, Value: s.parentName}, + { + Regex: logTemplateRegexes.ExecutionIndex, + Value: strconv.FormatUint(uint64(s.executionIndex), 10), + }, + { + Regex: logTemplateRegexes.RetryAttempt, + Value: strconv.FormatUint(s.subtaskRetryAttempt, 10), + }, + { + Regex: logTemplateRegexes.ParentRetryAttempt, + Value: strconv.FormatUint(uint64(s.taskRetryAttempt), 10), }, } } diff --git a/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context_test.go b/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context_test.go index c3e213e403..103980fab0 100644 --- a/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context_test.go +++ b/flyteplugins/go/tasks/plugins/array/k8s/subtask_exec_context_test.go @@ -36,13 +36,11 @@ func TestSubTaskExecutionContext(t *testing.T) { assert.Equal(t, storage.DataReference("/prefix/"), stCtx.OutputWriter().GetOutputPrefixPath()) assert.Equal(t, storage.DataReference("/raw_prefix/5/1"), stCtx.OutputWriter().GetRawOutputPrefix()) assert.Equal(t, - &tasklog.TemplateVarsByScheme{ - TaskExecution: tasklog.TemplateVars{ - {Regex: logTemplateRegexes.ParentName, Value: "notfound"}, - {Regex: logTemplateRegexes.ExecutionIndex, Value: "0"}, - {Regex: logTemplateRegexes.RetryAttempt, Value: "1"}, - 
{Regex: logTemplateRegexes.ParentRetryAttempt, Value: "0"}, - }, + []tasklog.TemplateVar{ + {Regex: logTemplateRegexes.ParentName, Value: "notfound"}, + {Regex: logTemplateRegexes.ExecutionIndex, Value: "0"}, + {Regex: logTemplateRegexes.RetryAttempt, Value: "1"}, + {Regex: logTemplateRegexes.ParentRetryAttempt, Value: "0"}, }, stCtx.TaskExecutionMetadata().GetTaskExecutionID().(SubTaskExecutionID).TemplateVarsByScheme(), ) diff --git a/flyteplugins/go/tasks/plugins/hive/test_helpers.go b/flyteplugins/go/tasks/plugins/hive/test_helpers.go index f8fe8d4148..b494c15870 100644 --- a/flyteplugins/go/tasks/plugins/hive/test_helpers.go +++ b/flyteplugins/go/tasks/plugins/hive/test_helpers.go @@ -66,8 +66,7 @@ func GetSingleHiveQueryTaskTemplate() idlCore.TaskTemplate { var resourceRequirements = &v1.ResourceRequirements{ Limits: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), + v1.ResourceCPU: resource.MustParse("1024m"), }, } diff --git a/flyteplugins/go/tasks/plugins/k8s/dask/dask_test.go b/flyteplugins/go/tasks/plugins/k8s/dask/dask_test.go index 576740af93..e7d43a2256 100644 --- a/flyteplugins/go/tasks/plugins/k8s/dask/dask_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/dask/dask_test.go @@ -179,6 +179,7 @@ func dummyDaskTaskContext(taskTemplate *core.TaskTemplate, resources *v1.Resourc }, }) tID.On("GetGeneratedName").Return(testTaskID) + tID.On("GetUniqueNodeID").Return("an-unique-id") taskExecutionMetadata := &mocks.TaskExecutionMetadata{} taskExecutionMetadata.OnGetTaskExecutionID().Return(tID) diff --git a/flyteplugins/go/tasks/plugins/k8s/kfoperators/common/common_operator_test.go b/flyteplugins/go/tasks/plugins/k8s/kfoperators/common/common_operator_test.go index 1c33594997..d0e154835c 100644 --- a/flyteplugins/go/tasks/plugins/k8s/kfoperators/common/common_operator_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/kfoperators/common/common_operator_test.go @@ -2,6 +2,7 @@ package common import ( 
"fmt" + "os" "testing" "time" @@ -18,6 +19,13 @@ import ( "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/core/mocks" ) +func TestMain(m *testing.M) { + // All tests should run assuming UTC timezone. + time.Local = time.UTC + code := m.Run() + os.Exit(code) +} + func TestExtractCurrentCondition(t *testing.T) { jobCreated := commonOp.JobCondition{ Type: commonOp.JobCreated, @@ -203,8 +211,9 @@ func TestGetLogsTemplateUri(t *testing.T) { taskCtx := dummyTaskContext() pytorchJobObjectMeta := meta_v1.ObjectMeta{ - Name: "test", - Namespace: "pytorch-namespace", + Name: "test", + Namespace: "pytorch-" + + "namespace", CreationTimestamp: meta_v1.Time{ Time: time.Date(2022, time.January, 1, 12, 0, 0, 0, time.UTC), }, @@ -309,6 +318,8 @@ func dummyTaskContext() pluginsCore.TaskExecutionContext { }, RetryAttempt: 0, }) + tID.OnGetGeneratedName().Return("some-acceptable-name") + tID.On("GetUniqueNodeID").Return("an-unique-id") taskExecutionMetadata := &mocks.TaskExecutionMetadata{} taskExecutionMetadata.OnGetTaskExecutionID().Return(tID) diff --git a/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi_test.go b/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi_test.go index d009c7c887..29fe47a446 100644 --- a/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/kfoperators/mpi/mpi_test.go @@ -148,6 +148,7 @@ func dummyMPITaskContext(taskTemplate *core.TaskTemplate, resources *corev1.Reso }, }) tID.OnGetGeneratedName().Return("some-acceptable-name") + tID.On("GetUniqueNodeID").Return("an-unique-id") overrides := &mocks.TaskOverrides{} overrides.OnGetResources().Return(resources) diff --git a/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch_test.go b/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch_test.go index e0606b1020..0700644578 100644 --- a/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch_test.go +++ 
b/flyteplugins/go/tasks/plugins/k8s/kfoperators/pytorch/pytorch_test.go @@ -154,6 +154,7 @@ func dummyPytorchTaskContext(taskTemplate *core.TaskTemplate, resources *corev1. }, }) tID.OnGetGeneratedName().Return("some-acceptable-name") + tID.On("GetUniqueNodeID").Return("an-unique-id") overrides := &mocks.TaskOverrides{} overrides.OnGetResources().Return(resources) diff --git a/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow_test.go b/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow_test.go index c3252183fc..2402bd4c5a 100644 --- a/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/kfoperators/tensorflow/tensorflow_test.go @@ -149,6 +149,7 @@ func dummyTensorFlowTaskContext(taskTemplate *core.TaskTemplate, resources *core }, }) tID.OnGetGeneratedName().Return("some-acceptable-name") + tID.On("GetUniqueNodeID").Return("an-unique-id") overrides := &mocks.TaskOverrides{} overrides.OnGetResources().Return(resources) diff --git a/flyteplugins/go/tasks/plugins/k8s/pod/container_test.go b/flyteplugins/go/tasks/plugins/k8s/pod/container_test.go index 9a70f906b9..e69f764549 100644 --- a/flyteplugins/go/tasks/plugins/k8s/pod/container_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/pod/container_test.go @@ -24,8 +24,7 @@ import ( var containerResourceRequirements = &v1.ResourceRequirements{ Limits: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), + v1.ResourceCPU: resource.MustParse("1024m"), }, } @@ -206,9 +205,8 @@ func TestContainerTaskExecutor_BuildResource(t *testing.T) { assert.NotEmpty(t, j.Spec.Containers) assert.Equal(t, containerResourceRequirements.Limits[v1.ResourceCPU], j.Spec.Containers[0].Resources.Limits[v1.ResourceCPU]) - // TODO: Once configurable, test when setting storage is supported on the cluster vs not. 
- storageRes := j.Spec.Containers[0].Resources.Limits[v1.ResourceStorage] - assert.Equal(t, int64(0), (&storageRes).Value()) + ephemeralStorageRes := j.Spec.Containers[0].Resources.Limits[v1.ResourceEphemeralStorage] + assert.Equal(t, int64(0), (&ephemeralStorageRes).Value()) assert.Equal(t, command, j.Spec.Containers[0].Command) assert.Equal(t, []string{"test-data-reference"}, j.Spec.Containers[0].Args) diff --git a/flyteplugins/go/tasks/plugins/k8s/pod/plugin.go b/flyteplugins/go/tasks/plugins/k8s/pod/plugin.go index eae0ac98b7..f72d4eb1d7 100644 --- a/flyteplugins/go/tasks/plugins/k8s/pod/plugin.go +++ b/flyteplugins/go/tasks/plugins/k8s/pod/plugin.go @@ -146,7 +146,7 @@ func (p plugin) GetTaskPhase(ctx context.Context, pluginContext k8s.PluginContex return p.GetTaskPhaseWithLogs(ctx, pluginContext, r, logPlugin, " (User)", nil) } -func (plugin) GetTaskPhaseWithLogs(ctx context.Context, pluginContext k8s.PluginContext, r client.Object, logPlugin tasklog.Plugin, logSuffix string, extraLogTemplateVarsByScheme *tasklog.TemplateVarsByScheme) (pluginsCore.PhaseInfo, error) { +func (plugin) GetTaskPhaseWithLogs(ctx context.Context, pluginContext k8s.PluginContext, r client.Object, logPlugin tasklog.Plugin, logSuffix string, extraLogTemplateVarsByScheme []tasklog.TemplateVar) (pluginsCore.PhaseInfo, error) { pluginState := k8s.PluginState{} _, err := pluginContext.PluginStateReader().Get(&pluginState) if err != nil { diff --git a/flyteplugins/go/tasks/plugins/k8s/pod/sidecar_test.go b/flyteplugins/go/tasks/plugins/k8s/pod/sidecar_test.go index 0c728780d9..a9848f3276 100644 --- a/flyteplugins/go/tasks/plugins/k8s/pod/sidecar_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/pod/sidecar_test.go @@ -84,6 +84,7 @@ func dummySidecarTaskMetadata(resources *v1.ResourceRequirements, extendedResour }, }) tID.On("GetGeneratedName").Return("my_project:my_domain:my_name") + tID.On("GetUniqueNodeID").Return("an-unique-id") taskMetadata.On("GetTaskExecutionID").Return(tID) to := 
&pluginsCoreMock.TaskOverrides{} @@ -230,15 +231,15 @@ func TestBuildSidecarResource_TaskType2(t *testing.T) { Effect: v1.TaintEffectNoSchedule, } - tolStorage := v1.Toleration{ - Key: "storage", + tolEphemeralStorage := v1.Toleration{ + Key: "ephemeral-storage", Value: "dedicated", Operator: v1.TolerationOpExists, Effect: v1.TaintEffectNoSchedule, } assert.NoError(t, config.SetK8sPluginConfig(&config.K8sPluginConfig{ ResourceTolerations: map[v1.ResourceName][]v1.Toleration{ - v1.ResourceStorage: {tolStorage}, + v1.ResourceStorage: {tolEphemeralStorage}, ResourceNvidiaGPU: {tolGPU}, }, DefaultCPURequest: resource.MustParse("1024m"), @@ -340,15 +341,15 @@ func TestBuildSidecarResource_TaskType1(t *testing.T) { Effect: v1.TaintEffectNoSchedule, } - tolStorage := v1.Toleration{ - Key: "storage", + tolEphemeralStorage := v1.Toleration{ + Key: "ephemeral-storage", Value: "dedicated", Operator: v1.TolerationOpExists, Effect: v1.TaintEffectNoSchedule, } assert.NoError(t, config.SetK8sPluginConfig(&config.K8sPluginConfig{ ResourceTolerations: map[v1.ResourceName][]v1.Toleration{ - v1.ResourceStorage: {tolStorage}, + v1.ResourceStorage: {tolEphemeralStorage}, ResourceNvidiaGPU: {tolGPU}, }, DefaultCPURequest: resource.MustParse("1024m"), @@ -457,15 +458,15 @@ func TestBuildSidecarResource(t *testing.T) { Effect: v1.TaintEffectNoSchedule, } - tolStorage := v1.Toleration{ - Key: "storage", + tolEphemeralStorage := v1.Toleration{ + Key: "ephemeral-storage", Value: "dedicated", Operator: v1.TolerationOpExists, Effect: v1.TaintEffectNoSchedule, } assert.NoError(t, config.SetK8sPluginConfig(&config.K8sPluginConfig{ ResourceTolerations: map[v1.ResourceName][]v1.Toleration{ - v1.ResourceStorage: {tolStorage}, + v1.ResourceStorage: {tolEphemeralStorage}, ResourceNvidiaGPU: {tolGPU}, }, DefaultCPURequest: resource.MustParse("1024m"), diff --git a/flyteplugins/go/tasks/plugins/k8s/ray/config.go b/flyteplugins/go/tasks/plugins/k8s/ray/config.go index 8601264edf..67ea4d2aeb 100644 --- 
a/flyteplugins/go/tasks/plugins/k8s/ray/config.go +++ b/flyteplugins/go/tasks/plugins/k8s/ray/config.go @@ -82,7 +82,7 @@ type Config struct { RemoteClusterConfig pluginmachinery.ClusterConfig `json:"remoteClusterConfig" pflag:"Configuration of remote K8s cluster for ray jobs"` Logs logs.LogConfig `json:"logs" pflag:"-,Log configuration for ray jobs"` LogsSidecar *v1.Container `json:"logsSidecar" pflag:"-,Sidecar to inject into head pods for capturing ray job logs"` - DashboardURLTemplate *tasklog.TemplateLogPlugin `json:"dashboardURLTemplate" pflag:",Template for URL of Ray dashboard running on a head node."` + DashboardURLTemplate *tasklog.TemplateLogPlugin `json:"dashboardURLTemplate" pflag:"-,Template for URL of Ray dashboard running on a head node."` Defaults DefaultConfig `json:"defaults" pflag:"-,Default configuration for ray jobs"` EnableUsageStats bool `json:"enableUsageStats" pflag:",Enable usage stats for ray jobs. These stats are submitted to usage-stats.ray.io per https://docs.ray.io/en/latest/cluster/usage-stats.html"` } diff --git a/flyteplugins/go/tasks/plugins/k8s/ray/ray.go b/flyteplugins/go/tasks/plugins/k8s/ray/ray.go index f6abb58c93..3f8d678ef5 100644 --- a/flyteplugins/go/tasks/plugins/k8s/ray/ray.go +++ b/flyteplugins/go/tasks/plugins/k8s/ray/ray.go @@ -457,13 +457,13 @@ func getEventInfoForRayJob(logConfig logs.LogConfig, pluginContext k8s.PluginCon taskExecID := pluginContext.TaskExecutionMetadata().GetTaskExecutionID() input := tasklog.Input{ - Namespace: rayJob.Namespace, - TaskExecutionID: taskExecID, - ExtraTemplateVarsByScheme: &tasklog.TemplateVarsByScheme{}, + Namespace: rayJob.Namespace, + TaskExecutionID: taskExecID, + ExtraTemplateVars: []tasklog.TemplateVar{}, } if rayJob.Status.JobId != "" { - input.ExtraTemplateVarsByScheme.Common = append( - input.ExtraTemplateVarsByScheme.Common, + input.ExtraTemplateVars = append( + input.ExtraTemplateVars, tasklog.TemplateVar{ Regex: logTemplateRegexes.RayJobID, Value: rayJob.Status.JobId, 
@@ -471,8 +471,8 @@ func getEventInfoForRayJob(logConfig logs.LogConfig, pluginContext k8s.PluginCon ) } if rayJob.Status.RayClusterName != "" { - input.ExtraTemplateVarsByScheme.Common = append( - input.ExtraTemplateVarsByScheme.Common, + input.ExtraTemplateVars = append( + input.ExtraTemplateVars, tasklog.TemplateVar{ Regex: logTemplateRegexes.RayClusterName, Value: rayJob.Status.RayClusterName, diff --git a/flyteplugins/go/tasks/plugins/k8s/ray/ray_test.go b/flyteplugins/go/tasks/plugins/k8s/ray/ray_test.go index b171ae9aa8..beab938768 100644 --- a/flyteplugins/go/tasks/plugins/k8s/ray/ray_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/ray/ray_test.go @@ -738,7 +738,6 @@ func TestGetEventInfo_LogTemplates(t *testing.T) { TemplateURIs: []tasklog.TemplateURI{ "http://test/projects/{{ .executionProject }}/domains/{{ .executionDomain }}/executions/{{ .executionName }}/nodeId/{{ .nodeID }}/taskId/{{ .taskID }}/attempt/{{ .taskRetryAttempt }}", }, - Scheme: tasklog.TemplateSchemeTaskExecution, }, expectedTaskLogs: []*core.TaskLog{ { @@ -823,7 +822,6 @@ func TestGetEventInfo_DashboardURL(t *testing.T) { dashboardURLTemplate: tasklog.TemplateLogPlugin{ DisplayName: "Ray Dashboard", TemplateURIs: []tasklog.TemplateURI{"http://test/{{.generatedName}}"}, - Scheme: tasklog.TemplateSchemeTaskExecution, }, expectedTaskLogs: []*core.TaskLog{ { diff --git a/flyteplugins/go/tasks/plugins/k8s/spark/spark_test.go b/flyteplugins/go/tasks/plugins/k8s/spark/spark_test.go index b07ab0ef33..264f9514e9 100644 --- a/flyteplugins/go/tasks/plugins/k8s/spark/spark_test.go +++ b/flyteplugins/go/tasks/plugins/k8s/spark/spark_test.go @@ -379,6 +379,7 @@ func dummySparkTaskContext(taskTemplate *core.TaskTemplate, interruptible bool) }, }) tID.On("GetGeneratedName").Return("some-acceptable-name") + tID.On("GetUniqueNodeID").Return("an-unique-id") overrides := &mocks.TaskOverrides{} overrides.On("GetResources").Return(&corev1.ResourceRequirements{}) diff --git 
a/flyteplugins/go/tasks/plugins/presto/helpers_test.go b/flyteplugins/go/tasks/plugins/presto/helpers_test.go index 60fc1cc050..fb421b444f 100644 --- a/flyteplugins/go/tasks/plugins/presto/helpers_test.go +++ b/flyteplugins/go/tasks/plugins/presto/helpers_test.go @@ -42,8 +42,8 @@ func GetPrestoQueryTaskTemplate() idlCore.TaskTemplate { var resourceRequirements = &v1.ResourceRequirements{ Limits: v1.ResourceList{ - v1.ResourceCPU: resource.MustParse("1024m"), - v1.ResourceStorage: resource.MustParse("100M"), + v1.ResourceCPU: resource.MustParse("1024m"), + v1.ResourceEphemeralStorage: resource.MustParse("100M"), }, } diff --git a/flyteplugins/go/tasks/plugins/webapi/agent/integration_test.go b/flyteplugins/go/tasks/plugins/webapi/agent/integration_test.go index 827af0d907..a2b2135591 100644 --- a/flyteplugins/go/tasks/plugins/webapi/agent/integration_test.go +++ b/flyteplugins/go/tasks/plugins/webapi/agent/integration_test.go @@ -40,9 +40,25 @@ type MockPlugin struct { type MockAsyncTask struct { } +func (m *MockAsyncTask) GetTaskMetrics(ctx context.Context, in *admin.GetTaskMetricsRequest, opts ...grpc.CallOption) (*admin.GetTaskMetricsResponse, error) { + panic("not implemented") +} + +func (m *MockAsyncTask) GetTaskLogs(ctx context.Context, in *admin.GetTaskLogsRequest, opts ...grpc.CallOption) (*admin.GetTaskLogsResponse, error) { + panic("not implemented") +} + type MockSyncTask struct { } +func (m *MockSyncTask) GetTaskMetrics(ctx context.Context, in *admin.GetTaskMetricsRequest, opts ...grpc.CallOption) (*admin.GetTaskMetricsResponse, error) { + panic("not implemented") +} + +func (m *MockSyncTask) GetTaskLogs(ctx context.Context, in *admin.GetTaskLogsRequest, opts ...grpc.CallOption) (*admin.GetTaskLogsResponse, error) { + panic("not implemented") +} + func (m *MockAsyncTask) CreateTask(_ context.Context, createTaskRequest *admin.CreateTaskRequest, _ ...grpc.CallOption) (*admin.CreateTaskResponse, error) { expectedArgs := []string{"pyflyte-fast-execute", 
"--output-prefix", "fake://bucket/prefix/nhv"} if slices.Equal(createTaskRequest.Template.GetContainer().Args, expectedArgs) { diff --git a/flyteplugins/go/tasks/plugins/webapi/agent/plugin.go b/flyteplugins/go/tasks/plugins/webapi/agent/plugin.go index 73a98782bc..7a9f92caaa 100644 --- a/flyteplugins/go/tasks/plugins/webapi/agent/plugin.go +++ b/flyteplugins/go/tasks/plugins/webapi/agent/plugin.go @@ -42,7 +42,8 @@ type Plugin struct { } type ResourceWrapper struct { - State admin.State + Phase flyteIdl.TaskExecution_Phase + State admin.State // This is deprecated. Outputs *flyteIdl.LiteralMap Message string LogLinks []*flyteIdl.TaskLog @@ -121,6 +122,7 @@ func (p Plugin) Create(ctx context.Context, taskCtx webapi.TaskExecutionContextR logger.Infof(ctx, "Agent is executing a synchronous task.") return nil, ResourceWrapper{ + Phase: res.GetResource().Phase, State: res.GetResource().State, Outputs: res.GetResource().Outputs, Message: res.GetResource().Message, @@ -159,6 +161,7 @@ func (p Plugin) Get(ctx context.Context, taskCtx webapi.GetContext) (latest weba } return ResourceWrapper{ + Phase: res.Resource.Phase, State: res.Resource.State, Outputs: res.Resource.Outputs, Message: res.Resource.Message, @@ -190,6 +193,34 @@ func (p Plugin) Status(ctx context.Context, taskCtx webapi.StatusContext) (phase resource := taskCtx.Resource().(ResourceWrapper) taskInfo := &core.TaskInfo{Logs: resource.LogLinks} + switch resource.Phase { + case flyteIdl.TaskExecution_QUEUED: + return core.PhaseInfoQueuedWithTaskInfo(core.DefaultPhaseVersion, resource.Message, taskInfo), nil + case flyteIdl.TaskExecution_WAITING_FOR_RESOURCES: + return core.PhaseInfoWaitingForResourcesInfo(time.Now(), core.DefaultPhaseVersion, resource.Message, taskInfo), nil + case flyteIdl.TaskExecution_INITIALIZING: + return core.PhaseInfoInitializing(time.Now(), core.DefaultPhaseVersion, resource.Message, taskInfo), nil + case flyteIdl.TaskExecution_RUNNING: + return 
core.PhaseInfoRunning(core.DefaultPhaseVersion, taskInfo), nil + case flyteIdl.TaskExecution_SUCCEEDED: + err = writeOutput(ctx, taskCtx, resource) + if err != nil { + logger.Errorf(ctx, "Failed to write output with err %s", err.Error()) + return core.PhaseInfoUndefined, err + } + return core.PhaseInfoSuccess(taskInfo), nil + case flyteIdl.TaskExecution_ABORTED: + return core.PhaseInfoFailure(pluginErrors.TaskFailedWithError, "failed to run the job with aborted phase", taskInfo), nil + case flyteIdl.TaskExecution_FAILED: + return core.PhaseInfoFailure(pluginErrors.TaskFailedWithError, "failed to run the job", taskInfo), nil + } + + // The default phase is undefined. + if resource.Phase != flyteIdl.TaskExecution_UNDEFINED { + return core.PhaseInfoUndefined, pluginErrors.Errorf(core.SystemErrorCode, "unknown execution phase [%v].", resource.Phase) + } + + // If the phase is undefined, we will use state to determine the phase. switch resource.State { case admin.State_PENDING: return core.PhaseInfoInitializing(time.Now(), core.DefaultPhaseVersion, resource.Message, taskInfo), nil @@ -207,7 +238,7 @@ func (p Plugin) Status(ctx context.Context, taskCtx webapi.StatusContext) (phase } return core.PhaseInfoSuccess(taskInfo), nil } - return core.PhaseInfoUndefined, pluginErrors.Errorf(core.SystemErrorCode, "unknown execution phase [%v].", resource.State) + return core.PhaseInfoUndefined, pluginErrors.Errorf(core.SystemErrorCode, "unknown execution state [%v].", resource.State) } func writeOutput(ctx context.Context, taskCtx webapi.StatusContext, resource ResourceWrapper) error { @@ -366,10 +397,10 @@ func initializeAgentRegistry(cfg *Config, connectionCache map[*Agent]*grpc.Clien } if !ok { - return nil, fmt.Errorf("failed to list agent with a non-gRPC error : [%v]", err) + return nil, fmt.Errorf("failed to list agent: [%v] with a non-gRPC error: [%v]", agentDeployment, err) } - return nil, fmt.Errorf("failed to list agent with error: [%v]", err) + return nil, 
fmt.Errorf("failed to list agent: [%v] with error: [%v]", agentDeployment, err) } agents := res.GetAgents() diff --git a/flyteplugins/go/tasks/plugins/webapi/agent/plugin_test.go b/flyteplugins/go/tasks/plugins/webapi/agent/plugin_test.go index b9fd7e1b35..d5b5fae5fe 100644 --- a/flyteplugins/go/tasks/plugins/webapi/agent/plugin_test.go +++ b/flyteplugins/go/tasks/plugins/webapi/agent/plugin_test.go @@ -13,6 +13,7 @@ import ( "github.com/flyteorg/flyte/flyteidl/clients/go/coreutils" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin" + flyteIdl "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" flyteIdlCore "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/service" "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery" @@ -219,6 +220,118 @@ func TestPlugin(t *testing.T) { assert.Error(t, err) assert.Equal(t, pluginsCore.PhaseUndefined, phase.Phase()) }) + + t.Run("test TaskExecution_UNDEFINED Status", func(t *testing.T) { + taskContext := new(webapiPlugin.StatusContext) + taskContext.On("Resource").Return(ResourceWrapper{ + Phase: flyteIdl.TaskExecution_UNDEFINED, + Outputs: nil, + Message: "", + LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, + }) + + phase, err := plugin.Status(context.Background(), taskContext) + assert.NoError(t, err) + assert.Equal(t, pluginsCore.PhaseRetryableFailure, phase.Phase()) + }) + + t.Run("test TaskExecution_QUEUED Status", func(t *testing.T) { + taskContext := new(webapiPlugin.StatusContext) + taskContext.On("Resource").Return(ResourceWrapper{ + Phase: flyteIdl.TaskExecution_QUEUED, + Outputs: nil, + Message: "", + LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, + }) + + phase, err := plugin.Status(context.Background(), taskContext) + assert.NoError(t, err) + assert.Equal(t, pluginsCore.PhaseQueued, phase.Phase()) + }) + + t.Run("test 
TaskExecution_WAITING_FOR_RESOURCES Status", func(t *testing.T) { + taskContext := new(webapiPlugin.StatusContext) + taskContext.On("Resource").Return(ResourceWrapper{ + Phase: flyteIdl.TaskExecution_WAITING_FOR_RESOURCES, + Outputs: nil, + Message: "", + LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, + }) + + phase, err := plugin.Status(context.Background(), taskContext) + assert.NoError(t, err) + assert.Equal(t, pluginsCore.PhaseWaitingForResources, phase.Phase()) + }) + + t.Run("test TaskExecution_INITIALIZING Status", func(t *testing.T) { + taskContext := new(webapiPlugin.StatusContext) + taskContext.On("Resource").Return(ResourceWrapper{ + Phase: flyteIdl.TaskExecution_INITIALIZING, + Outputs: nil, + Message: "", + LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, + }) + + phase, err := plugin.Status(context.Background(), taskContext) + assert.NoError(t, err) + assert.Equal(t, pluginsCore.PhaseInitializing, phase.Phase()) + }) + + t.Run("test TaskExecution_RUNNING Status", func(t *testing.T) { + taskContext := new(webapiPlugin.StatusContext) + taskContext.On("Resource").Return(ResourceWrapper{ + Phase: flyteIdl.TaskExecution_RUNNING, + Outputs: nil, + Message: "", + LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, + }) + + phase, err := plugin.Status(context.Background(), taskContext) + assert.NoError(t, err) + assert.Equal(t, pluginsCore.PhaseRunning, phase.Phase()) + }) + + t.Run("test TaskExecution_ABORTED Status", func(t *testing.T) { + taskContext := new(webapiPlugin.StatusContext) + taskContext.On("Resource").Return(ResourceWrapper{ + Phase: flyteIdl.TaskExecution_ABORTED, + Outputs: nil, + Message: "", + LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, + }) + + phase, err := plugin.Status(context.Background(), taskContext) + assert.NoError(t, err) + assert.Equal(t, 
pluginsCore.PhasePermanentFailure, phase.Phase()) + }) + + t.Run("test TaskExecution_FAILED Status", func(t *testing.T) { + taskContext := new(webapiPlugin.StatusContext) + taskContext.On("Resource").Return(ResourceWrapper{ + Phase: flyteIdl.TaskExecution_FAILED, + Outputs: nil, + Message: "", + LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, + }) + + phase, err := plugin.Status(context.Background(), taskContext) + assert.NoError(t, err) + assert.Equal(t, pluginsCore.PhasePermanentFailure, phase.Phase()) + }) + + t.Run("test UNDEFINED Phase", func(t *testing.T) { + taskContext := new(webapiPlugin.StatusContext) + taskContext.On("Resource").Return(ResourceWrapper{ + State: 8, + Outputs: nil, + Message: "", + LogLinks: []*flyteIdlCore.TaskLog{{Uri: "http://localhost:3000/log", Name: "Log Link"}}, + }) + + phase, err := plugin.Status(context.Background(), taskContext) + assert.Error(t, err) + assert.Equal(t, pluginsCore.PhaseUndefined, phase.Phase()) + }) } func TestInitializeAgentRegistry(t *testing.T) { diff --git a/flyteplugins/go/tasks/plugins/webapi/athena/plugin.go b/flyteplugins/go/tasks/plugins/webapi/athena/plugin.go index 826a12e45f..b485589736 100644 --- a/flyteplugins/go/tasks/plugins/webapi/athena/plugin.go +++ b/flyteplugins/go/tasks/plugins/webapi/athena/plugin.go @@ -128,7 +128,7 @@ func (p Plugin) Delete(ctx context.Context, tCtx webapi.DeleteContext) error { return err } - logger.Info(ctx, "Deleted query execution [%v]", resp) + logger.Infof(ctx, "Deleted query execution [%v]", resp) return nil } diff --git a/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin.go b/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin.go index f08d1d15ea..6661550530 100644 --- a/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin.go +++ b/flyteplugins/go/tasks/plugins/webapi/bigquery/plugin.go @@ -256,7 +256,7 @@ func (p Plugin) Delete(ctx context.Context, taskCtx webapi.DeleteContext) error return err } - logger.Info(ctx, 
"Cancelled job [%s]", formatJobReference(resourceMeta.JobReference)) + logger.Infof(ctx, "Cancelled job [%s]", formatJobReference(resourceMeta.JobReference)) return nil } diff --git a/flyteplugins/go/tasks/plugins/webapi/databricks/plugin.go b/flyteplugins/go/tasks/plugins/webapi/databricks/plugin.go index 3bd03135dc..3e9b37ea93 100644 --- a/flyteplugins/go/tasks/plugins/webapi/databricks/plugin.go +++ b/flyteplugins/go/tasks/plugins/webapi/databricks/plugin.go @@ -203,7 +203,7 @@ func (p Plugin) Delete(ctx context.Context, taskCtx webapi.DeleteContext) error return err } defer resp.Body.Close() - logger.Info(ctx, "Deleted query execution [%v]", resp) + logger.Infof(ctx, "Deleted query execution [%v]", resp) return nil } diff --git a/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin.go b/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin.go index 33334b4003..02bf947fd4 100644 --- a/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin.go +++ b/flyteplugins/go/tasks/plugins/webapi/snowflake/plugin.go @@ -179,7 +179,7 @@ func (p Plugin) Delete(ctx context.Context, taskCtx webapi.DeleteContext) error return err } defer resp.Body.Close() - logger.Info(ctx, "Deleted query execution [%v]", resp) + logger.Infof(ctx, "Deleted query execution [%v]", resp) return nil } diff --git a/flyteplugins/pull_request_template.md b/flyteplugins/pull_request_template.md deleted file mode 100644 index 9cdab99b46..0000000000 --- a/flyteplugins/pull_request_template.md +++ /dev/null @@ -1,35 +0,0 @@ -## _Read then delete this section_ - -_- Make sure to use a concise title for the pull-request._ - -_- Use #patch, #minor or #major in the pull-request title to bump the corresponding version. Otherwise, the patch version -will be bumped. [More details](https://github.com/marketplace/actions/github-tag-bump)_ - -# TL;DR -_Please replace this text with a description of what this PR accomplishes._ - -## Type - - [ ] Bug Fix - - [ ] Feature - - [ ] Plugin - -## Are all requirements met? 
- - - [ ] Code completed - - [ ] Smoke tested - - [ ] Unit tests added - - [ ] Code documentation added - - [ ] Any pending items have an associated Issue - -## Complete description - _How did you fix the bug, make the feature etc. Link to any design docs etc_ - -## Tracking Issue -_Remove the '*fixes*' keyword if there will be multiple PRs to fix the linked issue_ - -fixes https://github.com/flyteorg/flyte/issues/ - -## Follow-up issue -_NA_ -OR -_https://github.com/flyteorg/flyte/issues/_ diff --git a/flytepropeller/Dockerfile b/flytepropeller/Dockerfile deleted file mode 100644 index 84ae3b8585..0000000000 --- a/flytepropeller/Dockerfile +++ /dev/null @@ -1,42 +0,0 @@ -# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES. -# ONLY EDIT THIS FILE FROM WITHIN THE 'LYFT/BOILERPLATE' REPOSITORY: -# -# TO OPT OUT OF UPDATES, SEE https://github.com/lyft/boilerplate/blob/master/Readme.rst - -FROM --platform=${BUILDPLATFORM} golang:1.19-alpine3.16 as builder - -ARG TARGETARCH -ENV GOARCH "${TARGETARCH}" -ENV GOOS linux - -RUN apk add git openssh-client make curl - -# COPY only the go mod files for efficient caching -COPY go.mod go.sum /go/src/github.com/flyteorg/flytepropeller/ -WORKDIR /go/src/github.com/flyteorg/flytepropeller - -# Pull dependencies -RUN go mod download - -# COPY the rest of the source code -COPY . 
/go/src/github.com/flyteorg/flytepropeller/ - -# This 'linux_compile' target should compile binaries to the /artifacts directory -# The main entrypoint should be compiled to /artifacts/flytepropeller -RUN make linux_compile - -# update the PATH to include the /artifacts directory -ENV PATH="/artifacts:${PATH}" - -# This will eventually move to centurylink/ca-certs:latest for minimum possible image size -FROM alpine:3.16 -LABEL org.opencontainers.image.source https://github.com/flyteorg/flytepropeller - -COPY --from=builder /artifacts /bin - -RUN apk --update add ca-certificates - -RUN addgroup -S flyte && adduser -S flyte -G flyte -USER flyte - -CMD ["flytepropeller"] diff --git a/flytepropeller/cmd/controller/cmd/root.go b/flytepropeller/cmd/controller/cmd/root.go index a3db18833c..e1069650ad 100644 --- a/flytepropeller/cmd/controller/cmd/root.go +++ b/flytepropeller/cmd/controller/cmd/root.go @@ -12,16 +12,15 @@ import ( "github.com/spf13/cobra" "github.com/spf13/pflag" "golang.org/x/sync/errgroup" - "k8s.io/client-go/rest" "k8s.io/klog" "sigs.k8s.io/controller-runtime/pkg/cache" - "sigs.k8s.io/controller-runtime/pkg/client" "sigs.k8s.io/controller-runtime/pkg/manager" "sigs.k8s.io/controller-runtime/pkg/metrics" metricsserver "sigs.k8s.io/controller-runtime/pkg/metrics/server" "github.com/flyteorg/flyte/flytepropeller/pkg/controller" config2 "github.com/flyteorg/flyte/flytepropeller/pkg/controller/config" + "github.com/flyteorg/flyte/flytepropeller/pkg/controller/executors" "github.com/flyteorg/flyte/flytepropeller/pkg/signals" "github.com/flyteorg/flyte/flytestdlib/config" "github.com/flyteorg/flyte/flytestdlib/config/viper" @@ -144,22 +143,8 @@ func executeRootCmd(baseCtx context.Context, cfg *config2.Config) error { SyncPeriod: &cfg.DownstreamEval.Duration, DefaultNamespaces: namespaceConfigs, }, - NewCache: func(config *rest.Config, options cache.Options) (cache.Cache, error) { - k8sCache, err := cache.New(config, options) - if err != nil { - return 
k8sCache, err - } - - return otelutils.WrapK8sCache(k8sCache), nil - }, - NewClient: func(config *rest.Config, options client.Options) (client.Client, error) { - k8sClient, err := client.New(config, options) - if err != nil { - return k8sClient, err - } - - return otelutils.WrapK8sClient(k8sClient), nil - }, + NewCache: executors.NewCache, + NewClient: executors.BuildNewClientFunc(propellerScope), Metrics: metricsserver.Options{ // Disable metrics serving BindAddress: "0", diff --git a/flytepropeller/cmd/controller/cmd/webhook.go b/flytepropeller/cmd/controller/cmd/webhook.go index f34f21d12c..ae538385fb 100644 --- a/flytepropeller/cmd/controller/cmd/webhook.go +++ b/flytepropeller/cmd/controller/cmd/webhook.go @@ -5,9 +5,7 @@ import ( "github.com/spf13/cobra" "golang.org/x/sync/errgroup" - "k8s.io/client-go/rest" "sigs.k8s.io/controller-runtime/pkg/cache" - "sigs.k8s.io/controller-runtime/pkg/client" "sigs.k8s.io/controller-runtime/pkg/manager" metricsserver "sigs.k8s.io/controller-runtime/pkg/metrics/server" ctrlWebhook "sigs.k8s.io/controller-runtime/pkg/webhook" @@ -110,9 +108,8 @@ func runWebhook(origContext context.Context, propellerCfg *config.Config, cfg *w SyncPeriod: &propellerCfg.DownstreamEval.Duration, DefaultNamespaces: namespaceConfigs, }, - NewClient: func(config *rest.Config, options client.Options) (client.Client, error) { - return executors.NewFallbackClientBuilder(webhookScope).Build(nil, config, options) - }, + NewCache: executors.NewCache, + NewClient: executors.BuildNewClientFunc(webhookScope), Metrics: metricsserver.Options{ // Disable metrics serving BindAddress: "0", diff --git a/flytepropeller/cmd/kubectl-flyte/cmd/root.go b/flytepropeller/cmd/kubectl-flyte/cmd/root.go index 4bfabe47b5..f6e2d92110 100644 --- a/flytepropeller/cmd/kubectl-flyte/cmd/root.go +++ b/flytepropeller/cmd/kubectl-flyte/cmd/root.go @@ -24,7 +24,7 @@ func init() { pflag.CommandLine.AddGoFlagSet(flag.CommandLine) err := flag.CommandLine.Parse([]string{}) if err != nil 
{ - logger.Error(context.TODO(), "Error in initializing: %v", err) + logger.Errorf(context.TODO(), "Error in initializing: %v", err) os.Exit(-1) } } diff --git a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/iface.go b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/iface.go index b3d744bd77..6590aaa04a 100644 --- a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/iface.go +++ b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/iface.go @@ -318,7 +318,7 @@ type MutableNodeStatus interface { SetOutputDir(d DataReference) SetParentNodeID(n *NodeID) SetParentTaskID(t *core.TaskExecutionIdentifier) - UpdatePhase(phase NodePhase, occurredAt metav1.Time, reason string, err *core.ExecutionError) + UpdatePhase(phase NodePhase, occurredAt metav1.Time, reason string, enableCRDebugMetadata bool, err *core.ExecutionError) IncrementAttempts() uint32 IncrementSystemFailures() uint32 SetCached() diff --git a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/ExecutableNodeStatus.go b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/ExecutableNodeStatus.go index cb447e06fc..cdf3f1b6ab 100644 --- a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/ExecutableNodeStatus.go +++ b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/ExecutableNodeStatus.go @@ -1187,9 +1187,9 @@ func (_m *ExecutableNodeStatus) SetParentTaskID(t *core.TaskExecutionIdentifier) _m.Called(t) } -// UpdatePhase provides a mock function with given fields: phase, occurredAt, reason, err -func (_m *ExecutableNodeStatus) UpdatePhase(phase v1alpha1.NodePhase, occurredAt v1.Time, reason string, err *core.ExecutionError) { - _m.Called(phase, occurredAt, reason, err) +// UpdatePhase provides a mock function with given fields: phase, occurredAt, reason, enableCRDebugMetadata, err +func (_m *ExecutableNodeStatus) UpdatePhase(phase v1alpha1.NodePhase, occurredAt v1.Time, reason string, enableCRDebugMetadata bool, err *core.ExecutionError) { + _m.Called(phase, occurredAt, reason, enableCRDebugMetadata, err) } // 
VisitNodeStatuses provides a mock function with given fields: visitor diff --git a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/MutableNodeStatus.go b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/MutableNodeStatus.go index b8c97f6be7..3f103bc2ec 100644 --- a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/MutableNodeStatus.go +++ b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/mocks/MutableNodeStatus.go @@ -587,7 +587,7 @@ func (_m *MutableNodeStatus) SetParentTaskID(t *core.TaskExecutionIdentifier) { _m.Called(t) } -// UpdatePhase provides a mock function with given fields: phase, occurredAt, reason, err -func (_m *MutableNodeStatus) UpdatePhase(phase v1alpha1.NodePhase, occurredAt v1.Time, reason string, err *core.ExecutionError) { - _m.Called(phase, occurredAt, reason, err) +// UpdatePhase provides a mock function with given fields: phase, occurredAt, reason, enableCRDebugMetadata, err +func (_m *MutableNodeStatus) UpdatePhase(phase v1alpha1.NodePhase, occurredAt v1.Time, reason string, enableCRDebugMetadata bool, err *core.ExecutionError) { + _m.Called(phase, occurredAt, reason, enableCRDebugMetadata, err) } diff --git a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status.go b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status.go index 9fb891d01a..aab034224d 100644 --- a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status.go +++ b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status.go @@ -594,7 +594,7 @@ func (in *NodeStatus) GetOrCreateArrayNodeStatus() MutableArrayNodeStatus { return in.ArrayNodeStatus } -func (in *NodeStatus) UpdatePhase(p NodePhase, occurredAt metav1.Time, reason string, err *core.ExecutionError) { +func (in *NodeStatus) UpdatePhase(p NodePhase, occurredAt metav1.Time, reason string, enableCRDebugMetadata bool, err *core.ExecutionError) { if in.Phase == p && in.Message == reason { // We will not update the phase multiple times. 
This prevents the comparison from returning false positive return @@ -607,6 +607,7 @@ func (in *NodeStatus) UpdatePhase(p NodePhase, occurredAt metav1.Time, reason st } n := occurredAt + in.LastUpdatedAt = &n if occurredAt.IsZero() { n = metav1.Now() } @@ -625,35 +626,31 @@ func (in *NodeStatus) UpdatePhase(p NodePhase, occurredAt metav1.Time, reason st in.LastAttemptStartedAt = &n } } else if IsPhaseTerminal(p) { - // If we are in terminal phase then we will clear out all our fields as they are not required anymore - // Only thing required is stopped at and lastupdatedat time if in.StoppedAt == nil { in.StoppedAt = &n } - if in.StartedAt == nil { - in.StartedAt = &n - } - if in.LastAttemptStartedAt == nil { - in.LastAttemptStartedAt = &n + if p == NodePhaseSucceeded || p == NodePhaseSkipped || !enableCRDebugMetadata { + // Clear most status related fields after reaching a terminal state. This keeps the CR state small to avoid + // etcd size limits. Importantly we keep Phase, StoppedAt and Error which will be needed further. + in.Message = "" + in.QueuedAt = nil + in.StartedAt = nil + in.LastUpdatedAt = nil + in.LastAttemptStartedAt = nil + in.DynamicNodeStatus = nil + in.BranchStatus = nil + in.SubNodeStatus = nil + in.TaskNodeStatus = nil + in.WorkflowNodeStatus = nil + } else { + if in.StartedAt == nil { + in.StartedAt = &n + } + if in.LastAttemptStartedAt == nil { + in.LastAttemptStartedAt = &n + } } } - in.LastUpdatedAt = &n - - // For cases in which the node is either Succeeded or Skipped we clear most fields from the status - // except for StoppedAt and Phase. StoppedAt is used to calculate transition latency between this node and - // any downstream nodes and Phase is required for propeller to continue to downstream nodes. 
- if p == NodePhaseSucceeded || p == NodePhaseSkipped { - in.Message = "" - in.QueuedAt = nil - in.StartedAt = nil - in.LastAttemptStartedAt = nil - in.DynamicNodeStatus = nil - in.BranchStatus = nil - in.SubNodeStatus = nil - in.TaskNodeStatus = nil - in.WorkflowNodeStatus = nil - in.LastUpdatedAt = nil - } in.SetDirty() } diff --git a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status_test.go b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status_test.go index 45c299687a..0278d62f55 100644 --- a/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status_test.go +++ b/flytepropeller/pkg/apis/flyteworkflow/v1alpha1/node_status_test.go @@ -259,71 +259,202 @@ func TestNodeStatus_UpdatePhase(t *testing.T) { n := metav1.NewTime(time.Now()) const queued = "queued" - t.Run("identical-phase", func(t *testing.T) { - p := NodePhaseQueued - ns := NodeStatus{ - Phase: p, - Message: queued, - } - msg := queued - ns.UpdatePhase(p, n, msg, nil) - assert.Nil(t, ns.QueuedAt) - }) + const success = "success" + for _, enableCRDebugMetadata := range []bool{false, true} { + t.Run("identical-phase", func(t *testing.T) { + p := NodePhaseQueued + ns := NodeStatus{ + Phase: p, + Message: queued, + } + msg := queued + ns.UpdatePhase(p, n, msg, enableCRDebugMetadata, nil) + assert.Nil(t, ns.QueuedAt) + }) - t.Run("zero", func(t *testing.T) { - p := NodePhaseQueued - ns := NodeStatus{} - msg := queued - ns.UpdatePhase(p, metav1.NewTime(time.Time{}), msg, nil) - assert.NotNil(t, ns.QueuedAt) - }) + t.Run("zero", func(t *testing.T) { + p := NodePhaseQueued + ns := NodeStatus{} + msg := queued + ns.UpdatePhase(p, metav1.NewTime(time.Time{}), msg, enableCRDebugMetadata, nil) + assert.NotNil(t, ns.QueuedAt) + }) - t.Run("non-terminal", func(t *testing.T) { - ns := NodeStatus{} - p := NodePhaseQueued - msg := queued - ns.UpdatePhase(p, n, msg, nil) + t.Run("non-terminal", func(t *testing.T) { + ns := NodeStatus{} + p := NodePhaseQueued + msg := queued + ns.UpdatePhase(p, n, msg, 
enableCRDebugMetadata, nil) + + assert.Equal(t, *ns.LastUpdatedAt, n) + assert.Equal(t, *ns.QueuedAt, n) + assert.Nil(t, ns.LastAttemptStartedAt) + assert.Nil(t, ns.StartedAt) + assert.Nil(t, ns.StoppedAt) + assert.Equal(t, p, ns.Phase) + assert.Equal(t, msg, ns.Message) + assert.Nil(t, ns.Error) + }) - assert.Equal(t, *ns.LastUpdatedAt, n) - assert.Equal(t, *ns.QueuedAt, n) - assert.Nil(t, ns.LastAttemptStartedAt) - assert.Nil(t, ns.StartedAt) - assert.Nil(t, ns.StoppedAt) - assert.Equal(t, p, ns.Phase) - assert.Equal(t, msg, ns.Message) - assert.Nil(t, ns.Error) - }) + t.Run("non-terminal-running", func(t *testing.T) { + ns := NodeStatus{} + p := NodePhaseRunning + msg := "running" + ns.UpdatePhase(p, n, msg, enableCRDebugMetadata, nil) + + assert.Equal(t, *ns.LastUpdatedAt, n) + assert.Nil(t, ns.QueuedAt) + assert.Equal(t, *ns.LastAttemptStartedAt, n) + assert.Equal(t, *ns.StartedAt, n) + assert.Nil(t, ns.StoppedAt) + assert.Equal(t, p, ns.Phase) + assert.Equal(t, msg, ns.Message) + assert.Nil(t, ns.Error) + }) + + t.Run("non-terminal-timing-out", func(t *testing.T) { + ns := NodeStatus{} + p := NodePhaseTimingOut + msg := "timing-out" + ns.UpdatePhase(p, n, msg, enableCRDebugMetadata, nil) + + assert.Equal(t, *ns.LastUpdatedAt, n) + assert.Nil(t, ns.QueuedAt) + assert.Nil(t, ns.LastAttemptStartedAt) + assert.Nil(t, ns.StartedAt) + assert.Nil(t, ns.StoppedAt) + assert.Equal(t, p, ns.Phase) + assert.Equal(t, msg, ns.Message) + assert.Nil(t, ns.Error) + }) - t.Run("non-terminal-running", func(t *testing.T) { + t.Run("terminal-success", func(t *testing.T) { + ns := NodeStatus{} + p := NodePhaseSucceeded + msg := success + ns.UpdatePhase(p, n, msg, enableCRDebugMetadata, nil) + + assert.Nil(t, ns.LastUpdatedAt) + assert.Nil(t, ns.QueuedAt) + assert.Nil(t, ns.LastAttemptStartedAt) + assert.Nil(t, ns.StartedAt) + assert.Equal(t, *ns.StoppedAt, n) + assert.Equal(t, p, ns.Phase) + assert.Empty(t, ns.Message) + assert.Nil(t, ns.Error) + }) + + t.Run("terminal-skipped", 
func(t *testing.T) { + ns := NodeStatus{} + p := NodePhaseSucceeded + msg := success + ns.UpdatePhase(p, n, msg, enableCRDebugMetadata, nil) + + assert.Nil(t, ns.LastUpdatedAt) + assert.Nil(t, ns.QueuedAt) + assert.Nil(t, ns.LastAttemptStartedAt) + assert.Nil(t, ns.StartedAt) + assert.Equal(t, *ns.StoppedAt, n) + assert.Equal(t, p, ns.Phase) + assert.Empty(t, ns.Message) + assert.Nil(t, ns.Error) + }) + + t.Run("terminal-success-preset", func(t *testing.T) { + ns := NodeStatus{ + QueuedAt: &n, + StartedAt: &n, + LastUpdatedAt: &n, + LastAttemptStartedAt: &n, + WorkflowNodeStatus: &WorkflowNodeStatus{}, + BranchStatus: &BranchNodeStatus{}, + DynamicNodeStatus: &DynamicNodeStatus{}, + TaskNodeStatus: &TaskNodeStatus{}, + SubNodeStatus: map[NodeID]*NodeStatus{}, + } + p := NodePhaseSucceeded + msg := success + ns.UpdatePhase(p, n, msg, enableCRDebugMetadata, nil) + + assert.Nil(t, ns.LastUpdatedAt) + assert.Nil(t, ns.QueuedAt) + assert.Nil(t, ns.LastAttemptStartedAt) + assert.Nil(t, ns.StartedAt) + assert.Equal(t, *ns.StoppedAt, n) + assert.Equal(t, p, ns.Phase) + assert.Empty(t, ns.Message) + assert.Nil(t, ns.Error) + assert.Nil(t, ns.SubNodeStatus) + assert.Nil(t, ns.DynamicNodeStatus) + assert.Nil(t, ns.WorkflowNodeStatus) + assert.Nil(t, ns.BranchStatus) + assert.Nil(t, ns.TaskNodeStatus) + }) + + t.Run("non-terminal-preset", func(t *testing.T) { + ns := NodeStatus{ + QueuedAt: &n, + StartedAt: &n, + LastUpdatedAt: &n, + LastAttemptStartedAt: &n, + WorkflowNodeStatus: &WorkflowNodeStatus{}, + BranchStatus: &BranchNodeStatus{}, + DynamicNodeStatus: &DynamicNodeStatus{}, + TaskNodeStatus: &TaskNodeStatus{}, + SubNodeStatus: map[NodeID]*NodeStatus{}, + } + n2 := metav1.NewTime(time.Now()) + p := NodePhaseRunning + msg := "running" + ns.UpdatePhase(p, n2, msg, enableCRDebugMetadata, nil) + + assert.Equal(t, *ns.LastUpdatedAt, n2) + assert.Equal(t, *ns.QueuedAt, n) + assert.Equal(t, *ns.LastAttemptStartedAt, n) + assert.Equal(t, *ns.StartedAt, n) + assert.Nil(t, 
ns.StoppedAt) + assert.Equal(t, p, ns.Phase) + assert.Equal(t, msg, ns.Message) + assert.Nil(t, ns.Error) + assert.NotNil(t, ns.SubNodeStatus) + assert.NotNil(t, ns.DynamicNodeStatus) + assert.NotNil(t, ns.WorkflowNodeStatus) + assert.NotNil(t, ns.BranchStatus) + assert.NotNil(t, ns.TaskNodeStatus) + }) + } + + t.Run("terminal-fail", func(t *testing.T) { ns := NodeStatus{} - p := NodePhaseRunning - msg := "running" - ns.UpdatePhase(p, n, msg, nil) + p := NodePhaseFailed + msg := "failed" + err := &core.ExecutionError{} + ns.UpdatePhase(p, n, msg, true, err) assert.Equal(t, *ns.LastUpdatedAt, n) assert.Nil(t, ns.QueuedAt) assert.Equal(t, *ns.LastAttemptStartedAt, n) assert.Equal(t, *ns.StartedAt, n) - assert.Nil(t, ns.StoppedAt) + assert.Equal(t, *ns.StoppedAt, n) assert.Equal(t, p, ns.Phase) assert.Equal(t, msg, ns.Message) - assert.Nil(t, ns.Error) + assert.Equal(t, ns.Error.ExecutionError, err) }) - t.Run("terminal-fail", func(t *testing.T) { + t.Run("terminal-fail-clear-state-on-any-termination", func(t *testing.T) { ns := NodeStatus{} p := NodePhaseFailed msg := "failed" err := &core.ExecutionError{} - ns.UpdatePhase(p, n, msg, err) + ns.UpdatePhase(p, n, msg, false, err) - assert.Equal(t, *ns.LastUpdatedAt, n) + assert.Nil(t, ns.LastUpdatedAt) assert.Nil(t, ns.QueuedAt) - assert.Equal(t, *ns.LastAttemptStartedAt, n) - assert.Equal(t, *ns.StartedAt, n) + assert.Nil(t, ns.LastAttemptStartedAt) + assert.Nil(t, ns.StartedAt) assert.Equal(t, *ns.StoppedAt, n) assert.Equal(t, p, ns.Phase) - assert.Equal(t, msg, ns.Message) + assert.Equal(t, ns.Message, "") assert.Equal(t, ns.Error.ExecutionError, err) }) @@ -332,7 +463,7 @@ func TestNodeStatus_UpdatePhase(t *testing.T) { p := NodePhaseTimedOut msg := "tm" err := &core.ExecutionError{} - ns.UpdatePhase(p, n, msg, err) + ns.UpdatePhase(p, n, msg, true, err) assert.Equal(t, *ns.LastUpdatedAt, n) assert.Nil(t, ns.QueuedAt) @@ -344,54 +475,12 @@ func TestNodeStatus_UpdatePhase(t *testing.T) { assert.Equal(t, 
ns.Error.ExecutionError, err) }) - const success = "success" - t.Run("terminal-success", func(t *testing.T) { - ns := NodeStatus{} - p := NodePhaseSucceeded - msg := success - ns.UpdatePhase(p, n, msg, nil) - - assert.Nil(t, ns.LastUpdatedAt) - assert.Nil(t, ns.QueuedAt) - assert.Nil(t, ns.LastAttemptStartedAt) - assert.Nil(t, ns.StartedAt) - assert.Equal(t, *ns.StoppedAt, n) - assert.Equal(t, p, ns.Phase) - assert.Empty(t, ns.Message) - assert.Nil(t, ns.Error) - }) - - t.Run("terminal-skipped", func(t *testing.T) { + t.Run("terminal-timeout-clear-state-on-any-termination", func(t *testing.T) { ns := NodeStatus{} - p := NodePhaseSucceeded - msg := success - ns.UpdatePhase(p, n, msg, nil) - - assert.Nil(t, ns.LastUpdatedAt) - assert.Nil(t, ns.QueuedAt) - assert.Nil(t, ns.LastAttemptStartedAt) - assert.Nil(t, ns.StartedAt) - assert.Equal(t, *ns.StoppedAt, n) - assert.Equal(t, p, ns.Phase) - assert.Empty(t, ns.Message) - assert.Nil(t, ns.Error) - }) - - t.Run("terminal-success-preset", func(t *testing.T) { - ns := NodeStatus{ - QueuedAt: &n, - StartedAt: &n, - LastUpdatedAt: &n, - LastAttemptStartedAt: &n, - WorkflowNodeStatus: &WorkflowNodeStatus{}, - BranchStatus: &BranchNodeStatus{}, - DynamicNodeStatus: &DynamicNodeStatus{}, - TaskNodeStatus: &TaskNodeStatus{}, - SubNodeStatus: map[NodeID]*NodeStatus{}, - } - p := NodePhaseSucceeded - msg := success - ns.UpdatePhase(p, n, msg, nil) + p := NodePhaseTimedOut + msg := "tm" + err := &core.ExecutionError{} + ns.UpdatePhase(p, n, msg, false, err) assert.Nil(t, ns.LastUpdatedAt) assert.Nil(t, ns.QueuedAt) @@ -399,44 +488,7 @@ func TestNodeStatus_UpdatePhase(t *testing.T) { assert.Nil(t, ns.StartedAt) assert.Equal(t, *ns.StoppedAt, n) assert.Equal(t, p, ns.Phase) - assert.Empty(t, ns.Message) - assert.Nil(t, ns.Error) - assert.Nil(t, ns.SubNodeStatus) - assert.Nil(t, ns.DynamicNodeStatus) - assert.Nil(t, ns.WorkflowNodeStatus) - assert.Nil(t, ns.BranchStatus) - assert.Nil(t, ns.TaskNodeStatus) - }) - - 
t.Run("non-terminal-preset", func(t *testing.T) { - ns := NodeStatus{ - QueuedAt: &n, - StartedAt: &n, - LastUpdatedAt: &n, - LastAttemptStartedAt: &n, - WorkflowNodeStatus: &WorkflowNodeStatus{}, - BranchStatus: &BranchNodeStatus{}, - DynamicNodeStatus: &DynamicNodeStatus{}, - TaskNodeStatus: &TaskNodeStatus{}, - SubNodeStatus: map[NodeID]*NodeStatus{}, - } - n2 := metav1.NewTime(time.Now()) - p := NodePhaseRunning - msg := "running" - ns.UpdatePhase(p, n2, msg, nil) - - assert.Equal(t, *ns.LastUpdatedAt, n2) - assert.Equal(t, *ns.QueuedAt, n) - assert.Equal(t, *ns.LastAttemptStartedAt, n) - assert.Equal(t, *ns.StartedAt, n) - assert.Nil(t, ns.StoppedAt) - assert.Equal(t, p, ns.Phase) - assert.Equal(t, msg, ns.Message) - assert.Nil(t, ns.Error) - assert.NotNil(t, ns.SubNodeStatus) - assert.NotNil(t, ns.DynamicNodeStatus) - assert.NotNil(t, ns.WorkflowNodeStatus) - assert.NotNil(t, ns.BranchStatus) - assert.NotNil(t, ns.TaskNodeStatus) + assert.Equal(t, ns.Message, "") + assert.Equal(t, ns.Error.ExecutionError, err) }) } diff --git a/flytepropeller/pkg/controller/config/config.go b/flytepropeller/pkg/controller/config/config.go index 1afc986287..698883fb48 100644 --- a/flytepropeller/pkg/controller/config/config.go +++ b/flytepropeller/pkg/controller/config/config.go @@ -98,6 +98,7 @@ var ( InterruptibleFailureThreshold: -1, DefaultMaxAttempts: 1, IgnoreRetryCause: false, + EnableCRDebugMetadata: false, }, MaxStreakLength: 8, // Turbo mode is enabled by default ProfilerPort: config.Port{ @@ -213,6 +214,7 @@ type NodeConfig struct { InterruptibleFailureThreshold int32 `json:"interruptible-failure-threshold" pflag:"1,number of failures for a node to be still considered interruptible. Negative numbers are treated as complementary (ex. 
-1 means last attempt is non-interruptible).'"` DefaultMaxAttempts int32 `json:"default-max-attempts" pflag:"3,Default maximum number of attempts for a node"` IgnoreRetryCause bool `json:"ignore-retry-cause" pflag:",Ignore retry cause and count all attempts toward a node's max attempts"` + EnableCRDebugMetadata bool `json:"enable-cr-debug-metadata" pflag:",Collapse node on any terminal state, not just successful terminations. This is useful to reduce the size of workflow state in etcd."` } // DefaultDeadlines contains default values for timeouts diff --git a/flytepropeller/pkg/controller/config/config_flags.go b/flytepropeller/pkg/controller/config/config_flags.go index 07a4fba742..c60f724ee2 100755 --- a/flytepropeller/pkg/controller/config/config_flags.go +++ b/flytepropeller/pkg/controller/config/config_flags.go @@ -96,6 +96,7 @@ func (cfg Config) GetPFlagSet(prefix string) *pflag.FlagSet { cmdFlags.Int32(fmt.Sprintf("%v%v", prefix, "node-config.interruptible-failure-threshold"), defaultConfig.NodeConfig.InterruptibleFailureThreshold, "number of failures for a node to be still considered interruptible. Negative numbers are treated as complementary (ex. -1 means last attempt is non-interruptible).'") cmdFlags.Int32(fmt.Sprintf("%v%v", prefix, "node-config.default-max-attempts"), defaultConfig.NodeConfig.DefaultMaxAttempts, "Default maximum number of attempts for a node") cmdFlags.Bool(fmt.Sprintf("%v%v", prefix, "node-config.ignore-retry-cause"), defaultConfig.NodeConfig.IgnoreRetryCause, "Ignore retry cause and count all attempts toward a node's max attempts") + cmdFlags.Bool(fmt.Sprintf("%v%v", prefix, "node-config.enable-cr-debug-metadata"), defaultConfig.NodeConfig.EnableCRDebugMetadata, "Collapse node on any terminal state, not just successful terminations. 
This is useful to reduce the size of workflow state in etcd.") cmdFlags.Int(fmt.Sprintf("%v%v", prefix, "max-streak-length"), defaultConfig.MaxStreakLength, "Maximum number of consecutive rounds that one propeller worker can use for one workflow - >1 => turbo-mode is enabled.") cmdFlags.String(fmt.Sprintf("%v%v", prefix, "event-config.raw-output-policy"), defaultConfig.EventConfig.RawOutputPolicy, "How output data should be passed along in execution events.") cmdFlags.Bool(fmt.Sprintf("%v%v", prefix, "event-config.fallback-to-output-reference"), defaultConfig.EventConfig.FallbackToOutputReference, "Whether output data should be sent by reference when it is too large to be sent inline in execution events.") diff --git a/flytepropeller/pkg/controller/config/config_flags_test.go b/flytepropeller/pkg/controller/config/config_flags_test.go index 54da9e9fe1..6f3c67b652 100755 --- a/flytepropeller/pkg/controller/config/config_flags_test.go +++ b/flytepropeller/pkg/controller/config/config_flags_test.go @@ -743,6 +743,20 @@ func TestConfig_SetFlags(t *testing.T) { } }) }) + t.Run("Test_node-config.enable-cr-debug-metadata", func(t *testing.T) { + + t.Run("Override", func(t *testing.T) { + testValue := "1" + + cmdFlags.Set("node-config.enable-cr-debug-metadata", testValue) + if vBool, err := cmdFlags.GetBool("node-config.enable-cr-debug-metadata"); err == nil { + testDecodeJson_Config(t, fmt.Sprintf("%v", vBool), &actual.NodeConfig.EnableCRDebugMetadata) + + } else { + assert.FailNow(t, err.Error()) + } + }) + }) t.Run("Test_max-streak-length", func(t *testing.T) { t.Run("Override", func(t *testing.T) { diff --git a/flytepropeller/pkg/controller/controller.go b/flytepropeller/pkg/controller/controller.go index 6b36dc05db..de28612c54 100644 --- a/flytepropeller/pkg/controller/controller.go +++ b/flytepropeller/pkg/controller/controller.go @@ -324,10 +324,21 @@ func New(ctx context.Context, cfg *config.Config, kubeClientset kubernetes.Inter logger.Errorf(ctx, "failed to 
initialize Admin client, err :%s", err.Error()) return nil, err } + + sCfg := storage.GetConfig() + if sCfg == nil { + logger.Errorf(ctx, "Storage configuration missing.") + } + + store, err := storage.NewDataStore(sCfg, scope.NewSubScope("metastore")) + if err != nil { + return nil, errors.Wrapf(err, "Failed to create Metadata storage") + } + var launchPlanActor launchplan.FlyteAdmin if cfg.EnableAdminLauncher { launchPlanActor, err = launchplan.NewAdminLaunchPlanExecutor(ctx, adminClient, cfg.DownstreamEval.Duration, - launchplan.GetAdminConfig(), scope.NewSubScope("admin_launcher")) + launchplan.GetAdminConfig(), scope.NewSubScope("admin_launcher"), store) if err != nil { logger.Errorf(ctx, "failed to create Admin workflow Launcher, err: %v", err.Error()) return nil, err @@ -401,16 +412,6 @@ func New(ctx context.Context, cfg *config.Config, kubeClientset kubernetes.Inter flytek8s.DefaultPodTemplateStore.SetDefaultNamespace(podNamespace) - sCfg := storage.GetConfig() - if sCfg == nil { - logger.Errorf(ctx, "Storage configuration missing.") - } - - store, err := storage.NewDataStore(sCfg, scope.NewSubScope("metastore")) - if err != nil { - return nil, errors.Wrapf(err, "Failed to create Metadata storage") - } - logger.Info(ctx, "Setting up Catalog client.") catalogClient, err := catalog.NewCatalogClient(ctx, authOpts...) if err != nil { diff --git a/flytepropeller/pkg/controller/executors/kube.go b/flytepropeller/pkg/controller/executors/kube.go index acd4f5c4f3..d6d89e1711 100644 --- a/flytepropeller/pkg/controller/executors/kube.go +++ b/flytepropeller/pkg/controller/executors/kube.go @@ -9,6 +9,7 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client" "github.com/flyteorg/flyte/flytestdlib/fastcheck" + "github.com/flyteorg/flyte/flytestdlib/otelutils" "github.com/flyteorg/flyte/flytestdlib/promutils" ) @@ -23,76 +24,104 @@ type Client interface { GetCache() cache.Cache } -// ClientBuilder builder is the interface for the client builder. 
-type ClientBuilder interface { - // Build returns a new client. - Build(cache cache.Cache, config *rest.Config, options client.Options) (client.Client, error) -} +var NewCache = func(config *rest.Config, options cache.Options) (cache.Cache, error) { + k8sCache, err := cache.New(config, options) + if err != nil { + return k8sCache, err + } -type FallbackClientBuilder struct { - scope promutils.Scope + return otelutils.WrapK8sCache(k8sCache), nil } -func (f *FallbackClientBuilder) Build(_ cache.Cache, config *rest.Config, options client.Options) (client.Client, error) { - return client.New(config, options) -} +func BuildNewClientFunc(scope promutils.Scope) func(config *rest.Config, options client.Options) (client.Client, error) { + return func(config *rest.Config, options client.Options) (client.Client, error) { + var cacheReader client.Reader + cachelessOptions := options + if options.Cache != nil && options.Cache.Reader != nil { + cacheReader = options.Cache.Reader + cachelessOptions.Cache = nil + } -// NewFallbackClientBuilder Creates a new k8s client that uses the cached client for reads and falls back to making API -// calls if it failed. Write calls will always go to raw client directly. 
-func NewFallbackClientBuilder(scope promutils.Scope) *FallbackClientBuilder { - return &FallbackClientBuilder{ - scope: scope, + kubeClient, err := client.New(config, cachelessOptions) + if err != nil { + return nil, err + } + + return newFlyteK8sClient(kubeClient, cacheReader, scope) } } -type writeThroughCachingWriter struct { +type flyteK8sClient struct { client.Client - filter fastcheck.Filter + cacheReader client.Reader + writeFilter fastcheck.Filter } -func IDFromObject(obj client.Object, op string) []byte { - return []byte(fmt.Sprintf("%s:%s:%s:%s", obj.GetObjectKind().GroupVersionKind().String(), obj.GetNamespace(), obj.GetName(), op)) +func (f flyteK8sClient) Get(ctx context.Context, key client.ObjectKey, out client.Object, opts ...client.GetOption) (err error) { + if f.cacheReader != nil { + if err = f.cacheReader.Get(ctx, key, out, opts...); err == nil { + return nil + } + } + + return f.Client.Get(ctx, key, out, opts...) +} + +func (f flyteK8sClient) List(ctx context.Context, list client.ObjectList, opts ...client.ListOption) (err error) { + if f.cacheReader != nil { + if err = f.cacheReader.List(ctx, list, opts...); err == nil { + return nil + } + } + + return f.Client.List(ctx, list, opts...) } // Create first checks the local cache if the object with id was previously successfully saved, if not then // saves the object obj in the Kubernetes cluster -func (w writeThroughCachingWriter) Create(ctx context.Context, obj client.Object, opts ...client.CreateOption) error { +func (f flyteK8sClient) Create(ctx context.Context, obj client.Object, opts ...client.CreateOption) error { // "c" represents create - id := IDFromObject(obj, "c") - if w.filter.Contains(ctx, id) { + id := idFromObject(obj, "c") + if f.writeFilter.Contains(ctx, id) { return nil } - err := w.Client.Create(ctx, obj, opts...) + err := f.Client.Create(ctx, obj, opts...) 
if err != nil { return err } - w.filter.Add(ctx, id) + f.writeFilter.Add(ctx, id) return nil } // Delete first checks the local cache if the object with id was previously successfully deleted, if not then // deletes the given obj from Kubernetes cluster. -func (w writeThroughCachingWriter) Delete(ctx context.Context, obj client.Object, opts ...client.DeleteOption) error { +func (f flyteK8sClient) Delete(ctx context.Context, obj client.Object, opts ...client.DeleteOption) error { // "d" represents delete - id := IDFromObject(obj, "d") - if w.filter.Contains(ctx, id) { + id := idFromObject(obj, "d") + if f.writeFilter.Contains(ctx, id) { return nil } - err := w.Client.Delete(ctx, obj, opts...) + err := f.Client.Delete(ctx, obj, opts...) if err != nil { return err } - w.filter.Add(ctx, id) + f.writeFilter.Add(ctx, id) return nil } -func newWriteThroughCachingWriter(c client.Client, cacheSize int, scope promutils.Scope) (writeThroughCachingWriter, error) { - filter, err := fastcheck.NewOppoBloomFilter(cacheSize, scope.NewSubScope("kube_filter")) +func idFromObject(obj client.Object, op string) []byte { + return []byte(fmt.Sprintf("%s:%s:%s:%s", obj.GetObjectKind().GroupVersionKind().String(), obj.GetNamespace(), obj.GetName(), op)) +} + +func newFlyteK8sClient(kubeClient client.Client, cacheReader client.Reader, scope promutils.Scope) (flyteK8sClient, error) { + writeFilter, err := fastcheck.NewOppoBloomFilter(50000, scope.NewSubScope("kube_filter")) if err != nil { - return writeThroughCachingWriter{}, err + return flyteK8sClient{}, err } - return writeThroughCachingWriter{ - Client: c, - filter: filter, + + return flyteK8sClient{ + Client: kubeClient, + cacheReader: cacheReader, + writeFilter: writeFilter, }, nil } diff --git a/flytepropeller/pkg/controller/executors/kube_test.go b/flytepropeller/pkg/controller/executors/kube_test.go index bcaa64ff6f..4d84d3fb08 100644 --- a/flytepropeller/pkg/controller/executors/kube_test.go +++ 
b/flytepropeller/pkg/controller/executors/kube_test.go @@ -2,13 +2,14 @@ package executors import ( "context" - "fmt" "reflect" "testing" "github.com/stretchr/testify/assert" v1 "k8s.io/api/core/v1" + k8serrors "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" "sigs.k8s.io/controller-runtime/pkg/client" "github.com/flyteorg/flyte/flytestdlib/contextutils" @@ -45,42 +46,46 @@ func TestIdFromObject(t *testing.T) { APIVersion: "v1", }, } - if got := IDFromObject(p, tt.args.op); !reflect.DeepEqual(got, []byte(tt.want)) { - t.Errorf("IDFromObject() = %s, want %s", string(got), tt.want) + if got := idFromObject(p, tt.args.op); !reflect.DeepEqual(got, []byte(tt.want)) { + t.Errorf("idFromObject() = %s, want %s", string(got), tt.want) } }) } } -type singleInvokeClient struct { +type mockKubeClient struct { client.Client - createCalled bool - deleteCalled bool + createCalledCount int + deleteCalledCount int + getCalledCount int + getMissCount int } -func (f *singleInvokeClient) Create(ctx context.Context, obj client.Object, opts ...client.CreateOption) error { - if f.createCalled { - return fmt.Errorf("create called more than once") - } - f.createCalled = true +func (m *mockKubeClient) Create(ctx context.Context, obj client.Object, opts ...client.CreateOption) error { + m.createCalledCount++ + return nil +} + +func (m *mockKubeClient) Delete(ctx context.Context, obj client.Object, opts ...client.DeleteOption) error { + m.deleteCalledCount++ return nil } -func (f *singleInvokeClient) Delete(ctx context.Context, obj client.Object, opts ...client.DeleteOption) error { - if f.deleteCalled { - return fmt.Errorf("delete called more than once") +func (m *mockKubeClient) Get(ctx context.Context, objectKey types.NamespacedName, obj client.Object, opts ...client.GetOption) error { + if m.getCalledCount < m.getMissCount { + m.getMissCount-- + return k8serrors.NewNotFound(v1.Resource("pod"), "name") } - f.deleteCalled = 
true + + m.getCalledCount++ return nil } -func TestWriteThroughCachingWriter_Create(t *testing.T) { +func TestFlyteK8sClient(t *testing.T) { ctx := context.TODO() - c := &singleInvokeClient{} - w, err := newWriteThroughCachingWriter(c, 1000, promutils.NewTestScope()) - assert.NoError(t, err) + scope := promutils.NewTestScope() - p := &v1.Pod{ + pod := &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", Name: "name", @@ -91,39 +96,73 @@ func TestWriteThroughCachingWriter_Create(t *testing.T) { }, } - err = w.Create(ctx, p) - assert.NoError(t, err) + objectKey := types.NamespacedName{ + Namespace: pod.Namespace, + Name: pod.Name, + } - assert.True(t, c.createCalled) + // test cache reader + tests := []struct { + name string + initCacheReader bool + cacheMissCount int + expectedClientGetCount int + }{ + {"no-cache", false, 0, 2}, + {"with-cache-one-miss", true, 1, 1}, + {"with-cache-no-misses", true, 0, 0}, + } - err = w.Create(ctx, p) - assert.NoError(t, err) -} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var cacheReader client.Reader + if tt.initCacheReader { + cacheReader = &mockKubeClient{ + getMissCount: tt.cacheMissCount, + } + } -func TestWriteThroughCachingWriter_Delete(t *testing.T) { - ctx := context.TODO() - c := &singleInvokeClient{} - w, err := newWriteThroughCachingWriter(c, 1000, promutils.NewTestScope()) - assert.NoError(t, err) + kubeClient := &mockKubeClient{} - p := &v1.Pod{ - ObjectMeta: metav1.ObjectMeta{ - Namespace: "ns", - Name: "name", - }, - TypeMeta: metav1.TypeMeta{ - Kind: "pod", - APIVersion: "v1", - }, - } + flyteK8sClient, err := newFlyteK8sClient(kubeClient, cacheReader, scope.NewSubScope(tt.name)) + assert.NoError(t, err) - err = w.Delete(ctx, p) - assert.NoError(t, err) + for i := 0; i < 2; i++ { + err := flyteK8sClient.Get(ctx, objectKey, pod) + assert.NoError(t, err) + } - assert.True(t, c.deleteCalled) + assert.Equal(t, tt.expectedClientGetCount, kubeClient.getCalledCount) + }) + } - err = 
w.Delete(ctx, p) - assert.NoError(t, err) + // test create + t.Run("create", func(t *testing.T) { + kubeClient := &mockKubeClient{} + flyteK8sClient, err := newFlyteK8sClient(kubeClient, nil, scope.NewSubScope("create")) + assert.NoError(t, err) + + for i := 0; i < 5; i++ { + err = flyteK8sClient.Create(ctx, pod) + assert.NoError(t, err) + } + + assert.Equal(t, 1, kubeClient.createCalledCount) + }) + + // test delete + t.Run("delete", func(t *testing.T) { + kubeClient := &mockKubeClient{} + flyteK8sClient, err := newFlyteK8sClient(kubeClient, nil, scope.NewSubScope("delete")) + assert.NoError(t, err) + + for i := 0; i < 5; i++ { + err = flyteK8sClient.Delete(ctx, pod) + assert.NoError(t, err) + } + + assert.Equal(t, 1, kubeClient.deleteCalledCount) + }) } func init() { diff --git a/flytepropeller/pkg/controller/executors/mocks/client_builder.go b/flytepropeller/pkg/controller/executors/mocks/client_builder.go deleted file mode 100644 index 3180f480fd..0000000000 --- a/flytepropeller/pkg/controller/executors/mocks/client_builder.go +++ /dev/null @@ -1,100 +0,0 @@ -// Code generated by mockery v1.0.1. DO NOT EDIT. 
- -package mocks - -import ( - cache "sigs.k8s.io/controller-runtime/pkg/cache" - client "sigs.k8s.io/controller-runtime/pkg/client" - - executors "github.com/flyteorg/flyte/flytepropeller/pkg/controller/executors" - - mock "github.com/stretchr/testify/mock" - - rest "k8s.io/client-go/rest" -) - -// ClientBuilder is an autogenerated mock type for the ClientBuilder type -type ClientBuilder struct { - mock.Mock -} - -type ClientBuilder_Build struct { - *mock.Call -} - -func (_m ClientBuilder_Build) Return(_a0 client.Client, _a1 error) *ClientBuilder_Build { - return &ClientBuilder_Build{Call: _m.Call.Return(_a0, _a1)} -} - -func (_m *ClientBuilder) OnBuild(_a0 cache.Cache, config *rest.Config, options client.Options) *ClientBuilder_Build { - c_call := _m.On("Build", _a0, config, options) - return &ClientBuilder_Build{Call: c_call} -} - -func (_m *ClientBuilder) OnBuildMatch(matchers ...interface{}) *ClientBuilder_Build { - c_call := _m.On("Build", matchers...) - return &ClientBuilder_Build{Call: c_call} -} - -// Build provides a mock function with given fields: _a0, config, options -func (_m *ClientBuilder) Build(_a0 cache.Cache, config *rest.Config, options client.Options) (client.Client, error) { - ret := _m.Called(_a0, config, options) - - var r0 client.Client - if rf, ok := ret.Get(0).(func(cache.Cache, *rest.Config, client.Options) client.Client); ok { - r0 = rf(_a0, config, options) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(client.Client) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(cache.Cache, *rest.Config, client.Options) error); ok { - r1 = rf(_a0, config, options) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -type ClientBuilder_WithUncached struct { - *mock.Call -} - -func (_m ClientBuilder_WithUncached) Return(_a0 executors.ClientBuilder) *ClientBuilder_WithUncached { - return &ClientBuilder_WithUncached{Call: _m.Call.Return(_a0)} -} - -func (_m *ClientBuilder) OnWithUncached(objs ...client.Object) 
*ClientBuilder_WithUncached { - c_call := _m.On("WithUncached", objs) - return &ClientBuilder_WithUncached{Call: c_call} -} - -func (_m *ClientBuilder) OnWithUncachedMatch(matchers ...interface{}) *ClientBuilder_WithUncached { - c_call := _m.On("WithUncached", matchers...) - return &ClientBuilder_WithUncached{Call: c_call} -} - -// WithUncached provides a mock function with given fields: objs -func (_m *ClientBuilder) WithUncached(objs ...client.Object) executors.ClientBuilder { - _va := make([]interface{}, len(objs)) - for _i := range objs { - _va[_i] = objs[_i] - } - var _ca []interface{} - _ca = append(_ca, _va...) - ret := _m.Called(_ca...) - - var r0 executors.ClientBuilder - if rf, ok := ret.Get(0).(func(...client.Object) executors.ClientBuilder); ok { - r0 = rf(objs...) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(executors.ClientBuilder) - } - } - - return r0 -} diff --git a/flytepropeller/pkg/controller/garbage_collector.go b/flytepropeller/pkg/controller/garbage_collector.go index 373da8a155..e02e1beca2 100644 --- a/flytepropeller/pkg/controller/garbage_collector.go +++ b/flytepropeller/pkg/controller/garbage_collector.go @@ -169,7 +169,7 @@ func (g *GarbageCollector) StartGC(ctx context.Context) error { func NewGarbageCollector(cfg *config.Config, scope promutils.Scope, clk clock.WithTicker, namespaceClient corev1.NamespaceInterface, wfClient v1alpha1.FlyteworkflowV1alpha1Interface) (*GarbageCollector, error) { ttl := 23 - if cfg.MaxTTLInHours < 23 { + if cfg.MaxTTLInHours <= 23 { ttl = cfg.MaxTTLInHours } else { logger.Warningf(context.TODO(), "defaulting max ttl for workflows to 23 hours, since configured duration is larger than 23 [%d]", cfg.MaxTTLInHours) diff --git a/flytepropeller/pkg/controller/handler.go b/flytepropeller/pkg/controller/handler.go index cc6b18e819..94e8ab6c12 100644 --- a/flytepropeller/pkg/controller/handler.go +++ b/flytepropeller/pkg/controller/handler.go @@ -385,11 +385,24 @@ func (p *Propeller) streak(ctx 
context.Context, w *v1alpha1.FlyteWorkflow, wfClo // Workflow is too large, we will mark the workflow as failing and record it. This will automatically // propagate the failure in the next round. mutableW := w.DeepCopy() - mutableW.Status.UpdatePhase(v1alpha1.WorkflowPhaseFailing, "Workflow size has breached threshold, aborting", &core.ExecutionError{ - Kind: core.ExecutionError_SYSTEM, - Code: "WorkflowTooLarge", - Message: "Workflow execution state is too large for Flyte to handle.", - }) + // catch potential indefinite update loop + if mutatedWf.GetExecutionStatus().IsTerminated() { + ResetFinalizers(mutableW) + SetDefinitionVersionIfEmpty(mutableW, v1alpha1.LatestWorkflowDefinitionVersion) + SetCompletedLabel(mutableW, time.Now()) + msg := fmt.Sprintf("Workflow size has breached threshold. Finalized with status: %v", mutatedWf.GetExecutionStatus().GetPhase()) + mutableW.Status.UpdatePhase(v1alpha1.WorkflowPhaseFailed, msg, &core.ExecutionError{ + Kind: core.ExecutionError_SYSTEM, + Code: "WorkflowTooLarge", + Message: "Workflow execution state is too large for Flyte to handle.", + }) + } else { + mutableW.Status.UpdatePhase(v1alpha1.WorkflowPhaseFailing, "Workflow size has breached threshold, aborting", &core.ExecutionError{ + Kind: core.ExecutionError_SYSTEM, + Code: "WorkflowTooLarge", + Message: "Workflow execution state is too large for Flyte to handle.", + }) + } if _, e := p.wfStore.Update(ctx, mutableW, workflowstore.PriorityClassCritical); e != nil { logger.Errorf(ctx, "Failed recording a large workflow as failed, reason: %s. 
Retrying...", e) return nil, e diff --git a/flytepropeller/pkg/controller/handler_test.go b/flytepropeller/pkg/controller/handler_test.go index ce1ca63818..3469c1da80 100644 --- a/flytepropeller/pkg/controller/handler_test.go +++ b/flytepropeller/pkg/controller/handler_test.go @@ -815,8 +815,36 @@ func TestNewPropellerHandler_UpdateFailure(t *testing.T) { } s.OnGetMatch(mock.Anything, mock.Anything, mock.Anything).Return(wf, nil) s.On("Update", mock.Anything, mock.Anything, mock.Anything).Return(nil, errors.Wrap(workflowstore.ErrWorkflowToLarge, "too large")).Once() - s.On("Update", mock.Anything, mock.Anything, mock.Anything).Return(nil, nil).Once() + s.On("Update", mock.Anything, mock.MatchedBy(func(w *v1alpha1.FlyteWorkflow) bool { + return w.Status.Phase == v1alpha1.WorkflowPhaseFailing + }), mock.Anything).Return(nil, nil).Once() + err := p.Handle(ctx, namespace, name) + assert.NoError(t, err) + }) + t.Run("too-large-terminal", func(t *testing.T) { + scope := promutils.NewTestScope() + s := &mocks.FlyteWorkflow{} + exec := &mockExecutor{} + p := NewPropellerHandler(ctx, cfg, nil, s, exec, scope) + wf := &v1alpha1.FlyteWorkflow{ + ObjectMeta: v1.ObjectMeta{ + Name: name, + Namespace: namespace, + }, + WorkflowSpec: &v1alpha1.WorkflowSpec{ + ID: "w1", + }, + } + exec.HandleCb = func(ctx context.Context, w *v1alpha1.FlyteWorkflow) error { + w.GetExecutionStatus().UpdatePhase(v1alpha1.WorkflowPhaseFailed, "done", nil) + return nil + } + s.OnGetMatch(mock.Anything, mock.Anything, mock.Anything).Return(wf, nil) + s.On("Update", mock.Anything, mock.Anything, mock.Anything).Return(nil, errors.Wrap(workflowstore.ErrWorkflowToLarge, "too large")).Once() + s.On("Update", mock.Anything, mock.MatchedBy(func(w *v1alpha1.FlyteWorkflow) bool { + return w.Status.Phase == v1alpha1.WorkflowPhaseFailed && !HasFinalizer(w) && HasCompletedLabel(w) + }), mock.Anything).Return(nil, nil).Once() err := p.Handle(ctx, namespace, name) assert.NoError(t, err) }) diff --git 
a/flytepropeller/pkg/controller/nodes/array/event_recorder.go b/flytepropeller/pkg/controller/nodes/array/event_recorder.go index c4e3004011..c2e0c96ed8 100644 --- a/flytepropeller/pkg/controller/nodes/array/event_recorder.go +++ b/flytepropeller/pkg/controller/nodes/array/event_recorder.go @@ -3,21 +3,24 @@ package array import ( "context" "fmt" + "strconv" "time" "github.com/golang/protobuf/ptypes" idlcore "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event" + "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/encoding" "github.com/flyteorg/flyte/flytepropeller/pkg/apis/flyteworkflow/v1alpha1" "github.com/flyteorg/flyte/flytepropeller/pkg/controller/config" "github.com/flyteorg/flyte/flytepropeller/pkg/controller/nodes/common" "github.com/flyteorg/flyte/flytepropeller/pkg/controller/nodes/interfaces" + "github.com/flyteorg/flyte/flytepropeller/pkg/controller/nodes/task" ) type arrayEventRecorder interface { interfaces.EventRecorder - process(ctx context.Context, nCtx interfaces.NodeExecutionContext, index int, retryAttempt uint32) + process(ctx context.Context, nCtx interfaces.NodeExecutionContext, index int, retryAttempt uint32) error finalize(ctx context.Context, nCtx interfaces.NodeExecutionContext, taskPhase idlcore.TaskExecution_Phase, taskPhaseVersion uint32, eventConfig *config.EventConfig) error finalizeRequired(ctx context.Context) bool } @@ -39,8 +42,23 @@ func (e *externalResourcesEventRecorder) RecordTaskEvent(ctx context.Context, ev return nil } -func (e *externalResourcesEventRecorder) process(ctx context.Context, nCtx interfaces.NodeExecutionContext, index int, retryAttempt uint32) { - externalResourceID := fmt.Sprintf("%s-%d", buildSubNodeID(nCtx, index), retryAttempt) +func (e *externalResourcesEventRecorder) process(ctx context.Context, nCtx interfaces.NodeExecutionContext, index int, retryAttempt uint32) error { + // generate externalResourceID + 
currentNodeUniqueID := nCtx.NodeID() + if nCtx.ExecutionContext().GetEventVersion() != v1alpha1.EventVersion0 { + var err error + currentNodeUniqueID, err = common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), nCtx.NodeID()) + if err != nil { + return err + } + } + + uniqueID, err := encoding.FixedLengthUniqueIDForParts(task.IDMaxLength, []string{nCtx.NodeExecutionMetadata().GetOwnerID().Name, currentNodeUniqueID, strconv.Itoa(int(retryAttempt))}) + if err != nil { + return err + } + + externalResourceID := fmt.Sprintf("%s-n%d-%d", uniqueID, index, retryAttempt) // process events cacheStatus := idlcore.CatalogCacheStatus_CACHE_DISABLED @@ -83,6 +101,8 @@ func (e *externalResourcesEventRecorder) process(ctx context.Context, nCtx inter // clear nodeEvents and taskEvents e.nodeEvents = e.nodeEvents[:0] e.taskEvents = e.taskEvents[:0] + + return nil } func (e *externalResourcesEventRecorder) finalize(ctx context.Context, nCtx interfaces.NodeExecutionContext, @@ -94,6 +114,17 @@ func (e *externalResourcesEventRecorder) finalize(ctx context.Context, nCtx inte return err } + var taskID *idlcore.Identifier + subNode := nCtx.Node().GetArrayNode().GetSubNodeSpec() + if subNode != nil && subNode.Kind == v1alpha1.NodeKindTask { + executableTask, err := nCtx.ExecutionContext().GetTask(*subNode.GetTaskID()) + if err != nil { + return err + } + + taskID = executableTask.CoreTask().GetId() + } + nodeExecutionID := *nCtx.NodeExecutionMetadata().GetNodeExecutionID() if nCtx.ExecutionContext().GetEventVersion() != v1alpha1.EventVersion0 { currentNodeUniqueID, err := common.GenerateUniqueID(nCtx.ExecutionContext().GetParentInfo(), nodeExecutionID.NodeId) @@ -103,16 +134,8 @@ func (e *externalResourcesEventRecorder) finalize(ctx context.Context, nCtx inte nodeExecutionID.NodeId = currentNodeUniqueID } - workflowExecutionID := nodeExecutionID.ExecutionId - taskExecutionEvent := &event.TaskExecutionEvent{ - TaskId: &idlcore.Identifier{ - ResourceType: 
idlcore.ResourceType_TASK, - Project: workflowExecutionID.Project, - Domain: workflowExecutionID.Domain, - Name: nCtx.NodeID(), - Version: "v1", // this value is irrelevant but necessary for the identifier to be valid - }, + TaskId: taskID, ParentNodeExecutionId: &nodeExecutionID, RetryAttempt: 0, // ArrayNode will never retry Phase: taskPhase, @@ -120,9 +143,9 @@ func (e *externalResourcesEventRecorder) finalize(ctx context.Context, nCtx inte OccurredAt: occurredAt, Metadata: &event.TaskExecutionMetadata{ ExternalResources: e.externalResources, - PluginIdentifier: "container", + PluginIdentifier: "k8s-array", }, - TaskType: "k8s-array", + TaskType: "container_array", EventVersion: 1, } @@ -165,7 +188,8 @@ type passThroughEventRecorder struct { interfaces.EventRecorder } -func (*passThroughEventRecorder) process(ctx context.Context, nCtx interfaces.NodeExecutionContext, index int, retryAttempt uint32) { +func (*passThroughEventRecorder) process(ctx context.Context, nCtx interfaces.NodeExecutionContext, index int, retryAttempt uint32) error { + return nil } func (*passThroughEventRecorder) finalize(ctx context.Context, nCtx interfaces.NodeExecutionContext, diff --git a/flytepropeller/pkg/controller/nodes/array/handler.go b/flytepropeller/pkg/controller/nodes/array/handler.go index 06a693334e..72b2c511eb 100644 --- a/flytepropeller/pkg/controller/nodes/array/handler.go +++ b/flytepropeller/pkg/controller/nodes/array/handler.go @@ -98,7 +98,9 @@ func (a *arrayNodeHandler) Abort(ctx context.Context, nCtx interfaces.NodeExecut logger.Warnf(ctx, "failed to record ArrayNode events: %v", err) } - eventRecorder.process(ctx, nCtx, i, retryAttempt) + if err := eventRecorder.process(ctx, nCtx, i, retryAttempt); err != nil { + logger.Warnf(ctx, "failed to record ArrayNode events: %v", err) + } } } } @@ -241,7 +243,9 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu logger.Warnf(ctx, "failed to record ArrayNode events: %v", err) } - 
eventRecorder.process(ctx, nCtx, i, 0) + if err := eventRecorder.process(ctx, nCtx, i, 0); err != nil { + logger.Warnf(ctx, "failed to record ArrayNode events: %v", err) + } } // transition ArrayNode to `ArrayNodePhaseExecuting` @@ -331,7 +335,9 @@ func (a *arrayNodeHandler) Handle(ctx context.Context, nCtx interfaces.NodeExecu } } } - eventRecorder.process(ctx, nCtx, index, subNodeStatus.GetAttempts()) + if err := eventRecorder.process(ctx, nCtx, index, subNodeStatus.GetAttempts()); err != nil { + return handler.UnknownTransition, err + } // update subNode state arrayNodeState.SubNodePhases.SetItem(index, uint64(subNodeStatus.GetPhase())) diff --git a/flytepropeller/pkg/controller/nodes/array/handler_test.go b/flytepropeller/pkg/controller/nodes/array/handler_test.go index b0328250ab..f4107c1f11 100644 --- a/flytepropeller/pkg/controller/nodes/array/handler_test.go +++ b/flytepropeller/pkg/controller/nodes/array/handler_test.go @@ -7,6 +7,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" + "k8s.io/apimachinery/pkg/types" idlcore "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/event" @@ -145,6 +146,10 @@ func createNodeExecutionContext(dataStore *storage.DataStore, eventRecorder inte Name: "name", }, }) + nodeExecutionMetadata.OnGetOwnerID().Return(types.NamespacedName{ + Namespace: "wf-namespace", + Name: "wf-name", + }) nCtx.OnNodeExecutionMetadata().Return(nodeExecutionMetadata) // NodeID diff --git a/flytepropeller/pkg/controller/nodes/array/mocks/array_event_recorder.go b/flytepropeller/pkg/controller/nodes/array/mocks/array_event_recorder.go index b51ba86931..7e235daa25 100644 --- a/flytepropeller/pkg/controller/nodes/array/mocks/array_event_recorder.go +++ b/flytepropeller/pkg/controller/nodes/array/mocks/array_event_recorder.go @@ -149,7 +149,34 @@ func (_m *arrayEventRecorder) finalizeRequired(ctx context.Context) bool { return r0 } +type 
arrayEventRecorder_process struct { + *mock.Call +} + +func (_m arrayEventRecorder_process) Return(_a0 error) *arrayEventRecorder_process { + return &arrayEventRecorder_process{Call: _m.Call.Return(_a0)} +} + +func (_m *arrayEventRecorder) Onprocess(ctx context.Context, nCtx interfaces.NodeExecutionContext, index int, retryAttempt uint32) *arrayEventRecorder_process { + c_call := _m.On("process", ctx, nCtx, index, retryAttempt) + return &arrayEventRecorder_process{Call: c_call} +} + +func (_m *arrayEventRecorder) OnprocessMatch(matchers ...interface{}) *arrayEventRecorder_process { + c_call := _m.On("process", matchers...) + return &arrayEventRecorder_process{Call: c_call} +} + // process provides a mock function with given fields: ctx, nCtx, index, retryAttempt -func (_m *arrayEventRecorder) process(ctx context.Context, nCtx interfaces.NodeExecutionContext, index int, retryAttempt uint32) { - _m.Called(ctx, nCtx, index, retryAttempt) +func (_m *arrayEventRecorder) process(ctx context.Context, nCtx interfaces.NodeExecutionContext, index int, retryAttempt uint32) error { + ret := _m.Called(ctx, nCtx, index, retryAttempt) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, interfaces.NodeExecutionContext, int, uint32) error); ok { + r0 = rf(ctx, nCtx, index, retryAttempt) + } else { + r0 = ret.Error(0) + } + + return r0 } diff --git a/flytepropeller/pkg/controller/nodes/branch/evaluator.go b/flytepropeller/pkg/controller/nodes/branch/evaluator.go index 44dc8711e8..4bc1676745 100644 --- a/flytepropeller/pkg/controller/nodes/branch/evaluator.go +++ b/flytepropeller/pkg/controller/nodes/branch/evaluator.go @@ -129,7 +129,9 @@ func DecideBranch(ctx context.Context, nl executors.NodeLookup, nodeID v1alpha1. 
} nStatus := nl.GetNodeExecutionStatus(ctx, n.GetID()) logger.Infof(ctx, "Branch Setting Node[%v] status to Skipped!", skippedNodeID) - nStatus.UpdatePhase(v1alpha1.NodePhaseSkipped, v1.Now(), "Branch evaluated to false", nil) + // We hard code enableCRDebugMetadata=true because it has no effect when setting phase to + // NodePhaseSkipped. This saves us passing the config all the way down from the nodeExecutor. + nStatus.UpdatePhase(v1alpha1.NodePhaseSkipped, v1.Now(), "Branch evaluated to false", true, nil) } if selectedNodeID == nil { diff --git a/flytepropeller/pkg/controller/nodes/executor.go b/flytepropeller/pkg/controller/nodes/executor.go index 8e96ee9645..0c98aeeb5f 100644 --- a/flytepropeller/pkg/controller/nodes/executor.go +++ b/flytepropeller/pkg/controller/nodes/executor.go @@ -475,6 +475,7 @@ func (c *recursiveNodeExecutor) WithNodeExecutionContextBuilder(nCtxBuilder inte type nodeExecutor struct { catalog catalog.Client clusterID string + enableCRDebugMetadata bool defaultActiveDeadline time.Duration defaultDataSandbox storage.DataReference defaultExecutionDeadline time.Duration @@ -582,7 +583,7 @@ func (c *nodeExecutor) attemptRecovery(ctx context.Context, nCtx interfaces.Node state.PreviousNodeExecutionCheckpointURI = storage.DataReference(metadata.TaskNodeMetadata.CheckpointUri) err = nCtx.NodeStateWriter().PutTaskNodeState(state) if err != nil { - logger.Warn(ctx, "failed to save recovered checkpoint uri for [%+v]: [%+v]", + logger.Warnf(ctx, "failed to save recovered checkpoint uri for [%+v]: [%+v]", nCtx.NodeExecutionMetadata().GetNodeExecutionID(), err) } } @@ -1005,7 +1006,7 @@ func (c *nodeExecutor) handleNotYetStartedNode(ctx context.Context, dag executor logger.Warningf(ctx, "Failed to record nodeEvent, error [%s]", err.Error()) return interfaces.NodeStatusUndefined, errors.Wrapf(errors.EventRecordingFailed, nCtx.NodeID(), err, "failed to record node event") } - UpdateNodeStatus(np, p, nCtx.NodeStateReader(), nodeStatus) + 
UpdateNodeStatus(np, p, nCtx.NodeStateReader(), nodeStatus, c.enableCRDebugMetadata) c.RecordTransitionLatency(ctx, dag, nCtx.ContextualNodeLookup(), nCtx.Node(), nodeStatus) } @@ -1271,7 +1272,7 @@ func (c *nodeExecutor) handleQueuedOrRunningNode(ctx context.Context, nCtx inter } } - UpdateNodeStatus(np, p, nCtx.NodeStateReader(), nodeStatus) + UpdateNodeStatus(np, p, nCtx.NodeStateReader(), nodeStatus, c.enableCRDebugMetadata) return finalStatus, nil } @@ -1285,7 +1286,7 @@ func (c *nodeExecutor) handleRetryableFailure(ctx context.Context, nCtx interfac // NOTE: It is important to increment attempts only after abort has been called. Increment attempt mutates the state // Attempt is used throughout the system to determine the idempotent resource version. nodeStatus.IncrementAttempts() - nodeStatus.UpdatePhase(v1alpha1.NodePhaseRunning, metav1.Now(), "retrying", nil) + nodeStatus.UpdatePhase(v1alpha1.NodePhaseRunning, metav1.Now(), "retrying", c.enableCRDebugMetadata, nil) // We are going to retry in the next round, so we should clear all current state nodeStatus.ClearSubNodeStatus() nodeStatus.ClearTaskStatus() @@ -1324,8 +1325,14 @@ func (c *nodeExecutor) HandleNode(ctx context.Context, dag executors.DAGStructur if err := c.Abort(ctx, h, nCtx, "node failing", false); err != nil { return interfaces.NodeStatusUndefined, err } - nodeStatus.UpdatePhase(v1alpha1.NodePhaseFailed, metav1.Now(), nodeStatus.GetMessage(), nodeStatus.GetExecutionError()) - c.metrics.FailureDuration.Observe(ctx, nodeStatus.GetStartedAt().Time, nodeStatus.GetStoppedAt().Time) + t := metav1.Now() + + startedAt := nodeStatus.GetStartedAt() + if startedAt == nil { + startedAt = &t + } + nodeStatus.UpdatePhase(v1alpha1.NodePhaseFailed, t, nodeStatus.GetMessage(), c.enableCRDebugMetadata, nodeStatus.GetExecutionError()) + c.metrics.FailureDuration.Observe(ctx, startedAt.Time, nodeStatus.GetStoppedAt().Time) if nCtx.NodeExecutionMetadata().IsInterruptible() { 
c.metrics.InterruptibleNodesTerminated.Inc(ctx) } @@ -1338,8 +1345,7 @@ func (c *nodeExecutor) HandleNode(ctx context.Context, dag executors.DAGStructur return interfaces.NodeStatusUndefined, err } - nodeStatus.ClearSubNodeStatus() - nodeStatus.UpdatePhase(v1alpha1.NodePhaseTimedOut, metav1.Now(), nodeStatus.GetMessage(), nodeStatus.GetExecutionError()) + nodeStatus.UpdatePhase(v1alpha1.NodePhaseTimedOut, metav1.Now(), nodeStatus.GetMessage(), c.enableCRDebugMetadata, nodeStatus.GetExecutionError()) c.metrics.TimedOutFailure.Inc(ctx) if nCtx.NodeExecutionMetadata().IsInterruptible() { c.metrics.InterruptibleNodesTerminated.Inc(ctx) @@ -1363,8 +1369,7 @@ func (c *nodeExecutor) HandleNode(ctx context.Context, dag executors.DAGStructur stopped = &t } c.metrics.SuccessDuration.Observe(ctx, started.Time, stopped.Time) - nodeStatus.ClearSubNodeStatus() - nodeStatus.UpdatePhase(v1alpha1.NodePhaseSucceeded, t, "completed successfully", nil) + nodeStatus.UpdatePhase(v1alpha1.NodePhaseSucceeded, t, "completed successfully", c.enableCRDebugMetadata, nil) if nCtx.NodeExecutionMetadata().IsInterruptible() { c.metrics.InterruptibleNodesTerminated.Inc(ctx) } @@ -1431,6 +1436,7 @@ func NewExecutor(ctx context.Context, nodeConfig config.NodeConfig, store *stora nodeExecutor := &nodeExecutor{ catalog: catalogClient, clusterID: clusterID, + enableCRDebugMetadata: nodeConfig.EnableCRDebugMetadata, defaultActiveDeadline: nodeConfig.DefaultDeadlines.DefaultNodeActiveDeadline.Duration, defaultDataSandbox: defaultRawOutputPrefix, defaultExecutionDeadline: nodeConfig.DefaultDeadlines.DefaultNodeExecutionDeadline.Duration, diff --git a/flytepropeller/pkg/controller/nodes/executor_test.go b/flytepropeller/pkg/controller/nodes/executor_test.go index 222e0a05d5..2bc552bab0 100644 --- a/flytepropeller/pkg/controller/nodes/executor_test.go +++ b/flytepropeller/pkg/controller/nodes/executor_test.go @@ -565,7 +565,7 @@ func TestNodeExecutor_RecursiveNodeHandler_Recurse(t *testing.T) { }, } - 
setupNodePhase := func(n0Phase, n2Phase, expectedN2Phase v1alpha1.NodePhase) (*mocks.ExecutableWorkflow, *mocks.ExecutableNodeStatus) { + setupNodePhase := func(n0Phase, n2Phase, expectedN2Phase v1alpha1.NodePhase, expectedClearStateOnAnyTermination bool) (*mocks.ExecutableWorkflow, *mocks.ExecutableNodeStatus) { taskID := "id" taskID0 := "id1" // Setup @@ -582,7 +582,7 @@ func TestNodeExecutor_RecursiveNodeHandler_Recurse(t *testing.T) { mockN2Status.OnGetStoppedAt().Return(nil) var ee *core.ExecutionError - mockN2Status.On("UpdatePhase", expectedN2Phase, mock.Anything, mock.AnythingOfType("string"), ee) + mockN2Status.On("UpdatePhase", expectedN2Phase, mock.Anything, mock.AnythingOfType("string"), expectedClearStateOnAnyTermination, ee) mockN2Status.OnIsDirty().Return(false) mockN2Status.OnGetTaskNodeStatus().Return(nil) mockN2Status.On("ClearDynamicNodeStatus").Return(nil) @@ -659,17 +659,21 @@ func TestNodeExecutor_RecursiveNodeHandler_Recurse(t *testing.T) { } tests := []struct { - name string - currentNodePhase v1alpha1.NodePhase - parentNodePhase v1alpha1.NodePhase - expectedNodePhase v1alpha1.NodePhase - expectedPhase interfaces.NodePhase - expectedError bool - updateCalled bool + name string + currentNodePhase v1alpha1.NodePhase + parentNodePhase v1alpha1.NodePhase + enableCRDebugMetadata bool + expectedNodePhase v1alpha1.NodePhase + expectedPhase interfaces.NodePhase + expectedError bool + updateCalled bool }{ - {"notYetStarted->skipped", v1alpha1.NodePhaseNotYetStarted, v1alpha1.NodePhaseFailed, v1alpha1.NodePhaseSkipped, interfaces.NodePhaseFailed, false, false}, - {"notYetStarted->skipped", v1alpha1.NodePhaseNotYetStarted, v1alpha1.NodePhaseSkipped, v1alpha1.NodePhaseSkipped, interfaces.NodePhaseSuccess, false, true}, - {"notYetStarted->queued", v1alpha1.NodePhaseNotYetStarted, v1alpha1.NodePhaseSucceeded, v1alpha1.NodePhaseQueued, interfaces.NodePhasePending, false, true}, + {"notYetStarted->skipped", v1alpha1.NodePhaseNotYetStarted, 
v1alpha1.NodePhaseFailed, false, v1alpha1.NodePhaseSkipped, interfaces.NodePhaseFailed, false, false}, + {"notYetStarted->skipped", v1alpha1.NodePhaseNotYetStarted, v1alpha1.NodePhaseSkipped, false, v1alpha1.NodePhaseSkipped, interfaces.NodePhaseSuccess, false, true}, + {"notYetStarted->queued", v1alpha1.NodePhaseNotYetStarted, v1alpha1.NodePhaseSucceeded, false, v1alpha1.NodePhaseQueued, interfaces.NodePhasePending, false, true}, + {"notYetStarted->skipped enableCRDebugMetadata", v1alpha1.NodePhaseNotYetStarted, v1alpha1.NodePhaseFailed, true, v1alpha1.NodePhaseSkipped, interfaces.NodePhaseFailed, false, false}, + {"notYetStarted->skipped enableCRDebugMetadata", v1alpha1.NodePhaseNotYetStarted, v1alpha1.NodePhaseSkipped, true, v1alpha1.NodePhaseSkipped, interfaces.NodePhaseSuccess, false, true}, + {"notYetStarted->queued enableCRDebugMetadata", v1alpha1.NodePhaseNotYetStarted, v1alpha1.NodePhaseSucceeded, true, v1alpha1.NodePhaseQueued, interfaces.NodePhasePending, false, true}, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { @@ -684,12 +688,14 @@ func TestNodeExecutor_RecursiveNodeHandler_Recurse(t *testing.T) { h.OnFinalizeRequired().Return(false) hf.OnGetHandler(v1alpha1.NodeKindTask).Return(h, nil) - mockWf, _ := setupNodePhase(test.parentNodePhase, test.currentNodePhase, test.expectedNodePhase) + mockWf, _ := setupNodePhase(test.parentNodePhase, test.currentNodePhase, test.expectedNodePhase, test.enableCRDebugMetadata) startNode := mockWf.StartNode() store := createInmemoryDataStore(t, promutils.NewTestScope()) adminClient := launchplan.NewFailFastLaunchPlanExecutor() - execIface, err := NewExecutor(ctx, config.GetConfig().NodeConfig, store, enQWf, mockEventSink, adminClient, adminClient, + nodeConfig := config.GetConfig().NodeConfig + nodeConfig.EnableCRDebugMetadata = test.enableCRDebugMetadata + execIface, err := NewExecutor(ctx, nodeConfig, store, enQWf, mockEventSink, adminClient, adminClient, 10, "s3://bucket", fakeKubeClient, 
catalogClient, recoveryClient, eventConfig, testClusterID, signalClient, hf, promutils.NewTestScope()) assert.NoError(t, err) exec := execIface.(*recursiveNodeExecutor) @@ -1329,7 +1335,7 @@ func TestNodeExecutor_RecursiveNodeHandler_BranchNode(t *testing.T) { if test.phaseUpdateExpected { var ee *core.ExecutionError - branchTakeNodeStatus.On("UpdatePhase", v1alpha1.NodePhaseQueued, mock.Anything, mock.Anything, ee).Return() + branchTakeNodeStatus.On("UpdatePhase", v1alpha1.NodePhaseQueued, mock.Anything, mock.Anything, false, ee).Return() } leafDag := executors.NewLeafNodeDAGStructure(branchTakenNodeID, parentBranchNodeID) diff --git a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin.go b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin.go index 0c552560a9..29de745acf 100644 --- a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin.go +++ b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin.go @@ -19,6 +19,7 @@ import ( stdErr "github.com/flyteorg/flyte/flytestdlib/errors" "github.com/flyteorg/flyte/flytestdlib/logger" "github.com/flyteorg/flyte/flytestdlib/promutils" + "github.com/flyteorg/flyte/flytestdlib/storage" ) var isRecovery = true @@ -33,6 +34,7 @@ func IsWorkflowTerminated(p core.WorkflowExecution_Phase) bool { type adminLaunchPlanExecutor struct { adminClient service.AdminServiceClient cache cache.AutoRefresh + store *storage.DataStore } type executionCacheItem struct { @@ -146,7 +148,7 @@ func (a *adminLaunchPlanExecutor) Launch(ctx context.Context, launchCtx LaunchCo _, err = a.cache.GetOrCreate(executionID.String(), executionCacheItem{WorkflowExecutionIdentifier: *executionID}) if err != nil { - logger.Info(ctx, "Failed to add ExecID [%v] to auto refresh cache", executionID) + logger.Infof(ctx, "Failed to add ExecID [%v] to auto refresh cache", executionID) } return nil @@ -258,7 +260,7 @@ func (a *adminLaunchPlanExecutor) syncItem(ctx context.Context, batch cache.Batc continue } - var 
outputs *core.LiteralMap + var outputs = &core.LiteralMap{} // Retrieve potential outputs only when the workflow succeeded. // TODO: We can optimize further by only retrieving the outputs when the workflow has output variables in the // interface. @@ -266,21 +268,31 @@ func (a *adminLaunchPlanExecutor) syncItem(ctx context.Context, batch cache.Batc execData, err := a.adminClient.GetExecutionData(ctx, &admin.WorkflowExecutionGetDataRequest{ Id: &exec.WorkflowExecutionIdentifier, }) + if err != nil || execData.GetFullOutputs() == nil || execData.GetFullOutputs().GetLiterals() == nil { + outputURI := res.GetClosure().GetOutputs().GetUri() + // attempt remote storage read on GetExecutionData failure + if outputURI != "" { + err = a.store.ReadProtobuf(ctx, storage.DataReference(outputURI), outputs) + if err != nil { + logger.Errorf(ctx, "Failed to read outputs from URI [%s] with err: %v", outputURI, err) + } + } + if err != nil { + resp = append(resp, cache.ItemSyncResponse{ + ID: obj.GetID(), + Item: executionCacheItem{ + WorkflowExecutionIdentifier: exec.WorkflowExecutionIdentifier, + SyncError: err, + }, + Action: cache.Update, + }) + + continue + } - if err != nil { - resp = append(resp, cache.ItemSyncResponse{ - ID: obj.GetID(), - Item: executionCacheItem{ - WorkflowExecutionIdentifier: exec.WorkflowExecutionIdentifier, - SyncError: err, - }, - Action: cache.Update, - }) - - continue + } else { + outputs = execData.GetFullOutputs() } - - outputs = execData.GetFullOutputs() } // Update the cache with the retrieved status @@ -299,9 +311,10 @@ func (a *adminLaunchPlanExecutor) syncItem(ctx context.Context, batch cache.Batc } func NewAdminLaunchPlanExecutor(_ context.Context, client service.AdminServiceClient, - syncPeriod time.Duration, cfg *AdminConfig, scope promutils.Scope) (FlyteAdmin, error) { + syncPeriod time.Duration, cfg *AdminConfig, scope promutils.Scope, store *storage.DataStore) (FlyteAdmin, error) { exec := &adminLaunchPlanExecutor{ adminClient: client, 
+ store: store, } rateLimiter := &workqueue.BucketRateLimiter{Limiter: rate.NewLimiter(rate.Limit(cfg.TPS), cfg.Burst)} diff --git a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin_test.go b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin_test.go index 0af8d1eb16..1f359c1d6a 100644 --- a/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin_test.go +++ b/flytepropeller/pkg/controller/nodes/subworkflow/launchplan/admin_test.go @@ -12,11 +12,16 @@ import ( "google.golang.org/grpc/status" "github.com/flyteorg/flyte/flyteidl/clients/go/admin/mocks" + "github.com/flyteorg/flyte/flyteidl/clients/go/coreutils" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/admin" "github.com/flyteorg/flyte/flyteidl/gen/pb-go/flyteidl/core" "github.com/flyteorg/flyte/flytestdlib/cache" mocks2 "github.com/flyteorg/flyte/flytestdlib/cache/mocks" + "github.com/flyteorg/flyte/flytestdlib/contextutils" "github.com/flyteorg/flyte/flytestdlib/promutils" + "github.com/flyteorg/flyte/flytestdlib/promutils/labeled" + "github.com/flyteorg/flyte/flytestdlib/storage" + storageMocks "github.com/flyteorg/flyte/flytestdlib/storage/mocks" ) func TestAdminLaunchPlanExecutor_GetStatus(t *testing.T) { @@ -28,9 +33,12 @@ func TestAdminLaunchPlanExecutor_GetStatus(t *testing.T) { } var result *admin.ExecutionClosure + memStore, err := storage.NewDataStore(&storage.Config{Type: storage.TypeMemory}, promutils.NewTestScope()) + assert.NoError(t, err) + t.Run("happy", func(t *testing.T) { mockClient := &mocks.AdminServiceClient{} - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Millisecond, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Millisecond, defaultAdminConfig, promutils.NewTestScope(), memStore) assert.NoError(t, err) mockClient.On("GetExecution", ctx, @@ -42,25 +50,6 @@ func TestAdminLaunchPlanExecutor_GetStatus(t *testing.T) { assert.Equal(t, result, s) }) - 
t.Run("terminal-sync", func(t *testing.T) { - mockClient := &mocks.AdminServiceClient{} - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Millisecond, defaultAdminConfig, promutils.NewTestScope()) - assert.NoError(t, err) - iwMock := &mocks2.ItemWrapper{} - i := executionCacheItem{ExecutionClosure: &admin.ExecutionClosure{Phase: core.WorkflowExecution_SUCCEEDED, WorkflowId: &core.Identifier{Project: "p"}}} - iwMock.OnGetItem().Return(i) - iwMock.OnGetID().Return("id") - adminExec := exec.(*adminLaunchPlanExecutor) - v, err := adminExec.syncItem(ctx, cache.Batch{ - iwMock, - }) - assert.NoError(t, err) - assert.NotNil(t, v) - assert.Len(t, v, 1) - assert.Equal(t, v[0].ID, "id") - assert.Equal(t, v[0].Item, i) - }) - t.Run("notFound", func(t *testing.T) { mockClient := &mocks.AdminServiceClient{} @@ -76,7 +65,7 @@ func TestAdminLaunchPlanExecutor_GetStatus(t *testing.T) { mock.MatchedBy(func(o *admin.WorkflowExecutionGetRequest) bool { return true }), ).Return(nil, status.Error(codes.NotFound, "")) - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Millisecond, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Millisecond, defaultAdminConfig, promutils.NewTestScope(), memStore) assert.NoError(t, err) assert.NoError(t, exec.Initialize(ctx)) @@ -122,7 +111,7 @@ func TestAdminLaunchPlanExecutor_GetStatus(t *testing.T) { mock.MatchedBy(func(o *admin.WorkflowExecutionGetRequest) bool { return true }), ).Return(nil, status.Error(codes.Canceled, "")) - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Millisecond, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Millisecond, defaultAdminConfig, promutils.NewTestScope(), memStore) assert.NoError(t, err) assert.NoError(t, exec.Initialize(ctx)) @@ -162,11 +151,13 @@ func TestAdminLaunchPlanExecutor_Launch(t *testing.T) { Domain: "d", Project: "p", } + memStore, err := 
storage.NewDataStore(&storage.Config{Type: storage.TypeMemory}, promutils.NewTestScope()) + assert.NoError(t, err) t.Run("happy", func(t *testing.T) { mockClient := &mocks.AdminServiceClient{} - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope(), memStore) mockClient.On("CreateExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionCreateRequest) bool { @@ -204,7 +195,7 @@ func TestAdminLaunchPlanExecutor_Launch(t *testing.T) { Name: "orig", }, } - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope(), memStore) mockClient.On("RecoverExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionRecoverRequest) bool { @@ -240,7 +231,7 @@ func TestAdminLaunchPlanExecutor_Launch(t *testing.T) { Name: "orig", }, } - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope(), memStore) assert.NoError(t, err) recoveryErr := status.Error(codes.NotFound, "foo") @@ -282,7 +273,7 @@ func TestAdminLaunchPlanExecutor_Launch(t *testing.T) { t.Run("notFound", func(t *testing.T) { mockClient := &mocks.AdminServiceClient{} - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope(), memStore) mockClient.On("CreateExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionCreateRequest) bool { return true }), @@ -310,7 +301,7 @@ func TestAdminLaunchPlanExecutor_Launch(t *testing.T) { 
t.Run("other", func(t *testing.T) { mockClient := &mocks.AdminServiceClient{} - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope(), memStore) mockClient.On("CreateExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionCreateRequest) bool { return true }), @@ -343,12 +334,14 @@ func TestAdminLaunchPlanExecutor_Kill(t *testing.T) { Domain: "d", Project: "p", } + memStore, err := storage.NewDataStore(&storage.Config{Type: storage.TypeMemory}, promutils.NewTestScope()) + assert.NoError(t, err) const reason = "reason" t.Run("happy", func(t *testing.T) { mockClient := &mocks.AdminServiceClient{} - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope(), memStore) mockClient.On("TerminateExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionTerminateRequest) bool { return o.Id == id && o.Cause == reason }), @@ -361,7 +354,7 @@ func TestAdminLaunchPlanExecutor_Kill(t *testing.T) { t.Run("notFound", func(t *testing.T) { mockClient := &mocks.AdminServiceClient{} - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope(), memStore) mockClient.On("TerminateExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionTerminateRequest) bool { return o.Id == id && o.Cause == reason }), @@ -374,7 +367,7 @@ func TestAdminLaunchPlanExecutor_Kill(t *testing.T) { t.Run("other", func(t *testing.T) { mockClient := &mocks.AdminServiceClient{} - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope()) + exec, 
err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope(), memStore) mockClient.On("TerminateExecution", ctx, mock.MatchedBy(func(o *admin.ExecutionTerminateRequest) bool { return o.Id == id && o.Cause == reason }), @@ -395,10 +388,12 @@ func TestNewAdminLaunchPlanExecutor_GetLaunchPlan(t *testing.T) { Project: "p", Version: "v", } + memStore, err := storage.NewDataStore(&storage.Config{Type: storage.TypeMemory}, promutils.NewTestScope()) + assert.NoError(t, err) t.Run("launch plan found", func(t *testing.T) { mockClient := &mocks.AdminServiceClient{} - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope(), memStore) assert.NoError(t, err) mockClient.OnGetLaunchPlanMatch( ctx, @@ -411,7 +406,7 @@ func TestNewAdminLaunchPlanExecutor_GetLaunchPlan(t *testing.T) { t.Run("launch plan not found", func(t *testing.T) { mockClient := &mocks.AdminServiceClient{} - exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope()) + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Second, defaultAdminConfig, promutils.NewTestScope(), memStore) assert.NoError(t, err) mockClient.OnGetLaunchPlanMatch( ctx, @@ -423,6 +418,165 @@ func TestNewAdminLaunchPlanExecutor_GetLaunchPlan(t *testing.T) { }) } +type test struct { + name string + cacheItem executionCacheItem + getExecutionResp *admin.Execution + getExecutionError error + getExecutionDataResp *admin.WorkflowExecutionGetDataResponse + getExecutionDataError error + storageReadError error + expectSuccess bool + expectError bool + expectedOutputs *core.LiteralMap + expectedErrorContains string +} + +func TestAdminLaunchPlanExecutorScenarios(t *testing.T) { + ctx := context.TODO() + + mockExecutionRespWithOutputs := &admin.Execution{ + Closure: 
&admin.ExecutionClosure{ + Phase: core.WorkflowExecution_SUCCEEDED, + OutputResult: &admin.ExecutionClosure_Outputs{ + Outputs: &admin.LiteralMapBlob{ + Data: &admin.LiteralMapBlob_Uri{ + Uri: "s3://foo/bar", + }, + }, + }, + }, + } + mockExecutionRespWithoutOutputs := &admin.Execution{ + Closure: &admin.ExecutionClosure{ + Phase: core.WorkflowExecution_SUCCEEDED, + }, + } + outputLiteral := &core.LiteralMap{ + Literals: map[string]*core.Literal{ + "foo-1": coreutils.MustMakeLiteral("foo-value-1"), + }, + } + + tests := []test{ + { + name: "terminal-sync", + expectSuccess: true, + cacheItem: executionCacheItem{ + ExecutionClosure: &admin.ExecutionClosure{ + Phase: core.WorkflowExecution_SUCCEEDED, + WorkflowId: &core.Identifier{Project: "p"}}, + ExecutionOutputs: outputLiteral, + }, + expectedOutputs: outputLiteral, + }, + { + name: "GetExecution-fails", + expectError: true, + cacheItem: executionCacheItem{}, + getExecutionError: status.Error(codes.NotFound, ""), + expectedErrorContains: RemoteErrorNotFound, + }, + { + name: "GetExecution-fails-system", + expectError: true, + cacheItem: executionCacheItem{}, + getExecutionError: status.Error(codes.Internal, ""), + expectedErrorContains: RemoteErrorSystem, + }, + { + name: "GetExecutionData-succeeds", + expectSuccess: true, + cacheItem: executionCacheItem{}, + expectedOutputs: outputLiteral, + getExecutionDataResp: &admin.WorkflowExecutionGetDataResponse{ + FullOutputs: outputLiteral, + }, + getExecutionDataError: nil, + getExecutionResp: mockExecutionRespWithOutputs, + }, + { + name: "GetExecutionData-error-no-retry", + expectError: true, + cacheItem: executionCacheItem{}, + getExecutionDataError: status.Error(codes.NotFound, ""), + expectedErrorContains: codes.NotFound.String(), + getExecutionResp: mockExecutionRespWithoutOutputs, + }, + { + name: "GetExecutionData-error-retry-fails", + expectError: true, + cacheItem: executionCacheItem{}, + getExecutionDataError: status.Error(codes.NotFound, ""), + 
storageReadError: status.Error(codes.Internal, ""), + expectedErrorContains: codes.Internal.String(), + getExecutionResp: mockExecutionRespWithOutputs, + }, + { + name: "GetExecutionData-empty-retry-fails", + expectError: true, + cacheItem: executionCacheItem{}, + getExecutionDataResp: &admin.WorkflowExecutionGetDataResponse{ + FullOutputs: &core.LiteralMap{}, + }, + getExecutionDataError: nil, + storageReadError: status.Error(codes.Internal, ""), + expectedErrorContains: codes.Internal.String(), + getExecutionResp: mockExecutionRespWithOutputs, + }, + { + name: "GetExecutionData-empty-retry-succeeds", + expectSuccess: true, + cacheItem: executionCacheItem{}, + getExecutionDataResp: &admin.WorkflowExecutionGetDataResponse{ + FullOutputs: &core.LiteralMap{}, + }, + getExecutionDataError: nil, + expectedOutputs: &core.LiteralMap{}, + getExecutionResp: mockExecutionRespWithOutputs, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + mockClient := &mocks.AdminServiceClient{} + pbStore := &storageMocks.ComposedProtobufStore{} + pbStore.On("ReadProtobuf", mock.Anything, mock.Anything, mock.Anything).Return(tc.storageReadError) + storageClient := &storage.DataStore{ + ComposedProtobufStore: pbStore, + ReferenceConstructor: &storageMocks.ReferenceConstructor{}, + } + exec, err := NewAdminLaunchPlanExecutor(ctx, mockClient, time.Millisecond, defaultAdminConfig, promutils.NewTestScope(), storageClient) + assert.NoError(t, err) + + iwMock := &mocks2.ItemWrapper{} + i := tc.cacheItem + iwMock.OnGetItem().Return(i) + iwMock.OnGetID().Return("id") + + mockClient.On("GetExecution", mock.Anything, mock.Anything).Return(tc.getExecutionResp, tc.getExecutionError) + mockClient.On("GetExecutionData", mock.Anything, mock.Anything).Return(tc.getExecutionDataResp, tc.getExecutionDataError) + + adminExec := exec.(*adminLaunchPlanExecutor) + + v, err := adminExec.syncItem(ctx, cache.Batch{iwMock}) + assert.NoError(t, err) + assert.Len(t, v, 1) + item, ok := 
v[0].Item.(executionCacheItem) + assert.True(t, ok) + + if tc.expectSuccess { + assert.Nil(t, item.SyncError) + assert.Equal(t, tc.expectedOutputs, item.ExecutionOutputs) + } + if tc.expectError { + assert.NotNil(t, item.SyncError) + assert.Contains(t, item.SyncError.Error(), tc.expectedErrorContains) + } + }) + } +} + func TestIsWorkflowTerminated(t *testing.T) { assert.True(t, IsWorkflowTerminated(core.WorkflowExecution_SUCCEEDED)) assert.True(t, IsWorkflowTerminated(core.WorkflowExecution_ABORTED)) @@ -434,3 +588,7 @@ func TestIsWorkflowTerminated(t *testing.T) { assert.False(t, IsWorkflowTerminated(core.WorkflowExecution_QUEUED)) assert.False(t, IsWorkflowTerminated(core.WorkflowExecution_UNDEFINED)) } + +func init() { + labeled.SetMetricKeys(contextutils.ProjectKey, contextutils.DomainKey, contextutils.WorkflowIDKey, contextutils.TaskIDKey) +} diff --git a/flytepropeller/pkg/controller/nodes/subworkflow/subworkflow.go b/flytepropeller/pkg/controller/nodes/subworkflow/subworkflow.go index ee90fe581e..d71f0b7e20 100644 --- a/flytepropeller/pkg/controller/nodes/subworkflow/subworkflow.go +++ b/flytepropeller/pkg/controller/nodes/subworkflow/subworkflow.go @@ -88,6 +88,25 @@ func (s *subworkflowHandler) handleSubWorkflow(ctx context.Context, nCtx interfa return handler.DoTransition(handler.TransitionTypeEphemeral, handler.PhaseInfoRunning(nil)), nil } + if state.HasTimedOut() { + workflowNodeState := handler.WorkflowNodeState{ + Phase: v1alpha1.WorkflowNodePhaseFailing, + Error: &core.ExecutionError{ + Kind: core.ExecutionError_USER, + Code: "Timeout", + Message: "Timeout in node", + }, + } + + err = nCtx.NodeStateWriter().PutWorkflowNodeState(workflowNodeState) + if err != nil { + logger.Warnf(ctx, "failed to store failing subworkflow state with err: [%v]", err) + return handler.UnknownTransition, err + } + + return handler.DoTransition(handler.TransitionTypeEphemeral, handler.PhaseInfoRunning(nil)), nil + } + if state.IsComplete() { // If the WF interface has 
outputs, validate that the outputs file was written. var oInfo *handler.OutputInfo diff --git a/flytepropeller/pkg/controller/nodes/task/backoff/handler.go b/flytepropeller/pkg/controller/nodes/task/backoff/handler.go index 757c2420f8..fc890c7a09 100644 --- a/flytepropeller/pkg/controller/nodes/task/backoff/handler.go +++ b/flytepropeller/pkg/controller/nodes/task/backoff/handler.go @@ -49,7 +49,7 @@ func (b *SimpleBackOffBlocker) reset() { } func (b *SimpleBackOffBlocker) backOff(ctx context.Context) time.Duration { - logger.Debug(ctx, "BackOff params [BackOffBaseSecond: %v] [BackOffExponent: %v] [MaxBackOffDuration: %v]", + logger.Debugf(ctx, "BackOff params [BackOffBaseSecond: %v] [BackOffExponent: %v] [MaxBackOffDuration: %v]", b.BackOffBaseSecond, b.BackOffExponent, b.MaxBackOffDuration) backOffDuration := time.Duration(time.Second.Nanoseconds() * int64(math.Pow(float64(b.BackOffBaseSecond), diff --git a/flytepropeller/pkg/controller/nodes/task/k8s/plugin_manager_test.go b/flytepropeller/pkg/controller/nodes/task/k8s/plugin_manager_test.go index a96536e7ac..2c7fda0f6e 100644 --- a/flytepropeller/pkg/controller/nodes/task/k8s/plugin_manager_test.go +++ b/flytepropeller/pkg/controller/nodes/task/k8s/plugin_manager_test.go @@ -200,6 +200,7 @@ func getMockTaskExecutionMetadata() pluginsCore.TaskExecutionMetadata { taskExecutionMetadata.On("GetAnnotations").Return(map[string]string{"aKey": "aVal"}) taskExecutionMetadata.On("GetLabels").Return(map[string]string{"lKey": "lVal"}) taskExecutionMetadata.On("GetOwnerReference").Return(metav1.OwnerReference{Name: "x"}) + taskExecutionMetadata.On("GetSecurityContext").Return(core.SecurityContext{RunAs: &core.Identity{}}) id := &pluginsCoreMock.TaskExecutionID{} id.On("GetGeneratedName").Return("test") diff --git a/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context.go b/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context.go index 14984e3f97..17bbce5398 100644 --- 
a/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context.go +++ b/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context.go @@ -7,6 +7,8 @@ import ( "github.com/flyteorg/flyte/flyteplugins/go/tasks/pluginmachinery/utils/secrets" ) +const executionIdentityVariable = "execution-identity" + // TaskExecutionContext provides a layer on top of core TaskExecutionContext with a custom TaskExecutionMetadata. type TaskExecutionContext struct { pluginsCore.TaskExecutionContext @@ -42,25 +44,28 @@ func (t TaskExecutionMetadata) GetAnnotations() map[string]string { } // newTaskExecutionMetadata creates a TaskExecutionMetadata with secrets serialized as annotations and a label added -// to trigger the flyte pod webhook +// to trigger the flyte pod webhook. If known, the execution identity is injected as a label. func newTaskExecutionMetadata(tCtx pluginsCore.TaskExecutionMetadata, taskTmpl *core.TaskTemplate) (TaskExecutionMetadata, error) { var err error secretsMap := make(map[string]string) - injectSecretsLabel := make(map[string]string) + injectLabels := make(map[string]string) if taskTmpl.SecurityContext != nil && len(taskTmpl.SecurityContext.Secrets) > 0 { secretsMap, err = secrets.MarshalSecretsToMapStrings(taskTmpl.SecurityContext.Secrets) if err != nil { return TaskExecutionMetadata{}, err } - injectSecretsLabel = map[string]string{ - secrets.PodLabel: secrets.PodLabelValue, - } + injectLabels[secrets.PodLabel] = secrets.PodLabelValue + } + + id := tCtx.GetSecurityContext().RunAs.ExecutionIdentity + if len(id) > 0 { + injectLabels[executionIdentityVariable] = id } return TaskExecutionMetadata{ TaskExecutionMetadata: tCtx, annotations: utils.UnionMaps(tCtx.GetAnnotations(), secretsMap), - labels: utils.UnionMaps(tCtx.GetLabels(), injectSecretsLabel), + labels: utils.UnionMaps(tCtx.GetLabels(), injectLabels), }, nil } diff --git a/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context_test.go 
b/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context_test.go index 8807236bff..bf9ca1eadb 100644 --- a/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context_test.go +++ b/flytepropeller/pkg/controller/nodes/task/k8s/task_exec_context_test.go @@ -21,6 +21,7 @@ func Test_newTaskExecutionMetadata(t *testing.T) { "existingLabel": "existingLabelValue", } existingMetadata.OnGetLabels().Return(existingLabels) + existingMetadata.OnGetSecurityContext().Return(core.SecurityContext{RunAs: &core.Identity{}}) actual, err := newTaskExecutionMetadata(existingMetadata, &core.TaskTemplate{}) assert.NoError(t, err) @@ -40,6 +41,7 @@ func Test_newTaskExecutionMetadata(t *testing.T) { "existingLabel": "existingLabelValue", } existingMetadata.OnGetLabels().Return(existingLabels) + existingMetadata.OnGetSecurityContext().Return(core.SecurityContext{RunAs: &core.Identity{}}) actual, err := newTaskExecutionMetadata(existingMetadata, &core.TaskTemplate{ SecurityContext: &core.SecurityContext{ @@ -64,6 +66,26 @@ func Test_newTaskExecutionMetadata(t *testing.T) { "inject-flyte-secrets": "true", }, actual.GetLabels()) }) + + t.Run("Inject exec identity", func(t *testing.T) { + + existingMetadata := &mocks.TaskExecutionMetadata{} + existingAnnotations := map[string]string{} + existingMetadata.OnGetAnnotations().Return(existingAnnotations) + + existingMetadata.OnGetSecurityContext().Return(core.SecurityContext{RunAs: &core.Identity{ExecutionIdentity: "test-exec-identity"}}) + + existingLabels := map[string]string{ + "existingLabel": "existingLabelValue", + } + existingMetadata.OnGetLabels().Return(existingLabels) + + actual, err := newTaskExecutionMetadata(existingMetadata, &core.TaskTemplate{}) + assert.NoError(t, err) + + assert.Equal(t, 2, len(actual.GetLabels())) + assert.Equal(t, "test-exec-identity", actual.GetLabels()[executionIdentityVariable]) + }) } func Test_newTaskExecutionContext(t *testing.T) { diff --git 
a/flytepropeller/pkg/controller/nodes/task/taskexec_context.go b/flytepropeller/pkg/controller/nodes/task/taskexec_context.go index 4a6f9750a2..25b936a8e4 100644 --- a/flytepropeller/pkg/controller/nodes/task/taskexec_context.go +++ b/flytepropeller/pkg/controller/nodes/task/taskexec_context.go @@ -197,14 +197,12 @@ func convertTaskResourcesToRequirements(taskResources v1alpha1.TaskResources) *v v1.ResourceCPU: taskResources.Requests.CPU, v1.ResourceMemory: taskResources.Requests.Memory, v1.ResourceEphemeralStorage: taskResources.Requests.EphemeralStorage, - v1.ResourceStorage: taskResources.Requests.Storage, utils.ResourceNvidiaGPU: taskResources.Requests.GPU, }, Limits: v1.ResourceList{ v1.ResourceCPU: taskResources.Limits.CPU, v1.ResourceMemory: taskResources.Limits.Memory, v1.ResourceEphemeralStorage: taskResources.Limits.EphemeralStorage, - v1.ResourceStorage: taskResources.Limits.Storage, utils.ResourceNvidiaGPU: taskResources.Limits.GPU, }, } diff --git a/flytepropeller/pkg/controller/nodes/task/taskexec_context_test.go b/flytepropeller/pkg/controller/nodes/task/taskexec_context_test.go index b0106e8ff9..84245cea8f 100644 --- a/flytepropeller/pkg/controller/nodes/task/taskexec_context_test.go +++ b/flytepropeller/pkg/controller/nodes/task/taskexec_context_test.go @@ -381,14 +381,12 @@ func TestConvertTaskResourcesToRequirements(t *testing.T) { CPU: resource.MustParse("1"), Memory: resource.MustParse("2"), EphemeralStorage: resource.MustParse("3"), - Storage: resource.MustParse("4"), GPU: resource.MustParse("5"), }, Limits: v1alpha1.TaskResourceSpec{ CPU: resource.MustParse("10"), Memory: resource.MustParse("20"), EphemeralStorage: resource.MustParse("30"), - Storage: resource.MustParse("40"), GPU: resource.MustParse("50"), }, }) @@ -397,14 +395,12 @@ func TestConvertTaskResourcesToRequirements(t *testing.T) { corev1.ResourceCPU: resource.MustParse("1"), corev1.ResourceMemory: resource.MustParse("2"), corev1.ResourceEphemeralStorage: resource.MustParse("3"), 
- corev1.ResourceStorage: resource.MustParse("4"), utils.ResourceNvidiaGPU: resource.MustParse("5"), }, Limits: corev1.ResourceList{ corev1.ResourceCPU: resource.MustParse("10"), corev1.ResourceMemory: resource.MustParse("20"), corev1.ResourceEphemeralStorage: resource.MustParse("30"), - corev1.ResourceStorage: resource.MustParse("40"), utils.ResourceNvidiaGPU: resource.MustParse("50"), }, }, resourceRequirements) diff --git a/flytepropeller/pkg/controller/nodes/transformers.go b/flytepropeller/pkg/controller/nodes/transformers.go index 8c0db1e57a..eb96a38d9c 100644 --- a/flytepropeller/pkg/controller/nodes/transformers.go +++ b/flytepropeller/pkg/controller/nodes/transformers.go @@ -69,6 +69,8 @@ func ToNodeExecEventPhase(p handler.EPhase) core.NodeExecution_Phase { return core.NodeExecution_FAILED case handler.EPhaseRecovered: return core.NodeExecution_RECOVERED + case handler.EPhaseTimedout: + return core.NodeExecution_TIMED_OUT default: return core.NodeExecution_UNDEFINED } @@ -228,10 +230,10 @@ func ToK8sTime(t time.Time) v1.Time { return v1.Time{Time: t} } -func UpdateNodeStatus(np v1alpha1.NodePhase, p handler.PhaseInfo, n interfaces.NodeStateReader, s v1alpha1.ExecutableNodeStatus) { +func UpdateNodeStatus(np v1alpha1.NodePhase, p handler.PhaseInfo, n interfaces.NodeStateReader, s v1alpha1.ExecutableNodeStatus, enableCRDebugMetadata bool) { // We update the phase and / or reason only if they are not already updated if np != s.GetPhase() || p.GetReason() != s.GetMessage() { - s.UpdatePhase(np, ToK8sTime(p.GetOccurredAt()), p.GetReason(), p.GetErr()) + s.UpdatePhase(np, ToK8sTime(p.GetOccurredAt()), p.GetReason(), enableCRDebugMetadata, p.GetErr()) } // Update TaskStatus if n.HasTaskNodeState() { diff --git a/flytepropeller/pull_request_template.md b/flytepropeller/pull_request_template.md deleted file mode 100644 index 9cdab99b46..0000000000 --- a/flytepropeller/pull_request_template.md +++ /dev/null @@ -1,35 +0,0 @@ -## _Read then delete this section_ - -_- 
Make sure to use a concise title for the pull-request._ - -_- Use #patch, #minor or #major in the pull-request title to bump the corresponding version. Otherwise, the patch version -will be bumped. [More details](https://github.com/marketplace/actions/github-tag-bump)_ - -# TL;DR -_Please replace this text with a description of what this PR accomplishes._ - -## Type - - [ ] Bug Fix - - [ ] Feature - - [ ] Plugin - -## Are all requirements met? - - - [ ] Code completed - - [ ] Smoke tested - - [ ] Unit tests added - - [ ] Code documentation added - - [ ] Any pending items have an associated Issue - -## Complete description - _How did you fix the bug, make the feature etc. Link to any design docs etc_ - -## Tracking Issue -_Remove the '*fixes*' keyword if there will be multiple PRs to fix the linked issue_ - -fixes https://github.com/flyteorg/flyte/issues/ - -## Follow-up issue -_NA_ -OR -_https://github.com/flyteorg/flyte/issues/_ diff --git a/flytestdlib/database/db.go b/flytestdlib/database/db.go index fe884c75f4..8046c7ead4 100644 --- a/flytestdlib/database/db.go +++ b/flytestdlib/database/db.go @@ -107,17 +107,12 @@ func withDB(ctx context.Context, databaseConfig *DbConfig, do func(db *gorm.DB) } // Migrate runs all configured migrations -func Migrate(ctx context.Context, databaseConfig *DbConfig, migrations []*gormigrate.Migration, initializationSQL string) error { +func Migrate(ctx context.Context, databaseConfig *DbConfig, migrations []*gormigrate.Migration) error { if len(migrations) == 0 { logger.Infof(ctx, "No migrations to run") return nil } return withDB(ctx, databaseConfig, func(db *gorm.DB) error { - tx := db.Exec(initializationSQL) - if tx.Error != nil { - return tx.Error - } - m := gormigrate.New(db, gormigrate.DefaultOptions, migrations) if err := m.Migrate(); err != nil { return fmt.Errorf("database migration failed: %v", err) diff --git a/flytestdlib/database/postgres.go b/flytestdlib/database/postgres.go index be5c118e58..ec43330af1 100644 --- 
a/flytestdlib/database/postgres.go +++ b/flytestdlib/database/postgres.go @@ -62,6 +62,7 @@ func CreatePostgresDbIfNotExists(ctx context.Context, gormConfig *gorm.Config, p return gormDb, nil } if !IsPgErrorWithCode(err, PqInvalidDBCode) { + logger.Errorf(ctx, "Unhandled error connecting to postgres, pg [%v], gorm [%v]: %v", pgConfig, gormConfig, err) return nil, err } logger.Warningf(ctx, "Database [%v] does not exist", pgConfig.DbName) diff --git a/flytestdlib/go.mod b/flytestdlib/go.mod index 16d0fe3b06..947fbe4b5c 100644 --- a/flytestdlib/go.mod +++ b/flytestdlib/go.mod @@ -147,7 +147,6 @@ replace ( github.com/flyteorg/flyte/flyteplugins => ../flyteplugins github.com/flyteorg/flyte/flytepropeller => ../flytepropeller github.com/flyteorg/flyte/flytestdlib => ../flytestdlib - github.com/flyteorg/flyteidl => ../flyteidl k8s.io/api => k8s.io/api v0.28.2 k8s.io/apimachinery => k8s.io/apimachinery v0.28.2 k8s.io/client-go => k8s.io/client-go v0.28.2 diff --git a/flytestdlib/pull_request_template.md b/flytestdlib/pull_request_template.md deleted file mode 100644 index 9cdab99b46..0000000000 --- a/flytestdlib/pull_request_template.md +++ /dev/null @@ -1,35 +0,0 @@ -## _Read then delete this section_ - -_- Make sure to use a concise title for the pull-request._ - -_- Use #patch, #minor or #major in the pull-request title to bump the corresponding version. Otherwise, the patch version -will be bumped. [More details](https://github.com/marketplace/actions/github-tag-bump)_ - -# TL;DR -_Please replace this text with a description of what this PR accomplishes._ - -## Type - - [ ] Bug Fix - - [ ] Feature - - [ ] Plugin - -## Are all requirements met? - - - [ ] Code completed - - [ ] Smoke tested - - [ ] Unit tests added - - [ ] Code documentation added - - [ ] Any pending items have an associated Issue - -## Complete description - _How did you fix the bug, make the feature etc. 
Link to any design docs etc_ - -## Tracking Issue -_Remove the '*fixes*' keyword if there will be multiple PRs to fix the linked issue_ - -fixes https://github.com/flyteorg/flyte/issues/ - -## Follow-up issue -_NA_ -OR -_https://github.com/flyteorg/flyte/issues/_ diff --git a/flytestdlib/random/weighted_random_list.go b/flytestdlib/random/weighted_random_list.go index 7a0d44bbaa..d9e23943a4 100644 --- a/flytestdlib/random/weighted_random_list.go +++ b/flytestdlib/random/weighted_random_list.go @@ -84,7 +84,7 @@ func NewWeightedRandom(ctx context.Context, entries []Entry) (WeightedRandomList currentTotal += 1.0 / float32(numberOfEntries) } else if e.Weight == 0 { // Entries which have zero weight are ignored - logger.Debug(ctx, "ignoring entry due to empty weight %v", e) + logger.Debugf(ctx, "ignoring entry due to empty weight %v", e) continue } diff --git a/flytestdlib/storage/protobuf_store.go b/flytestdlib/storage/protobuf_store.go index 0fb5bdc5c2..cf5a0c790c 100644 --- a/flytestdlib/storage/protobuf_store.go +++ b/flytestdlib/storage/protobuf_store.go @@ -46,7 +46,7 @@ func (s DefaultProtobufStore) ReadProtobuf(ctx context.Context, reference DataRe defer func() { err = rc.Close() if err != nil { - logger.Warn(ctx, "Failed to close reference [%v]. Error: %v", reference, err) + logger.Warnf(ctx, "Failed to close reference [%v]. 
Error: %v", reference, err) } }() diff --git a/kustomize/overlays/gcp/kustomization.yaml b/kustomize/overlays/gcp/kustomization.yaml index f264946c08..e8dc107ffa 100644 --- a/kustomize/overlays/gcp/kustomization.yaml +++ b/kustomize/overlays/gcp/kustomization.yaml @@ -23,7 +23,7 @@ bases: images: # FlyteAdmin - name: flyteadmin # match images with this name - newTag: v1.9.37 # FLYTEADMIN_TAG override the tag + newTag: v1.10.7-b2 # FLYTEADMIN_TAG override the tag newName: cr.flyte.org/flyteorg/flyteadmin # override the name # FlyteConsole - name: flyteconsole # match images with this name @@ -31,15 +31,15 @@ images: newName: cr.flyte.org/flyteorg/flyteconsole # override the namep # Flyte DataCatalog - name: datacatalog # match images with this name - newTag: v1.9.37 # DATACATALOG_TAG override the tag + newTag: v1.10.7-b2 # DATACATALOG_TAG override the tag newName: cr.flyte.org/flyteorg/datacatalog # override the name # FlytePropeller - name: flytepropeller # match images with this name - newTag: v1.9.37 # FLYTEPROPELLER_TAG override the tag + newTag: v1.10.7-b2 # FLYTEPROPELLER_TAG override the tag newName: cr.flyte.org/flyteorg/flytepropeller # override the name # Webhook - name: webhook # match images with this name - newTag: v1.9.37 # FLYTEPROPELLER_TAG override the tag + newTag: v1.10.7-b2 # FLYTEPROPELLER_TAG override the tag newName: cr.flyte.org/flyteorg/flytepropeller # override the name # Override postgres image to use alpine based (rather smaller) docker image - name: postgres diff --git a/monodocs-environment.lock.yaml b/monodocs-environment.lock.yaml index 4efaa5fe6c..d0b4d14608 100644 --- a/monodocs-environment.lock.yaml +++ b/monodocs-environment.lock.yaml @@ -49,27 +49,27 @@ package: category: main optional: false - name: absl-py - version: 2.0.0 + version: 2.1.0 manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/absl-py-2.0.0-pyhd8ed1ab_0.conda + python: '>=3.7' + url: 
https://conda.anaconda.org/conda-forge/noarch/absl-py-2.1.0-pyhd8ed1ab_0.conda hash: - md5: 2361c56617987bb6623dcbf8bab5cdc4 - sha256: 99e2a54d079d91909d6d12d2336d792fd2ab666620d2ac761cd09d0e552dd44d + md5: 035d1d58677c13ec93122d9eb6b8803b + sha256: 6c84575fe0c3a860c7b6a52cb36dc548c838503c8da0f950a63a64c29b443937 category: main optional: false - name: absl-py - version: 2.0.0 + version: 2.1.0 manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/absl-py-2.0.0-pyhd8ed1ab_0.conda + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/absl-py-2.1.0-pyhd8ed1ab_0.conda hash: - md5: 2361c56617987bb6623dcbf8bab5cdc4 - sha256: 99e2a54d079d91909d6d12d2336d792fd2ab666620d2ac761cd09d0e552dd44d + md5: 035d1d58677c13ec93122d9eb6b8803b + sha256: 6c84575fe0c3a860c7b6a52cb36dc548c838503c8da0f950a63a64c29b443937 category: main optional: false - name: adal @@ -141,23 +141,23 @@ package: category: main optional: false - name: aiobotocore - version: 2.8.0 + version: 2.9.1 manager: conda platform: linux-64 dependencies: aiohttp: '>=3.7.4.post0,<4.0.0' aioitertools: '>=0.5.1,<1.0.0' - botocore: '>=1.32.4,<1.33.2' + botocore: '>=1.33.2,<1.33.14' python: '>=3.8' wrapt: '>=1.10.10,<2.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/aiobotocore-2.8.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/aiobotocore-2.9.1-pyhd8ed1ab_0.conda hash: - md5: 877b0ea381ffddb7f98768b6851a2bba - sha256: a8ddcb54ba540f61f6a68b84db7c6e0a324c0dbe0ea95882ddc3f25253824977 + md5: f48c1808e5d5386427cb47cd6e59de42 + sha256: 21e75838ac262c6443dab996cf299034c9736fa7ac912f9324683771988d8a37 category: main optional: false - name: aiobotocore - version: 2.8.0 + version: 2.9.1 manager: conda platform: osx-arm64 dependencies: @@ -165,11 +165,11 @@ package: wrapt: '>=1.10.10,<2.0.0' aioitertools: '>=0.5.1,<1.0.0' aiohttp: '>=3.7.4.post0,<4.0.0' - botocore: '>=1.32.4,<1.33.2' - url: 
https://conda.anaconda.org/conda-forge/noarch/aiobotocore-2.8.0-pyhd8ed1ab_0.conda + botocore: '>=1.33.2,<1.33.14' + url: https://conda.anaconda.org/conda-forge/noarch/aiobotocore-2.9.1-pyhd8ed1ab_0.conda hash: - md5: 877b0ea381ffddb7f98768b6851a2bba - sha256: a8ddcb54ba540f61f6a68b84db7c6e0a324c0dbe0ea95882ddc3f25253824977 + md5: f48c1808e5d5386427cb47cd6e59de42 + sha256: 21e75838ac262c6443dab996cf299034c9736fa7ac912f9324683771988d8a37 category: main optional: false - name: aiohttp @@ -264,61 +264,61 @@ package: category: main optional: false - name: alabaster - version: 0.7.13 + version: 0.7.16 manager: conda platform: linux-64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.16-pyhd8ed1ab_0.conda hash: - md5: 06006184e203b61d3525f90de394471e - sha256: b2d160a050996950434c6e87a174fc01c4a937cbeffbdd20d1b46126b4478a95 + md5: def531a3ac77b7fb8c21d17bb5d0badb + sha256: fd39ad2fabec1569bbb0dfdae34ab6ce7de6ec09dcec8638f83dad0373594069 category: main optional: false - name: alabaster - version: 0.7.13 + version: 0.7.16 manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda + python: '>=3.9' + url: https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.16-pyhd8ed1ab_0.conda hash: - md5: 06006184e203b61d3525f90de394471e - sha256: b2d160a050996950434c6e87a174fc01c4a937cbeffbdd20d1b46126b4478a95 + md5: def531a3ac77b7fb8c21d17bb5d0badb + sha256: fd39ad2fabec1569bbb0dfdae34ab6ce7de6ec09dcec8638f83dad0373594069 category: main optional: false - name: alembic - version: 1.13.0 + version: 1.13.1 manager: conda platform: linux-64 dependencies: importlib-metadata: '' importlib_resources: '' mako: '' - python: '>=3.7' + python: '>=3.8' sqlalchemy: '>=1.3.0' typing-extensions: '>=4' - url: 
https://conda.anaconda.org/conda-forge/noarch/alembic-1.13.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/alembic-1.13.1-pyhd8ed1ab_1.conda hash: - md5: 7f0372c1cfa41891787ddf267334c0c7 - sha256: d6dd010632bc5272fe7e51646651c5fb9ae9b7663113b94aa585d6bcb43834b0 + md5: 7b7b0062b0de9f3f71502d31215fcbbb + sha256: e4bc9aa5a6e866461274826bb750407a407fed9207a5adb70bf727f6addd7fe6 category: main optional: false - name: alembic - version: 1.13.0 + version: 1.13.1 manager: conda platform: osx-arm64 dependencies: importlib-metadata: '' importlib_resources: '' mako: '' - python: '>=3.7' + python: '>=3.8' sqlalchemy: '>=1.3.0' typing-extensions: '>=4' - url: https://conda.anaconda.org/conda-forge/noarch/alembic-1.13.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/alembic-1.13.1-pyhd8ed1ab_1.conda hash: - md5: 7f0372c1cfa41891787ddf267334c0c7 - sha256: d6dd010632bc5272fe7e51646651c5fb9ae9b7663113b94aa585d6bcb43834b0 + md5: 7b7b0062b0de9f3f71502d31215fcbbb + sha256: e4bc9aa5a6e866461274826bb750407a407fed9207a5adb70bf727f6addd7fe6 category: main optional: false - name: altair @@ -384,7 +384,7 @@ package: category: main optional: false - name: anyio - version: 4.1.0 + version: 4.2.0 manager: conda platform: linux-64 dependencies: @@ -392,25 +392,27 @@ package: idna: '>=2.8' python: '>=3.8' sniffio: '>=1.1' - url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.1.0-pyhd8ed1ab_0.conda + typing_extensions: '>=4.1' + url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.2.0-pyhd8ed1ab_0.conda hash: - md5: 76a3b574717769c4c937c2afa2f1069f - sha256: d9d64b29d8a4f58a8d5f9cb0af80b70fd4e038a6e328b039899f7cd93863a82e + md5: 81ce9f3d9697b534d95118bb86c8a07e + sha256: 68458e31bdf3334f0e85f08767718ca9bc35bc2a79a6c503942ac99da98e510a category: main optional: false - name: anyio - version: 4.1.0 + version: 4.2.0 manager: conda platform: osx-arm64 dependencies: python: '>=3.8' sniffio: '>=1.1' + typing_extensions: '>=4.1' idna: 
'>=2.8' exceptiongroup: '>=1.0.2' - url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.1.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/anyio-4.2.0-pyhd8ed1ab_0.conda hash: - md5: 76a3b574717769c4c937c2afa2f1069f - sha256: d9d64b29d8a4f58a8d5f9cb0af80b70fd4e038a6e328b039899f7cd93863a82e + md5: 81ce9f3d9697b534d95118bb86c8a07e + sha256: 68458e31bdf3334f0e85f08767718ca9bc35bc2a79a6c503942ac99da98e510a category: main optional: false - name: aplus @@ -718,110 +720,109 @@ package: category: main optional: false - name: attrs - version: 23.1.0 + version: 23.2.0 manager: conda platform: linux-64 dependencies: python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/attrs-23.1.0-pyh71513ae_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/attrs-23.2.0-pyh71513ae_0.conda hash: - md5: 3edfead7cedd1ab4400a6c588f3e75f8 - sha256: 063639cd568f5c7a557b0fb1cc27f098598c0d8ff869088bfeb82934674f8821 + md5: 5e4c0743c70186509d1412e03c2d8dfa + sha256: 77c7d03bdb243a048fff398cedc74327b7dc79169ebe3b4c8448b0331ea55fea category: main optional: false - name: attrs - version: 23.1.0 + version: 23.2.0 manager: conda platform: osx-arm64 dependencies: python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/attrs-23.1.0-pyh71513ae_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/attrs-23.2.0-pyh71513ae_0.conda hash: - md5: 3edfead7cedd1ab4400a6c588f3e75f8 - sha256: 063639cd568f5c7a557b0fb1cc27f098598c0d8ff869088bfeb82934674f8821 + md5: 5e4c0743c70186509d1412e03c2d8dfa + sha256: 77c7d03bdb243a048fff398cedc74327b7dc79169ebe3b4c8448b0331ea55fea category: main optional: false - name: aws-c-auth - version: 0.7.3 + version: 0.7.11 manager: conda platform: linux-64 dependencies: - aws-c-cal: '>=0.6.1,<0.6.2.0a0' - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-http: '>=0.7.11,<0.7.12.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' - aws-c-sdkutils: '>=0.1.12,<0.1.13.0a0' + aws-c-cal: '>=0.6.9,<0.6.10.0a0' + aws-c-common: 
'>=0.9.12,<0.9.13.0a0' + aws-c-http: '>=0.8.0,<0.8.1.0a0' + aws-c-io: '>=0.14.0,<0.14.1.0a0' + aws-c-sdkutils: '>=0.1.13,<0.1.14.0a0' libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.3-h28f7589_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.11-h0b4cabd_1.conda hash: - md5: 97503d3e565004697f1651753aa95b9e - sha256: d811d310ea601fc37f8a843e8aa7992c3286702d163472bd80bd81c7cb301f7b + md5: e9a6562446d81183d1483bb23bfc478c + sha256: ef98131dbff55f482b0af10d17aa6c478e59987661cf3c22dddb30a441986aa5 category: main optional: false - name: aws-c-auth - version: 0.7.3 + version: 0.7.11 manager: conda platform: osx-arm64 dependencies: - aws-c-cal: '>=0.6.1,<0.6.2.0a0' - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-http: '>=0.7.11,<0.7.12.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' - aws-c-sdkutils: '>=0.1.12,<0.1.13.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-auth-0.7.3-h109ad1a_1.conda + aws-c-cal: '>=0.6.9,<0.6.10.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' + aws-c-http: '>=0.8.0,<0.8.1.0a0' + aws-c-io: '>=0.14.0,<0.14.1.0a0' + aws-c-sdkutils: '>=0.1.13,<0.1.14.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-auth-0.7.11-ha4ce7b8_1.conda hash: - md5: 55852033dc22d5a2c00d58583447d051 - sha256: 547c24057cb8459186aa09cbc97c58eeca7c9e44d227a01457a2261e1954a36a + md5: ed467f71fac4eca9454ca1ff99be7f84 + sha256: 4b4318d4ad5cb9d3f3e141e43528e3c7f161e8f167195ff2627e6ec778bd890f category: main optional: false - name: aws-c-cal - version: 0.6.1 + version: 0.6.9 manager: conda platform: linux-64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' libgcc-ng: '>=12' - openssl: '>=3.1.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.1-hc309b26_1.conda + openssl: '>=3.2.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.9-h14ec70c_3.conda hash: - md5: cc09293a2c2b7fd77aff284f370c12c0 - 
sha256: e5de0a103e6eb6597c012d67daa5d8ccb65e25e1fe22ef5cb758835276be90b9 + md5: 7da4b84275e63f56d158d6250727a70f + sha256: d4f593f586378d7544900847b16d922a10c4d92aec7add6e3cb5dbe69965ab2f category: main optional: false - name: aws-c-cal - version: 0.6.1 + version: 0.6.9 manager: conda platform: osx-arm64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' - openssl: '>=3.1.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-cal-0.6.1-hb406d48_1.conda + aws-c-common: '>=0.9.12,<0.9.13.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-cal-0.6.9-h4fd42c2_3.conda hash: - md5: 2461f9de775b064f549d8e1c2e74c682 - sha256: 066a1933fe8c0c8c0e8a7dc11bada019813b1c09ab290cbaf904bddcd11e036d + md5: c06a837ae2f0c217141c32cb408c8b92 + sha256: dde08312c4db4e2e646e37bf5e3dc96affa0dfa87a3044821f545635cad2b440 category: main optional: false - name: aws-c-common - version: 0.9.0 + version: 0.9.12 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.0-hd590300_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.12-hd590300_0.conda hash: - md5: 71b89db63b5b504e7afc8ad901172e1e - sha256: d70c478150d2551bf7b200bfa3d7cb8a016471819a58bb7fe18a4e526dae3567 + md5: 7dbb94ffb9df66406f3101625807cac1 + sha256: 22e7c9438f2fe3c46a1747efcaae4ab3a078714ff8992a6ec3c213f50b9d6704 category: main optional: false - name: aws-c-common - version: 0.9.0 + version: 0.9.12 manager: conda platform: osx-arm64 dependencies: {} - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-common-0.9.0-hb547adb_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-common-0.9.12-h93a5062_0.conda hash: - md5: 7d7f91d3daa8d7bbf1da8c97038e336f - sha256: ec15d841f362e6a4468e06117d985f7f6007568ba832416ff146d239b2f7f0c0 + md5: afe8c81d8e34a96a124640788296b02e + sha256: 75d963943aefae31ab1a02956a5ee41c0fa347da9550bd1ce57b5cbbea7ea7e6 category: main optional: 
false - name: aws-c-compression @@ -829,12 +830,12 @@ package: manager: conda platform: linux-64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h4d4d85c_2.conda + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h572eabf_8.conda hash: - md5: 9ca99452635fe03eb5fa937f5ae604b0 - sha256: 8ff6538db97a861be404f11dbc01abe7d4bc9978df3e573af1d08e2590eb500e + md5: cc6630010cb1211cc15fb348f7c7eb70 + sha256: 0627434bcee61f94cf35d7719a395d4b7c9967f20bb877f1bd05868013a8a93c category: main optional: false - name: aws-c-compression @@ -842,191 +843,191 @@ package: manager: conda platform: osx-arm64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-compression-0.2.17-he70778a_2.conda + aws-c-common: '>=0.9.12,<0.9.13.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-compression-0.2.17-h4fd42c2_8.conda hash: - md5: d7a30e85a98d14dcd2d0ca8ae8ca4372 - sha256: 0e5a93b88d77405af89586f4b776f68a3cfd1c9ed44da57ac2a6b042dc96a26c + md5: c9b738b496c34db0d27b42491eb16c23 + sha256: 0500a040f6d2838af312c26fbea6ed2a9cac54d8a74347a9c1964af8f57ff033 category: main optional: false - name: aws-c-event-stream - version: 0.3.1 + version: 0.4.1 manager: conda platform: linux-64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' + aws-c-io: '>=0.14.0,<0.14.1.0a0' aws-checksums: '>=0.1.17,<0.1.18.0a0' libgcc-ng: '>=12' libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.3.1-h2e3709c_4.conda + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.1-h97bb272_2.conda hash: - md5: 2cf21b1cbc1c096a28ffa2892257a2c1 - sha256: e733a19296044d23f785f4c1c18f82efce1b032e0913af93e70dcce10de6f688 + md5: 
5a16088be732d54b50c134203f712d24 + sha256: a7cb50ccb2779d2934cae3a4fcc3d032a4525b63a464d6bd23957650381d633e category: main optional: false - name: aws-c-event-stream - version: 0.3.1 + version: 0.4.1 manager: conda platform: osx-arm64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' + aws-c-io: '>=0.14.0,<0.14.1.0a0' aws-checksums: '>=0.1.17,<0.1.18.0a0' - libcxx: '>=15.0.7' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-event-stream-0.3.1-hcf14f3f_4.conda + libcxx: '>=15' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-event-stream-0.4.1-he66824e_2.conda hash: - md5: e8196ecb071eb3c07067008403182323 - sha256: eb127382b09a721a916ebf54c7aef8acbeefd00d5e2b14c45ec69f847bb50bf1 + md5: 64e84b88c3e9ff59fbd63816877a23d5 + sha256: 7ba075401a7963fd50d9f364053806c4a86e4f51cd8d2f063be875a78306e689 category: main optional: false - name: aws-c-http - version: 0.7.11 + version: 0.8.0 manager: conda platform: linux-64 dependencies: - aws-c-cal: '>=0.6.1,<0.6.2.0a0' - aws-c-common: '>=0.9.0,<0.9.1.0a0' + aws-c-cal: '>=0.6.9,<0.6.10.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' aws-c-compression: '>=0.2.17,<0.2.18.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' + aws-c-io: '>=0.14.0,<0.14.1.0a0' libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.11-h00aa349_4.conda + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.0-h9129f04_2.conda hash: - md5: cb932dff7328ff620ce8059c9968b095 - sha256: a4a90f366fe9235be7700306efef999d2d95ebc53e3701efb899ff59eadae6cd + md5: ec632590307b47ac47d22ebcf91f4043 + sha256: 5929ac8e3118146f9d23a5fdff54e2025501ee20a2cd9d8dd2b0115a60442dce category: main optional: false - name: aws-c-http - version: 0.7.11 + version: 0.8.0 manager: conda platform: osx-arm64 dependencies: - aws-c-cal: '>=0.6.1,<0.6.2.0a0' - aws-c-common: '>=0.9.0,<0.9.1.0a0' + aws-c-cal: '>=0.6.9,<0.6.10.0a0' + aws-c-common: 
'>=0.9.12,<0.9.13.0a0' aws-c-compression: '>=0.2.17,<0.2.18.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-http-0.7.11-hcbec455_4.conda + aws-c-io: '>=0.14.0,<0.14.1.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-http-0.8.0-hd3d28cd_2.conda hash: - md5: 13a5f11f32954de3c6ee5680700ce421 - sha256: de3b7d7729acbe9883ce8b4ab28130042b9c2e39616b616ee57555522afed65d + md5: bf12b06426420df2055eaa104889bc07 + sha256: 8e80c37e5f2cc84f92634c9c60a4eaa062c2b57dcc1001c5faf711b77318abb8 category: main optional: false - name: aws-c-io - version: 0.13.32 + version: 0.14.0 manager: conda platform: linux-64 dependencies: - aws-c-cal: '>=0.6.1,<0.6.2.0a0' - aws-c-common: '>=0.9.0,<0.9.1.0a0' + aws-c-cal: '>=0.6.9,<0.6.10.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' libgcc-ng: '>=12' - s2n: '>=1.3.49,<1.3.50.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.32-he9a53bd_1.conda + s2n: '>=1.4.1,<1.4.2.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.0-hf8f278a_1.conda hash: - md5: 8a24e5820f4a0ffd2ed9c4722cd5d7ca - sha256: 14911a1d4b66b5b031129d173fd34cedf852e3698eddd1972cf57974ce280f49 + md5: 30ebacf5b5fd61294851301887dc7518 + sha256: dd52e17a5be987b384c62574d90ddafbba68fa65b1f1344236b90c50ffed304d category: main optional: false - name: aws-c-io - version: 0.13.32 + version: 0.14.0 manager: conda platform: osx-arm64 dependencies: - aws-c-cal: '>=0.6.1,<0.6.2.0a0' - aws-c-common: '>=0.9.0,<0.9.1.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-io-0.13.32-he8ad1d2_1.conda + aws-c-cal: '>=0.6.9,<0.6.10.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-io-0.14.0-h8daa835_1.conda hash: - md5: f7b3961185b3476c4c5a2a5cd9ab0523 - sha256: 621cc2c5d73e245c002a7c6ede7b7f74dfdcaf7dc52f3284f339b2c09df41acf + md5: c15089f0a5df47a3278232235a6c2d3a + sha256: 
026567637cd89f840fdb70550aa2fe5d325ca3cf52b8d66b48e588d3f0cfc2ea category: main optional: false - name: aws-c-mqtt - version: 0.9.3 + version: 0.10.1 manager: conda platform: linux-64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-http: '>=0.7.11,<0.7.12.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' + aws-c-http: '>=0.8.0,<0.8.1.0a0' + aws-c-io: '>=0.14.0,<0.14.1.0a0' libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.9.3-hb447be9_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.1-h2b97f5f_0.conda hash: - md5: c520669eb0be9269a5f0d8ef62531882 - sha256: e804a788e6147f4466bd2a24b7d9c327bffbe98bfb7420ddeae47dae41b140d6 + md5: 4cba7afc0f74a7cce3159c0bceb607c3 + sha256: 8edcb09a2d93c24320f517f837a0e46e98749b72dc7c9d55ce1fa0c4fa5db116 category: main optional: false - name: aws-c-mqtt - version: 0.9.3 + version: 0.10.1 manager: conda platform: osx-arm64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-http: '>=0.7.11,<0.7.12.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-mqtt-0.9.3-hf45dd20_1.conda + aws-c-common: '>=0.9.12,<0.9.13.0a0' + aws-c-http: '>=0.8.0,<0.8.1.0a0' + aws-c-io: '>=0.14.0,<0.14.1.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-mqtt-0.10.1-h59ff425_0.conda hash: - md5: 2602feff0c3c7904cc9f743d5c5bdb74 - sha256: 1b0e3ac3b683f00f37e6caac12f3a3e257ded1c5f8e3f7cd59bdfb7068a5abee + md5: aef14e17e37ef7ff95c1deb57cee8a23 + sha256: 2e88ba1370be78b0532870bd1a5cffbc464e31b5d64f5763d2517b5c53753af4 category: main optional: false - name: aws-c-s3 - version: 0.3.14 + version: 0.4.9 manager: conda platform: linux-64 dependencies: - aws-c-auth: '>=0.7.3,<0.7.4.0a0' - aws-c-cal: '>=0.6.1,<0.6.2.0a0' - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-http: '>=0.7.11,<0.7.12.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' + aws-c-auth: '>=0.7.11,<0.7.12.0a0' + aws-c-cal: 
'>=0.6.9,<0.6.10.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' + aws-c-http: '>=0.8.0,<0.8.1.0a0' + aws-c-io: '>=0.14.0,<0.14.1.0a0' aws-checksums: '>=0.1.17,<0.1.18.0a0' libgcc-ng: '>=12' - openssl: '>=3.1.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.14-hf3aad02_1.conda + openssl: '>=3.2.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.4.9-hca09fc5_0.conda hash: - md5: a968ffa7e9fe0c257628033d393e512f - sha256: f986c222f1f22e9e8d4869846a975595f1d0043794b41a5cf30ac53b0e4f4852 + md5: 44f261ca46a671789f59dc305d51afeb + sha256: b10ad88a1b1f7bf8bb999e06b4bb92e87fa9ede81a10492a373d354f4276a77b category: main optional: false - name: aws-c-s3 - version: 0.3.14 + version: 0.4.9 manager: conda platform: osx-arm64 dependencies: - aws-c-auth: '>=0.7.3,<0.7.4.0a0' - aws-c-cal: '>=0.6.1,<0.6.2.0a0' - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-http: '>=0.7.11,<0.7.12.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' + aws-c-auth: '>=0.7.11,<0.7.12.0a0' + aws-c-cal: '>=0.6.9,<0.6.10.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' + aws-c-http: '>=0.8.0,<0.8.1.0a0' + aws-c-io: '>=0.14.0,<0.14.1.0a0' aws-checksums: '>=0.1.17,<0.1.18.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-s3-0.3.14-hf0e1321_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-s3-0.4.9-h7f99a2c_0.conda hash: - md5: 01eb867a0851892b41cb57a7ecaeee8b - sha256: 02b7cd21bf1db9284d5e26cf9fb5a7f8a3e0a856ada807eb51d60e1d66cd8e78 + md5: 912d57a741e590a1f568345088409393 + sha256: 9ce65a457cc2a02e12badb0110615bbb8498c6a33c8b96d98bec9f9aea520172 category: main optional: false - name: aws-c-sdkutils - version: 0.1.12 + version: 0.1.13 manager: conda platform: linux-64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.12-h4d4d85c_1.conda + url: 
https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.13-h572eabf_1.conda hash: - md5: eba092fc6de212a01de0065f38fe8bbb - sha256: eb092d65be4e42301a0babcfccd44dbd086eadd6b84f7929c7e07c7449748280 + md5: 7c56e8a2c4e8729443217e62e0bf65ba + sha256: eb54d7573f9bbd1d01458203dd83e9c0c94c73be91af9142dd78e1a928be5b7e category: main optional: false - name: aws-c-sdkutils - version: 0.1.12 + version: 0.1.13 manager: conda platform: osx-arm64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-sdkutils-0.1.12-he70778a_1.conda + aws-c-common: '>=0.9.12,<0.9.13.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-c-sdkutils-0.1.13-h4fd42c2_1.conda hash: - md5: a90a6413bbf361584033773c19cecb65 - sha256: 60bf0d3e7bd7bb368bbbc03f917b2a97198642ce8ed491f80c5cacbb6498c566 + md5: d45de4f4fd881f65d794f86a4471e370 + sha256: da5567016574499b732dbf276c0840751dafe7efbd61159917ea527c079a8c01 category: main optional: false - name: aws-checksums @@ -1034,12 +1035,12 @@ package: manager: conda platform: linux-64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.17-h4d4d85c_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.17-h572eabf_7.conda hash: - md5: 30f9df85ce23cd14faa9a4dfa50cca2b - sha256: df2356cf1cac39d9b872026275527718dac1d39f6e88fa5b0b56d06f6b90eb98 + md5: f7323eedc2685a24661cd6b57d7ed321 + sha256: c29ca126f9dd520cc749e8cb99b07168badb333b4b1b95577bb1788c432fe2d0 category: main optional: false - name: aws-checksums @@ -1047,121 +1048,152 @@ package: manager: conda platform: osx-arm64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-checksums-0.1.17-he70778a_1.conda + aws-c-common: '>=0.9.12,<0.9.13.0a0' + url: 
https://conda.anaconda.org/conda-forge/osx-arm64/aws-checksums-0.1.17-h4fd42c2_7.conda hash: - md5: 5892420ce138d63e1f6e08c9915f0308 - sha256: 52606dcecd60ccc587d3c8f100cc30b7a8101f655e8d9b26a90dac9996030f52 + md5: 22e536282755e9e87ff48c652c9eec7b + sha256: 92a00157c3e3f387d0ba171bcbb6516893ea16fc34c34f535dd74ae38fb3db8e category: main optional: false - name: aws-crt-cpp - version: 0.21.0 + version: 0.26.0 manager: conda platform: linux-64 dependencies: - aws-c-auth: '>=0.7.3,<0.7.4.0a0' - aws-c-cal: '>=0.6.1,<0.6.2.0a0' - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-event-stream: '>=0.3.1,<0.3.2.0a0' - aws-c-http: '>=0.7.11,<0.7.12.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' - aws-c-mqtt: '>=0.9.3,<0.9.4.0a0' - aws-c-s3: '>=0.3.14,<0.3.15.0a0' - aws-c-sdkutils: '>=0.1.12,<0.1.13.0a0' + aws-c-auth: '>=0.7.11,<0.7.12.0a0' + aws-c-cal: '>=0.6.9,<0.6.10.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' + aws-c-event-stream: '>=0.4.1,<0.4.2.0a0' + aws-c-http: '>=0.8.0,<0.8.1.0a0' + aws-c-io: '>=0.14.0,<0.14.1.0a0' + aws-c-mqtt: '>=0.10.1,<0.10.2.0a0' + aws-c-s3: '>=0.4.9,<0.4.10.0a0' + aws-c-sdkutils: '>=0.1.13,<0.1.14.0a0' libgcc-ng: '>=12' libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.21.0-hb942446_5.conda + url: https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.26.0-h04327c0_8.conda hash: - md5: 07d92ed5403ad7b5c66ffd7d5b8f7e57 - sha256: 7456d3f9c3069be9c1faf82cd37885c270d0ee945bb5adb4cc3223c9439ccc7e + md5: 8d2aeb8c24b47ad3ff87166957b216fd + sha256: 4bdef70ff6362d8a3350b4c4181d078e7b1f654a249d63294e9ab1c5a9ca72c7 category: main optional: false - name: aws-crt-cpp - version: 0.21.0 + version: 0.26.0 manager: conda platform: osx-arm64 dependencies: - aws-c-auth: '>=0.7.3,<0.7.4.0a0' - aws-c-cal: '>=0.6.1,<0.6.2.0a0' - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-event-stream: '>=0.3.1,<0.3.2.0a0' - aws-c-http: '>=0.7.11,<0.7.12.0a0' - aws-c-io: '>=0.13.32,<0.13.33.0a0' - aws-c-mqtt: '>=0.9.3,<0.9.4.0a0' - aws-c-s3: 
'>=0.3.14,<0.3.15.0a0' - aws-c-sdkutils: '>=0.1.12,<0.1.13.0a0' - libcxx: '>=15.0.7' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-crt-cpp-0.21.0-hda227cd_5.conda + aws-c-auth: '>=0.7.11,<0.7.12.0a0' + aws-c-cal: '>=0.6.9,<0.6.10.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' + aws-c-event-stream: '>=0.4.1,<0.4.2.0a0' + aws-c-http: '>=0.8.0,<0.8.1.0a0' + aws-c-io: '>=0.14.0,<0.14.1.0a0' + aws-c-mqtt: '>=0.10.1,<0.10.2.0a0' + aws-c-s3: '>=0.4.9,<0.4.10.0a0' + aws-c-sdkutils: '>=0.1.13,<0.1.14.0a0' + libcxx: '>=15' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-crt-cpp-0.26.0-hfff802b_8.conda hash: - md5: dd47301eea0a420faa1546f5c5d708ff - sha256: 861bb8ca04a5cad23dfd444967e29735b2430350c7789dc5e869a4e71dac82ae + md5: 9f4ebd51ab78bed865f2cea217cc2800 + sha256: a88854f232025c297d3161a43795909d8a00a936cb782780fa2e3fc83ea6d489 category: main optional: false - name: aws-sdk-cpp - version: 1.10.57 + version: 1.11.210 manager: conda platform: linux-64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-event-stream: '>=0.3.1,<0.3.2.0a0' - aws-crt-cpp: '>=0.21.0,<0.21.1.0a0' - libcurl: '>=8.2.1,<9.0a0' + aws-c-common: '>=0.9.12,<0.9.13.0a0' + aws-c-event-stream: '>=0.4.1,<0.4.2.0a0' + aws-checksums: '>=0.1.17,<0.1.18.0a0' + aws-crt-cpp: '>=0.26.0,<0.26.1.0a0' + libcurl: '>=8.5.0,<9.0a0' libgcc-ng: '>=12' libstdcxx-ng: '>=12' libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-h85b1a90_19.conda + openssl: '>=3.2.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.210-hba3e011_10.conda hash: - md5: 0605d3d60857fc07bd6a11e878fe0f08 - sha256: 2f44eaea9d88203f400229c3247575dd8ea7c20907cc6ea2e2d859f43c395141 + md5: a4f975a959587b0e75df8e0f9f2d4347 + sha256: 97b50927c4312ad80f3729669fa8b55195c066710e0af73c818c244df01b7604 category: main optional: false - name: aws-sdk-cpp - version: 1.10.57 + version: 1.11.210 manager: conda platform: 
osx-arm64 dependencies: - aws-c-common: '>=0.9.0,<0.9.1.0a0' - aws-c-event-stream: '>=0.3.1,<0.3.2.0a0' - aws-crt-cpp: '>=0.21.0,<0.21.1.0a0' - libcurl: '>=8.2.1,<9.0a0' - libcxx: '>=15.0.7' + aws-c-common: '>=0.9.12,<0.9.13.0a0' + aws-c-event-stream: '>=0.4.1,<0.4.2.0a0' + aws-checksums: '>=0.1.17,<0.1.18.0a0' + aws-crt-cpp: '>=0.26.0,<0.26.1.0a0' + libcurl: '>=8.5.0,<9.0a0' + libcxx: '>=15' libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-sdk-cpp-1.10.57-h1190f42_19.conda + openssl: '>=3.2.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/aws-sdk-cpp-1.11.210-he93ac2d_10.conda hash: - md5: bd16f7f2a92c33d1767592c45588dbc7 - sha256: b176a2cf86d8c6891f5d234b2be4503492b37554241bad183b4524988c2e82e9 + md5: 7b36897c51a1a12db6b3a79a3c6e0a80 + sha256: 861ef77ea13a8ca24f115bf7aea446b38ad491977188350cb74df1423a8b7841 category: main optional: false - name: azure-core - version: 1.29.5 + version: 1.29.7 manager: conda platform: linux-64 dependencies: - python: '>=3.6' - requests: '>=2.18.4' + python: '>=3.7' + requests: '>=2.21.0' six: '>=1.11.0' - typing-extensions: '>=4.0.1' - url: https://conda.anaconda.org/conda-forge/noarch/azure-core-1.29.5-pyhd8ed1ab_0.conda + typing-extensions: '>=4.6.0' + url: https://conda.anaconda.org/conda-forge/noarch/azure-core-1.29.7-pyhd8ed1ab_0.conda hash: - md5: 6e97f7d5387626f896515442002ac920 - sha256: 3f3ec0617e825bcabb70722ace9153dfdc02895aebb2179fc20b82eb30f79ec8 + md5: 64d436079b1422e0483b0fbb326622a2 + sha256: 9a9ea330870d2655348fcb8c87a5fa421f3b6c3e347653131d7104f04daad5b8 category: main optional: false - name: azure-core - version: 1.29.5 + version: 1.29.7 manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' - requests: '>=2.18.4' + python: '>=3.7' six: '>=1.11.0' - typing-extensions: '>=4.0.1' - url: https://conda.anaconda.org/conda-forge/noarch/azure-core-1.29.5-pyhd8ed1ab_0.conda + requests: '>=2.21.0' + typing-extensions: 
'>=4.6.0' + url: https://conda.anaconda.org/conda-forge/noarch/azure-core-1.29.7-pyhd8ed1ab_0.conda + hash: + md5: 64d436079b1422e0483b0fbb326622a2 + sha256: 9a9ea330870d2655348fcb8c87a5fa421f3b6c3e347653131d7104f04daad5b8 + category: main + optional: false +- name: azure-core-cpp + version: 1.10.3 + manager: conda + platform: linux-64 + dependencies: + libcurl: '>=8.3.0,<9.0a0' + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + openssl: '>=3.1.3,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.10.3-h91d86a7_0.conda hash: - md5: 6e97f7d5387626f896515442002ac920 - sha256: 3f3ec0617e825bcabb70722ace9153dfdc02895aebb2179fc20b82eb30f79ec8 + md5: db7a05c674efad9c19f8a2ff76e4976c + sha256: e2965b736f80cc66f4a90314592569dd7d87039e791b0e6b88870d32ab3e2901 + category: main + optional: false +- name: azure-core-cpp + version: 1.11.0 + manager: conda + platform: osx-arm64 + dependencies: + libcurl: '>=8.5.0,<9.0a0' + libcxx: '>=15' + openssl: '>=3.2.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-core-cpp-1.11.0-he231e37_0.conda + hash: + md5: 331d81c4fe103355442a51b1e381a45b + sha256: e016272a05b81da26eb633643c1c9601528771820ebe132eac20029239461168 category: main optional: false - name: azure-datalake-store @@ -1258,6 +1290,68 @@ package: sha256: 385c2a82fd5183d2cc791747f63d81e8bda11e068ee0e29e945fe7cdfe20a817 category: main optional: false +- name: azure-storage-blobs-cpp + version: 12.10.0 + manager: conda + platform: linux-64 + dependencies: + azure-core-cpp: '>=1.10.3,<2.0a0' + azure-storage-common-cpp: '>=12.5.0,<13.0a0' + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.10.0-h00ab1b0_0.conda + hash: + md5: 64eec459779f01803594f5272cdde23c + sha256: ea323e7028590b1877af92b76bc3cda52db5a1d90b8321ec91b9db0689f07fb3 + category: main + optional: false +- name: azure-storage-blobs-cpp + version: 12.10.0 + manager: conda + platform: osx-arm64 + 
dependencies: + __osx: '>=10.9' + azure-core-cpp: '>=1.10.3,<2.0a0' + azure-storage-common-cpp: '>=12.5.0,<13.0a0' + libcxx: '>=16.0.6' + url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-blobs-cpp-12.10.0-h6aa02a4_0.conda + hash: + md5: a2ae520245fd026fcbfac906c5350834 + sha256: a85bb29ab61207489f91e239b687bb97a2bf22a09c9b0e2cf32dd003f9a4c366 + category: main + optional: false +- name: azure-storage-common-cpp + version: 12.5.0 + manager: conda + platform: linux-64 + dependencies: + azure-core-cpp: '>=1.10.3,<2.0a0' + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + libxml2: '>=2.12.1,<3.0.0a0' + openssl: '>=3.2.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.5.0-hb858b4b_2.conda + hash: + md5: 19f23b45d1925a9a8f701a3f6f9cce4f + sha256: 68e177ae983d63323b9bd1c1528776bb0e03d5d5aef0addba97aed4537e649a6 + category: main + optional: false +- name: azure-storage-common-cpp + version: 12.5.0 + manager: conda + platform: osx-arm64 + dependencies: + __osx: '>=10.9' + azure-core-cpp: '>=1.10.3,<2.0a0' + libcxx: '>=16.0.6' + libxml2: '>=2.12.1,<3.0.0a0' + openssl: '>=3.2.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/azure-storage-common-cpp-12.5.0-h607ffeb_2.conda + hash: + md5: 457b5b7cfda7d6bec46e95cbe6554bc5 + sha256: 1c020b792916289eec5b203e6cb301e80d434dc74de3ad9269ffa5b3fb9fa8c3 + category: main + optional: false - name: babel version: 2.14.0 manager: conda @@ -1314,29 +1408,29 @@ package: category: main optional: false - name: beautifulsoup4 - version: 4.12.2 + version: 4.12.3 manager: conda platform: linux-64 dependencies: python: '>=3.6' soupsieve: '>=1.2' - url: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda hash: - md5: a362ff7d976217f8fa78c0f1c4f59717 - sha256: 52d3e6bcd442537e22699cd227d8fdcfd54b708eeb8ee5b4c671a6a9b9cd74da + md5: 
332493000404d8411859539a5a630865 + sha256: 7b05b2d0669029326c623b9df7a29fa49d1982a9e7e31b2fea34b4c9a4a72317 category: main optional: false - name: beautifulsoup4 - version: 4.12.2 + version: 4.12.3 manager: conda platform: osx-arm64 dependencies: python: '>=3.6' soupsieve: '>=1.2' - url: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda hash: - md5: a362ff7d976217f8fa78c0f1c4f59717 - sha256: 52d3e6bcd442537e22699cd227d8fdcfd54b708eeb8ee5b4c671a6a9b9cd74da + md5: 332493000404d8411859539a5a630865 + sha256: 7b05b2d0669029326c623b9df7a29fa49d1982a9e7e31b2fea34b4c9a4a72317 category: main optional: false - name: binaryornot @@ -1366,7 +1460,7 @@ package: category: main optional: false - name: black - version: 23.11.0 + version: 23.12.1 manager: conda platform: linux-64 dependencies: @@ -1379,14 +1473,14 @@ package: python_abi: 3.9.* tomli: '>=1.1.0' typing_extensions: '>=4.0.1' - url: https://conda.anaconda.org/conda-forge/linux-64/black-23.11.0-py39hf3d152e_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/black-23.12.1-py39hf3d152e_0.conda hash: - md5: 6b828d6f788dcbad32d4e2b8f89cb47c - sha256: 825751ba565514bb9c729e690f08119f3831f5cd0678afcace5c3a2a9d3a6a8c + md5: 849aa17d61fb4c71b77297d4a6d117f7 + sha256: 80de71a38893c9f47fb7fb93fdd429298a1cab9017b9fb4bc4b909c4fa9ebb03 category: main optional: false - name: black - version: 23.11.0 + version: 23.12.1 manager: conda platform: osx-arm64 dependencies: @@ -1399,10 +1493,10 @@ package: python_abi: 3.9.* tomli: '>=1.1.0' typing_extensions: '>=4.0.1' - url: https://conda.anaconda.org/conda-forge/osx-arm64/black-23.11.0-py39h2804cbe_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/black-23.12.1-py39h2804cbe_0.conda hash: - md5: 94746897e02fb5be3aafc99ae9862c56 - sha256: 62603be057db836bdee1b39ffc27a7c0be6b97dee5801b88d72cee00b55443d9 + md5: 
bb96f4fe6f8e9049370e0932f26604e1 + sha256: cfb861ec4aec0f5f99f62f8d04584239d79e085aa007a70f3201025008c79a3a category: main optional: false - name: blake3 @@ -1524,7 +1618,7 @@ package: category: main optional: false - name: bokeh - version: 3.3.2 + version: 3.3.3 manager: conda platform: linux-64 dependencies: @@ -1538,10 +1632,10 @@ package: pyyaml: '>=3.10' tornado: '>=5.1' xyzservices: '>=2021.09.1' - url: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.3.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/bokeh-3.3.3-pyhd8ed1ab_0.conda hash: - md5: c02a7e79365121bd3bcc25f1b65f48a9 - sha256: 71c36a77422e58d66d1e1c126b09481ad05f7e80b1a86860496de566ed990e0e + md5: e6b7ca7e29811c2f72f93e8188171caa + sha256: 68a7e59b98235cef7fecced56a76056e54392d6961129c2b3b135fc5ec92a6c3 category: main optional: false - name: bokeh @@ -1564,7 +1658,7 @@ package: category: main optional: false - name: botocore - version: 1.33.1 + version: 1.33.13 manager: conda platform: linux-64 dependencies: @@ -1572,14 +1666,14 @@ package: python: '>=3.7' python-dateutil: '>=2.1,<3.0.0' urllib3: '>=1.25.4,<1.27' - url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.33.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.33.13-pyhd8ed1ab_0.conda hash: - md5: b40066840291a0406bffe70e0334de62 - sha256: c9d214ae266f03797b3c06129b1a173593fc573affc09cfd02bba3bdd36c4827 + md5: d2566fd9134b6f8b8e69a07e9a1fa17e + sha256: 498d08274880ef279e9a6dd68f66b384d71321d92301fa60330712f9edceab0c category: main optional: false - name: botocore - version: 1.33.1 + version: 1.33.13 manager: conda platform: osx-arm64 dependencies: @@ -1587,10 +1681,10 @@ package: python-dateutil: '>=2.1,<3.0.0' jmespath: '>=0.7.1,<2.0.0' urllib3: '>=1.25.4,<1.27' - url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.33.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/botocore-1.33.13-pyhd8ed1ab_0.conda hash: - md5: 
b40066840291a0406bffe70e0334de62 - sha256: c9d214ae266f03797b3c06129b1a173593fc573affc09cfd02bba3bdd36c4827 + md5: d2566fd9134b6f8b8e69a07e9a1fa17e + sha256: 498d08274880ef279e9a6dd68f66b384d71321d92301fa60330712f9edceab0c category: main optional: false - name: branca @@ -1620,63 +1714,63 @@ package: category: main optional: false - name: brotli - version: 1.0.9 + version: 1.1.0 manager: conda platform: linux-64 dependencies: - brotli-bin: 1.0.9 - libbrotlidec: 1.0.9 - libbrotlienc: 1.0.9 + brotli-bin: 1.1.0 + libbrotlidec: 1.1.0 + libbrotlienc: 1.1.0 libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_9.conda + url: https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hd590300_1.conda hash: - md5: 4601544b4982ba1861fa9b9c607b2c06 - sha256: 2357d205931912def55df0dc53573361156b27856f9bf359d464da162812ec1f + md5: f27a24d46e3ea7b70a1f98e50c62508f + sha256: f2d918d351edd06c55a6c2d84b488fe392f85ea018ff227daac07db22b408f6b category: main optional: false - name: brotli - version: 1.0.9 + version: 1.1.0 manager: conda platform: osx-arm64 dependencies: - brotli-bin: 1.0.9 - libbrotlidec: 1.0.9 - libbrotlienc: 1.0.9 - url: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.0.9-h1a8c8d9_9.conda + brotli-bin: 1.1.0 + libbrotlidec: 1.1.0 + libbrotlienc: 1.1.0 + url: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-1.1.0-hb547adb_1.conda hash: - md5: 856692dff5e450c269465e3256e1277b - sha256: 92c3062dd7e593a502c2f8d12e9ccca7ae1ef0363eb0b12faa47e1bb4fae42c7 + md5: a33aa58d448cbc054f887e39dd1dfaea + sha256: 62d1587deab752fcee07adc371eb20fcadc09f72c0c85399c22b637ca858020f category: main optional: false - name: brotli-bin - version: 1.0.9 + version: 1.1.0 manager: conda platform: linux-64 dependencies: - libbrotlidec: 1.0.9 - libbrotlienc: 1.0.9 + libbrotlidec: 1.1.0 + libbrotlienc: 1.1.0 libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda + url: 
https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hd590300_1.conda hash: - md5: d47dee1856d9cb955b8076eeff304a5b - sha256: 1c128f136a59ee2fa47d7fbd9b6fc8afa8460d340e4ae0e6f5419ebbd7539a10 + md5: 39f910d205726805a958da408ca194ba + sha256: a641abfbaec54f454c8434061fffa7fdaa9c695e8a5a400ed96b4f07c0c00677 category: main optional: false - name: brotli-bin - version: 1.0.9 + version: 1.1.0 manager: conda platform: osx-arm64 dependencies: - libbrotlidec: 1.0.9 - libbrotlienc: 1.0.9 - url: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.0.9-h1a8c8d9_9.conda + libbrotlidec: 1.1.0 + libbrotlienc: 1.1.0 + url: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-bin-1.1.0-hb547adb_1.conda hash: - md5: 19ad562adca69541e67613022b41df5b - sha256: 4731892fb855f5993129515c5b63b36d049cc64e70611c6afa8f64aae5f51323 + md5: 990d04f8c017b1b77103f9a7730a5f12 + sha256: 8fbfc2834606292016f2faffac67deea4c5cdbc21a61169f0b355e1600105a24 category: main optional: false - name: brotli-python - version: 1.0.9 + version: 1.1.0 manager: conda platform: linux-64 dependencies: @@ -1684,24 +1778,24 @@ package: libstdcxx-ng: '>=12' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py39h5a03fae_9.conda + url: https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py39h3d6467e_1.conda hash: - md5: d1601752c6f47af7bedf838be3d8ca6b - sha256: 18f296067e5f3f9fd9ea55ec89a0d91149455a5e5d1ddd26c53fb7fb2806dab0 + md5: c48418c8b35f1d59ae9ae1174812b40a + sha256: e22afb19527a93da24c1108c3e91532811f9c3df64a9473989faf332c98af082 category: main optional: false - name: brotli-python - version: 1.0.9 + version: 1.1.0 manager: conda platform: osx-arm64 dependencies: - libcxx: '>=14.0.6' + libcxx: '>=15.0.7' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.0.9-py39h23fbdae_9.conda + url: 
https://conda.anaconda.org/conda-forge/osx-arm64/brotli-python-1.1.0-py39hb198ff7_1.conda hash: - md5: ef4c58e8f1811e0730accf3f425e2e5f - sha256: 217f4ef7481ee63935768668496426d16773b3933492c026ad2922a950a81c41 + md5: ddf01dd9a743bd3ec9cf829d18bb8002 + sha256: 014639c1f57be1dadf7b5c17e53df562e7e6bab71d3435fdd5bd56213dece9df category: main optional: false - name: bzip2 @@ -1728,26 +1822,26 @@ package: category: main optional: false - name: c-ares - version: 1.23.0 + version: 1.25.0 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.23.0-hd590300_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.25.0-hd590300_0.conda hash: - md5: d459949bc10f64dee1595c176c2e6291 - sha256: 6b0eee827bade11c2964a05867499a50ad2a9d1b14dfe18fb867a3bc9357f56f + md5: 89e40af02dd3a0846c0c1131c5126706 + sha256: c4bbdafd6791583e3c77e8ed0e1df9e0021d542249c3543de3d72788f5c8a0c4 category: main optional: false - name: c-ares - version: 1.23.0 + version: 1.25.0 manager: conda platform: osx-arm64 dependencies: {} - url: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.23.0-h93a5062_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/c-ares-1.25.0-h93a5062_0.conda hash: - md5: b187f2b99e52905042d661f824666964 - sha256: de5385280dcad805428068adb1f4a7eb1e6ec8987e2f25c4ff5766e3fec3b4a2 + md5: 0a40466cb14b485738f1910340775f71 + sha256: 2a3df1920cfc493592c311db87138442475849ff11a14eb1dbf12caa80d8077b category: main optional: false - name: ca-certificates @@ -2207,29 +2301,29 @@ package: category: main optional: false - name: comm - version: 0.1.4 + version: 0.2.1 manager: conda platform: linux-64 dependencies: python: '>=3.6' traitlets: '>=5.3' - url: https://conda.anaconda.org/conda-forge/noarch/comm-0.1.4-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.1-pyhd8ed1ab_0.conda hash: - md5: c8eaca39e2b6abae1fc96acc929ae939 - sha256: 
11057745946a95ee7cc4c98900a60c7362266a4cb28bc97d96cd88e3056eb701 + md5: f4385072f4909bc974f6675a36e76796 + sha256: bd90a200e6f7092a89f02c4800729a4a6d2b2de49d70a9706aeb083a635308c1 category: main optional: false - name: comm - version: 0.1.4 + version: 0.2.1 manager: conda platform: osx-arm64 dependencies: python: '>=3.6' traitlets: '>=5.3' - url: https://conda.anaconda.org/conda-forge/noarch/comm-0.1.4-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/comm-0.2.1-pyhd8ed1ab_0.conda hash: - md5: c8eaca39e2b6abae1fc96acc929ae939 - sha256: 11057745946a95ee7cc4c98900a60c7362266a4cb28bc97d96cd88e3056eb701 + md5: f4385072f4909bc974f6675a36e76796 + sha256: bd90a200e6f7092a89f02c4800729a4a6d2b2de49d70a9706aeb083a635308c1 category: main optional: false - name: commonmark @@ -2467,14 +2561,14 @@ package: category: main optional: false - name: dask - version: 2023.12.0 + version: 2024.1.0 manager: conda platform: linux-64 dependencies: bokeh: '>=2.4.2,!=3.0.*' cytoolz: '>=0.11.0' - dask-core: '>=2023.12.0,<2023.12.1.0a0' - distributed: '>=2023.12.0,<2023.12.1.0a0' + dask-core: '>=2024.1.0,<2024.1.1.0a0' + distributed: '>=2024.1.0,<2024.1.1.0a0' jinja2: '>=2.10.3' lz4: '>=4.3.2' numpy: '>=1.21' @@ -2482,10 +2576,10 @@ package: pyarrow: '>=7.0' pyarrow-hotfix: '' python: '>=3.9' - url: https://conda.anaconda.org/conda-forge/noarch/dask-2023.12.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/dask-2024.1.0-pyhd8ed1ab_0.conda hash: - md5: 5cfc00e93b71fba459bede86419d0f01 - sha256: c9725f37d8b05b2b1407ff00b17c9bcee39ed5a33f33f34d771e5bbf4762e737 + md5: 972955a551de85754e66bb61bd628232 + sha256: b6f6cfbd68026633b3b65ce817ac7f92f1defc2fa6aed9d4a2d70f2224d06d0a category: main optional: false - name: dask @@ -2509,7 +2603,7 @@ package: category: main optional: false - name: dask-core - version: 2023.12.0 + version: 2024.1.0 manager: conda platform: linux-64 dependencies: @@ -2522,10 +2616,10 @@ package: python: '>=3.9' pyyaml: '>=5.3.1' 
toolz: '>=0.10.0' - url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.1.0-pyhd8ed1ab_0.conda hash: - md5: 95eae0785aed72998493140dc0115382 - sha256: e366163aa7325d14ca38ca72ca4672eeb4b7a7453573c47cfa90d0db60136b48 + md5: cab4cec272dc1e30086f7d32faa4f130 + sha256: a5d2dd0ed941e2c0067b3b6c4d594838f70c59ed4450e7344049bde9a3c2b654 category: main optional: false - name: dask-core @@ -2620,14 +2714,14 @@ package: category: main optional: false - name: datasets - version: 2.15.0 + version: 2.16.1 manager: conda platform: linux-64 dependencies: aiohttp: '' dill: '>=0.3.0,<0.3.8' fsspec: '>=2023.1.0,<=2023.10.0' - huggingface_hub: '>=0.18.0' + huggingface_hub: '>=0.19.4' importlib-metadata: '' multiprocess: '' numpy: '>=1.17' @@ -2640,14 +2734,14 @@ package: pyyaml: '>=5.1' requests: '>=2.19.0' tqdm: '>=4.62.1' - url: https://conda.anaconda.org/conda-forge/noarch/datasets-2.15.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/datasets-2.16.1-pyhd8ed1ab_0.conda hash: - md5: 12c03aa3ead93ebb57139efad4729a10 - sha256: 3a29aec8a57f756df148c337f83df5cd53e2df822bd4f1265ad0fd502fdf59d4 + md5: 99516e8430074c50911e058326eaa450 + sha256: ce835aeba2a2e987d6c5da996cf60d5d82a583ea903c410640382b45a00bbe43 category: main optional: false - name: datasets - version: 2.15.0 + version: 2.16.1 manager: conda platform: osx-arm64 dependencies: @@ -2660,17 +2754,17 @@ package: pyarrow-hotfix: '' pyyaml: '>=5.1' numpy: '>=1.17' - requests: '>=2.19.0' pyarrow: '>=8.0.0' + requests: '>=2.19.0' python: '>=3.8.0' tqdm: '>=4.62.1' dill: '>=0.3.0,<0.3.8' fsspec: '>=2023.1.0,<=2023.10.0' - huggingface_hub: '>=0.18.0' - url: https://conda.anaconda.org/conda-forge/noarch/datasets-2.15.0-pyhd8ed1ab_0.conda + huggingface_hub: '>=0.19.4' + url: https://conda.anaconda.org/conda-forge/noarch/datasets-2.16.1-pyhd8ed1ab_0.conda hash: - md5: 12c03aa3ead93ebb57139efad4729a10 - sha256: 
3a29aec8a57f756df148c337f83df5cd53e2df822bd4f1265ad0fd502fdf59d4 + md5: 99516e8430074c50911e058326eaa450 + sha256: ce835aeba2a2e987d6c5da996cf60d5d82a583ea903c410640382b45a00bbe43 category: main optional: false - name: db-dtypes @@ -2869,14 +2963,14 @@ package: category: main optional: false - name: distributed - version: 2023.12.0 + version: 2024.1.0 manager: conda platform: linux-64 dependencies: click: '>=8.0' cloudpickle: '>=1.5.0' cytoolz: '>=0.10.1' - dask-core: '>=2023.12.0,<2023.12.1.0a0' + dask-core: '>=2024.1.0,<2024.1.1.0a0' jinja2: '>=2.10.3' locket: '>=1.0.0' msgpack-python: '>=1.0.0' @@ -2890,10 +2984,10 @@ package: tornado: '>=6.0.4' urllib3: '>=1.24.3' zict: '>=3.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/distributed-2024.1.0-pyhd8ed1ab_0.conda hash: - md5: 22d620e1079e99c34578cb0c615d2789 - sha256: 7f16084ca5cbc66c4673d1eb032bd49f03a96a846656a18d38fd70fa8f2f16cc + md5: 6c7822204c9e438b2f298e82127f05ec + sha256: 3a376ff500035e602c8fcfb1624aee75ccf12ec89409e620576d97194ff87590 category: main optional: false - name: distributed @@ -3040,10 +3134,10 @@ package: platform: linux-64 dependencies: python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_2.conda hash: - md5: f6c211fee3c98229652b60a9a42ef363 - sha256: cf83dcaf9006015c8ccab3fc6770f478464a66a8769e1763ca5d7dff09d11d08 + md5: 8d652ea2ee8eaee02ed8dc820bc794aa + sha256: a6ae416383bda0e3ed14eaa187c653e22bec94ff2aa3b56970cdf0032761e80d category: main optional: false - name: exceptiongroup @@ -3052,10 +3146,10 @@ package: platform: osx-arm64 dependencies: python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_2.conda hash: 
- md5: f6c211fee3c98229652b60a9a42ef363 - sha256: cf83dcaf9006015c8ccab3fc6770f478464a66a8769e1763ca5d7dff09d11d08 + md5: 8d652ea2ee8eaee02ed8dc820bc794aa + sha256: a6ae416383bda0e3ed14eaa187c653e22bec94ff2aa3b56970cdf0032761e80d category: main optional: false - name: executing @@ -3108,7 +3202,7 @@ package: category: main optional: false - name: fastavro - version: 1.9.1 + version: 1.9.3 manager: conda platform: linux-64 dependencies: @@ -3116,24 +3210,24 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* pytz: '' - url: https://conda.anaconda.org/conda-forge/linux-64/fastavro-1.9.1-py39hd1e30aa_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/fastavro-1.9.3-py39hd1e30aa_0.conda hash: - md5: 6222cc87faa7fe48c2cf35920dd7ff28 - sha256: d8ce574d2ba15706a44966e0e5aafdcf25bdaace3f89435590f0b6ee1efe1a91 + md5: c3ace3a23beb653d3d3575fef56bf009 + sha256: 0c133b55206a64f6ea89105d7d946554ad27f967f7ad3f967d74f732a0c5793f category: main optional: false - name: fastavro - version: 1.9.1 + version: 1.9.3 manager: conda platform: osx-arm64 dependencies: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* pytz: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/fastavro-1.9.1-py39h17cfd9d_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/fastavro-1.9.3-py39h17cfd9d_0.conda hash: - md5: a435b41193850bbc3e8e49943b9140e8 - sha256: 1b4944683f3e2dd349c1620406eb1086342efc838d923161ea58a601694ce227 + md5: 8579c8c7faf69742670ace1605dd8d27 + sha256: a6c73461740cbfcdb62b7cdeb740e2821cb515132e5fda0a8054ffba325099ec category: main optional: false - name: filelock @@ -3172,7 +3266,7 @@ package: gdal: '' importlib-metadata: '' libgcc-ng: '>=12' - libgdal: '>=3.8.0,<3.9.0a0' + libgdal: '>=3.8.2,<3.9.0a0' libstdcxx-ng: '>=12' numpy: '>=1.22.4,<2.0a0' python: '>=3.9,<3.10.0a0' @@ -3180,10 +3274,10 @@ package: setuptools: '' shapely: '' six: '' - url: https://conda.anaconda.org/conda-forge/linux-64/fiona-1.9.5-py39hcfcd403_2.conda + url: 
https://conda.anaconda.org/conda-forge/linux-64/fiona-1.9.5-py39hcfcd403_3.conda hash: - md5: 6837bf6513255f203669a84a6431ec81 - sha256: 35c1c09a8523c62e084e57fff942a8ca3a9e273aced70b6b1f51250695d0a317 + md5: 4cbedb0d1f0add97a71a9482c15a8122 + sha256: fa3e5ff7c316a94bc8b3f6ebf61c28421a0c339a4318af1b754bfe5b05144508 category: main optional: false - name: fiona @@ -3191,25 +3285,24 @@ package: manager: conda platform: osx-arm64 dependencies: - __osx: '>=10.9' attrs: '>=19.2.0' click: '>=8.0,<9.dev0' click-plugins: '>=1.0' cligj: '>=0.5' gdal: '' importlib-metadata: '' - libcxx: '>=16.0.6' - libgdal: '>=3.8.0,<3.9.0a0' + libcxx: '>=15' + libgdal: '>=3.8.2,<3.9.0a0' numpy: '>=1.22.4,<2.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* setuptools: '' shapely: '' six: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/fiona-1.9.5-py39hd992b33_2.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/fiona-1.9.5-py39h97e6c39_3.conda hash: - md5: 691646699137f0496358cdebfdb8baab - sha256: bc6c7a7bebda8fb5b0a6abf08b8d8914c8c85392f346e329e144ce30708ef205 + md5: aa93d96a2e30536aa523a3fbd2c0225f + sha256: 7e9223d6d2bfb98e94e3415ad9f7a77a7d187e416a3158f354a5f77c0cf24833 category: main optional: false - name: flask @@ -3363,8 +3456,8 @@ package: joblib: '' rich: '' gcsfs: '' - grpcio: '' jsonpickle: '' + grpcio: '' adlfs: '' rich-click: '' grpcio-status: '' @@ -3608,7 +3701,7 @@ package: category: main optional: false - name: fonttools - version: 4.46.0 + version: 4.47.2 manager: conda platform: linux-64 dependencies: @@ -3618,14 +3711,14 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* unicodedata2: '>=14.0.0' - url: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.46.0-py39hd1e30aa_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.47.2-py39hd1e30aa_0.conda hash: - md5: 9b58e5973dd3d786253f4ca9534b1aba - sha256: e0d0b4039efce99870b8bd07a5a1bb7700b75a570ab19a38862336218ba66b94 + md5: 4e2b802b69be81944fdcd71018b74226 + 
sha256: f0834381dcabbaa5df8124bee63c6e26c642000a6f3fe80f521b3c95b1342f27 category: main optional: false - name: fonttools - version: 4.46.0 + version: 4.47.2 manager: conda platform: osx-arm64 dependencies: @@ -3634,10 +3727,10 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* unicodedata2: '>=14.0.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.46.0-py39h17cfd9d_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/fonttools-4.47.2-py39h17cfd9d_0.conda hash: - md5: 92cd73c1c1b85f06797dfde1d79a937b - sha256: 1e16d1a54a16af7ddd5a16f7988bf2d371fb3fd78bd95ab7937eff68e1907d74 + md5: cb2b258b7d021ac1e9692e16909a435f + sha256: 3ad5d03952cb87c70d008ac5e4ee15aecd3f66a8fda906a346419ff275826bb9 category: main optional: false - name: fqdn @@ -3746,30 +3839,30 @@ package: category: main optional: false - name: frozendict - version: 2.3.10 + version: 2.4.0 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/frozendict-2.3.10-py39hd1e30aa_3.conda + url: https://conda.anaconda.org/conda-forge/linux-64/frozendict-2.4.0-py39hd1e30aa_0.conda hash: - md5: 252562a446a331682c5101ee16fd8988 - sha256: 05b9f884d59b0978ef426c3a5e8791b28478e394be3ad52ac25bbfe0474ac6db + md5: 52bd06467f60645e2cf293bd00f957cd + sha256: 40e9730732769b8ef0797f025f108d998d8d2d4edb0c709da9e6dd65475064a6 category: main optional: false - name: frozendict - version: 2.3.10 + version: 2.4.0 manager: conda platform: osx-arm64 dependencies: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/frozendict-2.3.10-py39h17cfd9d_3.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/frozendict-2.4.0-py39h17cfd9d_0.conda hash: - md5: 6d5a1b15362833f5b308f56916d517ba - sha256: 1022b48e9380276276efcf8718669e29c96ec0f291df5af01fe7954a6ed82673 + md5: 30e673c8dfce564d6c2222ed0395ef8b + sha256: 
dc3079fb53f566c74661a03950d631950784fa18321c0efeb04037aeeb63ba15 category: main optional: false - name: frozenlist @@ -3851,8 +3944,8 @@ package: sphinx-inline-tabs: '' sphinx-basic-ng: '' python: '>=3.6' - sphinx: '>=4' pygments: '>=2.7' + sphinx: '>=4' url: https://conda.anaconda.org/conda-forge/noarch/furo-2023.5.20-pyhd8ed1ab_0.conda hash: md5: 786e474a83de249aecf767102d7fd87d @@ -3946,43 +4039,42 @@ package: category: main optional: false - name: gdal - version: 3.8.1 + version: 3.8.3 manager: conda platform: linux-64 dependencies: hdf5: '>=1.14.3,<1.14.4.0a0' libgcc-ng: '>=12' - libgdal: 3.8.1 + libgdal: 3.8.3 libstdcxx-ng: '>=12' - libxml2: '>=2.12.2,<2.13.0a0' + libxml2: '>=2.12.3,<3.0.0a0' numpy: '>=1.22.4,<2.0a0' openssl: '>=3.2.0,<4.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.8.1-py39h14df8fe_4.conda + url: https://conda.anaconda.org/conda-forge/linux-64/gdal-3.8.3-py39h14df8fe_0.conda hash: - md5: 9c0731012ed411761cc4ecede9c4d10f - sha256: e8307f25b414df4c6e2de46b2ed8b72fed934e953c219b8b7aa115f103d3a5d9 + md5: 9f556acdeaf436764dfc23a43de35864 + sha256: 449306555264d3bdfd3af16f3ad48541da5755ddeb1b9694b476830f05ca20aa category: main optional: false - name: gdal - version: 3.8.1 + version: 3.8.3 manager: conda platform: osx-arm64 dependencies: - __osx: '>=10.9' hdf5: '>=1.14.3,<1.14.4.0a0' - libcxx: '>=16.0.6' - libgdal: 3.8.1 - libxml2: '>=2.12.2,<2.13.0a0' + libcxx: '>=15' + libgdal: 3.8.3 + libxml2: '>=2.12.3,<3.0.0a0' numpy: '>=1.22.4,<2.0a0' openssl: '>=3.2.0,<4.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/gdal-3.8.1-py39h6f219a9_4.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/gdal-3.8.3-py39h46f8cc7_0.conda hash: - md5: 0f6dee62709305d9bd210136c4e0a99c - sha256: 140796728cee854e5b95f56a96c05e5c9c92b31d1abd5939ac05fd5ec17a54b9 + md5: 3b0371ffadf43f722814a003a6530b6d + sha256: 
948a26e95fec4eb6f4e473c1c38ea8ea77b0a5b2c25faabfe095d9973088abcb category: main optional: false - name: gdk-pixbuf @@ -4019,22 +4111,22 @@ package: category: main optional: false - name: geopandas - version: 0.14.1 + version: 0.14.2 manager: conda platform: linux-64 dependencies: fiona: '>=1.8.21' folium: '' - geopandas-base: 0.14.1 + geopandas-base: 0.14.2 mapclassify: '>=2.4.0' matplotlib-base: '' python: '>=3.9' rtree: '' xyzservices: '' - url: https://conda.anaconda.org/conda-forge/noarch/geopandas-0.14.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/geopandas-0.14.2-pyhd8ed1ab_0.conda hash: - md5: 6ce5f89fb1e2aa7e04d12c0008b3a745 - sha256: f3563ad6f1a55587c097337ece863e583c796c9a9df3ecb396bbfeec4ec309fb + md5: 4f873f6e48ae63d573c3e4937185027e + sha256: 5d0e42a7dbc4bec7fa716c55d5ece0f9a90fccc905bae7d619f46a83aa7efccc category: main optional: false - name: geopandas @@ -4057,7 +4149,7 @@ package: category: main optional: false - name: geopandas-base - version: 0.14.1 + version: 0.14.2 manager: conda platform: linux-64 dependencies: @@ -4066,10 +4158,10 @@ package: pyproj: '>=3.3.0' python: '>=3.9' shapely: '>=1.8.0' - url: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-0.14.1-pyha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/geopandas-base-0.14.2-pyha770c72_0.conda hash: - md5: d65c6f458bfdaa181f388d91e858ea67 - sha256: c813004bb84e50de19f599b188719e40106c858c7da22e504b29ce66e5043361 + md5: e825cfead1f4223911d9538f6c575c90 + sha256: d896b02d8107a3c61cacd8e2a56a11b931710aba59cf3baa748837ef48404252 category: main optional: false - name: geopandas-base @@ -4249,31 +4341,31 @@ package: category: main optional: false - name: gitpython - version: 3.1.40 + version: 3.1.41 manager: conda platform: linux-64 dependencies: gitdb: '>=4.0.1,<5' python: '>=3.7' typing_extensions: '>=3.7.4.3' - url: https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.40-pyhd8ed1ab_0.conda + url: 
https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.41-pyhd8ed1ab_0.conda hash: - md5: 6bf74c3b7c13079a91d4bd3da51cefcf - sha256: 6b85809ffbfe5c1887b674bf0492cc4dd1ac8a25f4d9fa20ef404be92186259b + md5: 84874a90c312088f7b5e63402fc44a58 + sha256: cf3c45156feec1fe8adfd3552ed70f4218e9771643cca8dd2673bca9dea04c9c category: main optional: false - name: gitpython - version: 3.1.40 + version: 3.1.41 manager: conda platform: osx-arm64 dependencies: python: '>=3.7' typing_extensions: '>=3.7.4.3' gitdb: '>=4.0.1,<5' - url: https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.40-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.41-pyhd8ed1ab_0.conda hash: - md5: 6bf74c3b7c13079a91d4bd3da51cefcf - sha256: 6b85809ffbfe5c1887b674bf0492cc4dd1ac8a25f4d9fa20ef404be92186259b + md5: 84874a90c312088f7b5e63402fc44a58 + sha256: cf3c45156feec1fe8adfd3552ed70f4218e9771643cca8dd2673bca9dea04c9c category: main optional: false - name: glog @@ -4423,7 +4515,7 @@ package: category: main optional: false - name: google-auth - version: 2.25.2 + version: 2.26.2 manager: conda platform: linux-64 dependencies: @@ -4436,14 +4528,14 @@ package: pyu2f: '>=0.1.5' requests: '>=2.20.0,<3.0.0' rsa: '>=3.1.4,<5' - url: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.25.2-pyhca7485f_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.26.2-pyhca7485f_0.conda hash: - md5: ef008fe13beb99a47cbe5a7a68a1f0ea - sha256: 928960978bfdb77a912db60659ec30ec7e04d1ec12ba12f38ac61134158320d0 + md5: 64a730f6101826d9d5c6e91f5e87f379 + sha256: ea337fd708b140c551bca4342c2fad0cf542f2862ecf38e77bc4a9237dd3c394 category: main optional: false - name: google-auth - version: 2.25.2 + version: 2.26.2 manager: conda platform: osx-arm64 dependencies: @@ -4456,14 +4548,14 @@ package: cachetools: '>=2.0.0,<6.0' aiohttp: '>=3.6.2,<4.0.0' cryptography: '>=38.0.3' - url: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.25.2-pyhca7485f_0.conda + 
url: https://conda.anaconda.org/conda-forge/noarch/google-auth-2.26.2-pyhca7485f_0.conda hash: - md5: ef008fe13beb99a47cbe5a7a68a1f0ea - sha256: 928960978bfdb77a912db60659ec30ec7e04d1ec12ba12f38ac61134158320d0 + md5: 64a730f6101826d9d5c6e91f5e87f379 + sha256: ea337fd708b140c551bca4342c2fad0cf542f2862ecf38e77bc4a9237dd3c394 category: main optional: false - name: google-auth-oauthlib - version: 1.0.0 + version: 1.2.0 manager: conda platform: linux-64 dependencies: @@ -4471,14 +4563,14 @@ package: google-auth: '>=2.15.0' python: '>=3.6' requests-oauthlib: '>=0.7.0' - url: https://conda.anaconda.org/conda-forge/noarch/google-auth-oauthlib-1.0.0-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-auth-oauthlib-1.2.0-pyhd8ed1ab_0.conda hash: - md5: 569e62e95b01b53e4ec7d9abe83b7385 - sha256: f89613643658a51a1ac0fb7c7950526fadc2a6ce1ae13755d786e14cfce1633c + md5: 2057f12885a73b4d621c075423cec969 + sha256: 39d031780d9ac2da430ead078a40ff67db3ad57e24ab1e3c68b4e0f2b48a2311 category: main optional: false - name: google-auth-oauthlib - version: 1.0.0 + version: 1.2.0 manager: conda platform: osx-arm64 dependencies: @@ -4486,20 +4578,20 @@ package: requests-oauthlib: '>=0.7.0' click: '>=6.0.0' google-auth: '>=2.15.0' - url: https://conda.anaconda.org/conda-forge/noarch/google-auth-oauthlib-1.0.0-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-auth-oauthlib-1.2.0-pyhd8ed1ab_0.conda hash: - md5: 569e62e95b01b53e4ec7d9abe83b7385 - sha256: f89613643658a51a1ac0fb7c7950526fadc2a6ce1ae13755d786e14cfce1633c + md5: 2057f12885a73b4d621c075423cec969 + sha256: 39d031780d9ac2da430ead078a40ff67db3ad57e24ab1e3c68b4e0f2b48a2311 category: main optional: false - name: google-cloud-bigquery - version: 3.14.1 + version: 3.16.0 manager: conda platform: linux-64 dependencies: db-dtypes: '>=0.3.0,<2.0.0dev' geopandas: '>=0.9.0,<1.0dev' - google-cloud-bigquery-core: 3.14.1 + google-cloud-bigquery-core: 3.16.0 google-cloud-bigquery-storage: 
'>=2.6.0,<3.0.0dev' grpcio: '>=1.49.1,<2.0dev' ipykernel: '>=6.0.0' @@ -4512,14 +4604,14 @@ package: python: '>=3.8' shapely: '>=1.8.4,<3.0.0dev' tqdm: '>=4.7.4,<=5.0.0dev' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-3.14.1-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-3.16.0-pyhd8ed1ab_0.conda hash: - md5: c6754ea972dfcf518b5abb4f8aefc8b9 - sha256: 63947e69bacff4029cd7c2d5e2c5c2c0e03f3c1bb0ccf2fa658d111a78317eb9 + md5: 615a7e8d130b31901c26483865aa7280 + sha256: a958f3944e06c55d5ddeaabfc8aa300c9dc6af21a5d4ba2a79702a1ed06b7bd9 category: main optional: false - name: google-cloud-bigquery - version: 3.14.1 + version: 3.16.0 manager: conda platform: osx-arm64 dependencies: @@ -4537,15 +4629,15 @@ package: ipython: '>=7.23.1,!=8.1.0' google-cloud-bigquery-storage: '>=2.6.0,<3.0.0dev' shapely: '>=1.8.4,<3.0.0dev' - google-cloud-bigquery-core: 3.14.1 - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-3.14.1-pyhd8ed1ab_1.conda + google-cloud-bigquery-core: 3.16.0 + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-3.16.0-pyhd8ed1ab_0.conda hash: - md5: c6754ea972dfcf518b5abb4f8aefc8b9 - sha256: 63947e69bacff4029cd7c2d5e2c5c2c0e03f3c1bb0ccf2fa658d111a78317eb9 + md5: 615a7e8d130b31901c26483865aa7280 + sha256: a958f3944e06c55d5ddeaabfc8aa300c9dc6af21a5d4ba2a79702a1ed06b7bd9 category: main optional: false - name: google-cloud-bigquery-core - version: 3.14.1 + version: 3.16.0 manager: conda platform: linux-64 dependencies: @@ -4556,14 +4648,14 @@ package: python: '>=3.8' python-dateutil: '>=2.7.2,<3.0dev' requests: '>=2.21.0,<3.0.0dev' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-core-3.14.1-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-core-3.16.0-pyhd8ed1ab_0.conda hash: - md5: 12104b8db939e54acdfbcb9092e58ba7 - sha256: 
3704662aac3005f4c7c92e8415c0927fdf69da355ab0bbe24640e55e3b413368 + md5: 3022f10d34999c3a0b03ce8da812e750 + sha256: 8b941269eaf2caf0b1f04312be2b36b48dc2250bf718b40f83bf933463a8677d category: main optional: false - name: google-cloud-bigquery-core - version: 3.14.1 + version: 3.16.0 manager: conda platform: osx-arm64 dependencies: @@ -4574,30 +4666,30 @@ package: requests: '>=2.21.0,<3.0.0dev' packaging: '>=20.0.0' google-cloud-core: '>=1.6.0,<3.0.0dev' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-core-3.14.1-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-core-3.16.0-pyhd8ed1ab_0.conda hash: - md5: 12104b8db939e54acdfbcb9092e58ba7 - sha256: 3704662aac3005f4c7c92e8415c0927fdf69da355ab0bbe24640e55e3b413368 + md5: 3022f10d34999c3a0b03ce8da812e750 + sha256: 8b941269eaf2caf0b1f04312be2b36b48dc2250bf718b40f83bf933463a8677d category: main optional: false - name: google-cloud-bigquery-storage - version: 2.22.0 + version: 2.24.0 manager: conda platform: linux-64 dependencies: fastavro: '>=0.21.2' - google-cloud-bigquery-storage-core: 2.22.0.* + google-cloud-bigquery-storage-core: 2.24.0.* pandas: '>=0.21.1' pyarrow: '>=0.15.0' python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-2.22.0-pyh1a96a4e_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-2.24.0-pyhca7485f_0.conda hash: - md5: 14a5213a076993679239aa0f07334ecd - sha256: b8777b626962e1912f8592e093d318699f7e42752ef90e85f68bc2fab01f7c2c + md5: 86a0630f1f8b18915d909503f17ca1e0 + sha256: 63935dd8bdc4da2d8164ada820492b7ec9beba3197b5cb9ab34495d8fd56b0cd category: main optional: false - name: google-cloud-bigquery-storage - version: 2.22.0 + version: 2.24.0 manager: conda platform: osx-arm64 dependencies: @@ -4605,15 +4697,15 @@ package: pyarrow: '>=0.15.0' fastavro: '>=0.21.2' pandas: '>=0.21.1' - google-cloud-bigquery-storage-core: 2.22.0.* - url: 
https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-2.22.0-pyh1a96a4e_0.conda + google-cloud-bigquery-storage-core: 2.24.0.* + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-2.24.0-pyhca7485f_0.conda hash: - md5: 14a5213a076993679239aa0f07334ecd - sha256: b8777b626962e1912f8592e093d318699f7e42752ef90e85f68bc2fab01f7c2c + md5: 86a0630f1f8b18915d909503f17ca1e0 + sha256: 63935dd8bdc4da2d8164ada820492b7ec9beba3197b5cb9ab34495d8fd56b0cd category: main optional: false - name: google-cloud-bigquery-storage-core - version: 2.22.0 + version: 2.24.0 manager: conda platform: linux-64 dependencies: @@ -4621,14 +4713,14 @@ package: proto-plus: '>=1.22.0,<2.0.0dev' protobuf: '>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-core-2.22.0-pyh1a96a4e_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-core-2.24.0-pyhca7485f_0.conda hash: - md5: 0869ea717d694cac0fbcd494a6f987c9 - sha256: 8af06f60b2c47cf882755a295d1a8168be978d712abba491d003bbf1a20ca62f + md5: 1d1a6f7938f1a29044e676d112c0b424 + sha256: 60994f98e7386bd5836dd04b74112d30286df2bb15e8edf2f2d460dd37821860 category: main optional: false - name: google-cloud-bigquery-storage-core - version: 2.22.0 + version: 2.24.0 manager: conda platform: osx-arm64 dependencies: @@ -4636,10 +4728,10 @@ package: proto-plus: '>=1.22.0,<2.0.0dev' protobuf: '>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' google-api-core-grpc: '>=1.34.0,<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*' - url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-core-2.22.0-pyh1a96a4e_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/google-cloud-bigquery-storage-core-2.24.0-pyhca7485f_0.conda hash: - 
md5: 0869ea717d694cac0fbcd494a6f987c9 - sha256: 8af06f60b2c47cf882755a295d1a8168be978d712abba491d003bbf1a20ca62f + md5: 1d1a6f7938f1a29044e676d112c0b424 + sha256: 60994f98e7386bd5836dd04b74112d30286df2bb15e8edf2f2d460dd37821860 category: main optional: false - name: google-cloud-core @@ -4895,7 +4987,7 @@ package: category: main optional: false - name: great-expectations - version: 0.18.5 + version: 0.18.8 manager: conda platform: linux-64 dependencies: @@ -4928,14 +5020,14 @@ package: typing-extensions: '>=3.10.0.0' tzlocal: '>=1.2' urllib3: '>=1.26' - url: https://conda.anaconda.org/conda-forge/noarch/great-expectations-0.18.5-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/great-expectations-0.18.8-pyhd8ed1ab_0.conda hash: - md5: 68b93891d7772b9fb2d33a9d48729a9c - sha256: 963fd0986b1de12c6605d1e6b55bc7085e77f09f14eecf60e99b94a5c3d542e7 + md5: aec88c7640df4638b89356cc72732d67 + sha256: 85f91a1d5ed6e80653a7f9d3564a4fdb5d67e1947e51479006ebd674c4bbb669 category: main optional: false - name: great-expectations - version: 0.18.5 + version: 0.18.8 manager: conda platform: osx-arm64 dependencies: @@ -4953,9 +5045,9 @@ package: scipy: '>=1.6.0' jsonpatch: '>=1.22' mistune: '>=0.8.4' + typing-extensions: '>=3.10.0.0' pytz: '>=2021.3' nbformat: '>=5.0' - typing-extensions: '>=3.10.0.0' ruamel.yaml: '>=0.16,<0.17.18' ipython: '>=7.16.3' cryptography: '>=3.2' @@ -4968,14 +5060,14 @@ package: altair: '>=4.2.1,<5.0.0' click: '>=7.1.2,!=8.1.4' pydantic: '>=1.9.2' - url: https://conda.anaconda.org/conda-forge/noarch/great-expectations-0.18.5-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/great-expectations-0.18.8-pyhd8ed1ab_0.conda hash: - md5: 68b93891d7772b9fb2d33a9d48729a9c - sha256: 963fd0986b1de12c6605d1e6b55bc7085e77f09f14eecf60e99b94a5c3d542e7 + md5: aec88c7640df4638b89356cc72732d67 + sha256: 85f91a1d5ed6e80653a7f9d3564a4fdb5d67e1947e51479006ebd674c4bbb669 category: main optional: false - name: greenlet - version: 3.0.2 + 
version: 3.0.3 manager: conda platform: linux-64 dependencies: @@ -4983,86 +5075,88 @@ package: libstdcxx-ng: '>=12' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/greenlet-3.0.2-py39h3d6467e_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/greenlet-3.0.3-py39h3d6467e_0.conda hash: - md5: b133b87a6f73b630be104d02776de451 - sha256: aafa5376e3bb65843f6e6cb65c86cb767b8ca9a3ee61e8647036e49a88ca7cc0 + md5: a240d46f8f326c4af01de1a1d8e615d0 + sha256: 3cc114aaf9051dc40dc63be9969284a6691fdbf9d55668a7e245de9bc3bbcf38 category: main optional: false - name: greenlet - version: 3.0.2 + version: 3.0.3 manager: conda platform: osx-arm64 dependencies: - __osx: '>=10.9' - libcxx: '>=16.0.6' + libcxx: '>=15' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/greenlet-3.0.2-py39h4ce5507_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/greenlet-3.0.3-py39hf3050f2_0.conda hash: - md5: 3f5b15ff81e34875d886eecddd19229e - sha256: 6a5c58a657f4892c481710dc1153e7fbab93be42c2a4141b9b41be074c9c1bf3 + md5: 92b3b5c1ca71c384789a1bc55cbd3d7e + sha256: 38f34512f3d48e9f24bb0a597e2ce8898e9fd9f2540a1412095d0fdc68e229be category: main optional: false - name: grpcio - version: 1.54.3 + version: 1.59.3 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' - libgrpc: 1.54.3 + libgrpc: 1.59.3 libstdcxx-ng: '>=12' + libzlib: '>=1.2.13,<1.3.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/grpcio-1.54.3-py39h227be39_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/grpcio-1.59.3-py39h174d805_0.conda hash: - md5: 88730642f4e64eab737907f903d9b253 - sha256: fd1efc4fe365f429d43ee679dc7d28b7c2b5ad829a63289fccfe0c7c05e660ab + md5: 936452359388a43140bec999adbba8e3 + sha256: 7136df5e62c5af83981e29733f95fe88140d9f02def426db5302f01777f43b24 category: main optional: false - name: grpcio - version: 1.54.3 + 
version: 1.59.3 manager: conda platform: osx-arm64 dependencies: - libcxx: '>=15.0.7' - libgrpc: 1.54.3 + __osx: '>=10.9' + libcxx: '>=16.0.6' + libgrpc: 1.59.3 + libzlib: '>=1.2.13,<1.3.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/grpcio-1.54.3-py39hb198ff7_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/grpcio-1.59.3-py39h52f163a_0.conda hash: - md5: 076cb3ed6721ebab7f73670d7671f15b - sha256: 8aabc2d3baf71cde6543169ef5aa206da3120858ad5aaa1c98bb0689010bdbcb + md5: 65c1499ad010b38f0acab66e753f43b8 + sha256: 9939546b8eaf9a5666b31f872e6d70950139b108a557f3a1a3f14f0548af1fb3 category: main optional: false - name: grpcio-status - version: 1.54.2 + version: 1.59.3 manager: conda platform: linux-64 dependencies: googleapis-common-protos: '>=1.5.5' - grpcio: '>=1.54.2' + grpcio: '>=1.59.3' protobuf: '>=4.21.6' - python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/grpcio-status-1.54.2-pyhd8ed1ab_0.conda + python: '>=3.7' + url: https://conda.anaconda.org/conda-forge/noarch/grpcio-status-1.59.3-pyhd8ed1ab_0.conda hash: - md5: c7e845594733f1cb1f0f6d60a71d0dae - sha256: 8420c3da19de43e75383f3c0c93d3f556dc9b26a4bbe08f7fc557ec3c93782d4 + md5: 667999da148378ada5b9f13e7f3850c3 + sha256: 45c00a3feaf80f29cbc4b2f6a7ef73b08ce4e4a847b32a82eed443c7dd95d0c9 category: main optional: false - name: grpcio-status - version: 1.54.2 + version: 1.59.3 manager: conda platform: osx-arm64 dependencies: - python: '>=3.6' + python: '>=3.7' googleapis-common-protos: '>=1.5.5' protobuf: '>=4.21.6' - grpcio: '>=1.54.2' - url: https://conda.anaconda.org/conda-forge/noarch/grpcio-status-1.54.2-pyhd8ed1ab_0.conda + grpcio: '>=1.59.3' + url: https://conda.anaconda.org/conda-forge/noarch/grpcio-status-1.59.3-pyhd8ed1ab_0.conda hash: - md5: c7e845594733f1cb1f0f6d60a71d0dae - sha256: 8420c3da19de43e75383f3c0c93d3f556dc9b26a4bbe08f7fc557ec3c93782d4 + md5: 667999da148378ada5b9f13e7f3850c3 + sha256: 
45c00a3feaf80f29cbc4b2f6a7ef73b08ce4e4a847b32a82eed443c7dd95d0c9 category: main optional: false - name: gtk2 @@ -5294,7 +5388,7 @@ package: category: main optional: false - name: huggingface_hub - version: 0.19.4 + version: 0.20.2 manager: conda platform: linux-64 dependencies: @@ -5306,14 +5400,14 @@ package: requests: '' tqdm: '>=4.42.1' typing-extensions: '>=3.7.4.3' - url: https://conda.anaconda.org/conda-forge/noarch/huggingface_hub-0.19.4-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/huggingface_hub-0.20.2-pyhd8ed1ab_0.conda hash: - md5: 89932559fd83abbc8f081f688a2e764f - sha256: 411e21dd3d54738f16208ec408b7cd88785dc7007ab4fe8db1aaf9f342fab279 + md5: 3f4e4c828523dcac4829d7362b60ef8f + sha256: 3d036f2cf10e8bea177514b667a21912774d339aa8403b85c6883b61ca041e09 category: main optional: false - name: huggingface_hub - version: 0.19.4 + version: 0.20.2 manager: conda platform: osx-arm64 dependencies: @@ -5323,12 +5417,12 @@ package: pyyaml: '>=5.1' packaging: '>=20.9' typing-extensions: '>=3.7.4.3' - tqdm: '>=4.42.1' fsspec: '>=2023.5.0' - url: https://conda.anaconda.org/conda-forge/noarch/huggingface_hub-0.19.4-pyhd8ed1ab_0.conda + tqdm: '>=4.42.1' + url: https://conda.anaconda.org/conda-forge/noarch/huggingface_hub-0.20.2-pyhd8ed1ab_0.conda hash: - md5: 89932559fd83abbc8f081f688a2e764f - sha256: 411e21dd3d54738f16208ec408b7cd88785dc7007ab4fe8db1aaf9f342fab279 + md5: 3f4e4c828523dcac4829d7362b60ef8f + sha256: 3d036f2cf10e8bea177514b667a21912774d339aa8403b85c6883b61ca041e09 category: main optional: false - name: icu @@ -5532,7 +5626,7 @@ package: category: main optional: false - name: ipykernel - version: 6.26.0 + version: 6.29.0 manager: conda platform: linux-64 dependencies: @@ -5547,17 +5641,17 @@ package: packaging: '' psutil: '' python: '>=3.8' - pyzmq: '>=20' + pyzmq: '>=24' tornado: '>=6.1' traitlets: '>=5.4.0' - url: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.26.0-pyhf8b6a83_0.conda + url: 
https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.0-pyhd33586a_0.conda hash: - md5: 2307f71f5f0896d4b91b93e6b468abff - sha256: 9e647454f7572101657a07820ebed294df9a6a527b041cd5e4dd98b8aa3db625 + md5: 10915bfa94b94f4ad0f347efd124a339 + sha256: fa82aa089d474d11e63deb4b433acb9618294fe6b3b08123d7a269f72c0cc8ca category: main optional: false - name: ipykernel - version: 6.26.0 + version: 6.29.0 manager: conda platform: osx-arm64 dependencies: @@ -5569,17 +5663,17 @@ package: python: '>=3.8' tornado: '>=6.1' jupyter_client: '>=6.1.12' + jupyter_core: '>=4.12,!=5.0.*' ipython: '>=7.23.1' matplotlib-inline: '>=0.1' - jupyter_core: '>=4.12,!=5.0.*' debugpy: '>=1.6.5' comm: '>=0.1.1' - pyzmq: '>=20' traitlets: '>=5.4.0' - url: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.26.0-pyh3cd1d5f_0.conda + pyzmq: '>=24' + url: https://conda.anaconda.org/conda-forge/noarch/ipykernel-6.29.0-pyh3cd1d5f_0.conda hash: - md5: 3c6e2148d30e6a762d8327a433ebfb5a - sha256: be9927d47fe23cc4d2a09d252e37e1e56ffb137767d2c0577ed882ead16f75fa + md5: 17b1e24f82177f01ac7c6c518288612c + sha256: 6df088b8d356281347a71388315c7bbf49f0a5a10e6b16000553a43d74f5222d category: main optional: false - name: ipython @@ -5805,29 +5899,29 @@ package: category: main optional: false - name: jinja2 - version: 3.1.2 + version: 3.1.3 manager: conda platform: linux-64 dependencies: markupsafe: '>=2.0' python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.3-pyhd8ed1ab_0.conda hash: - md5: c8490ed5c70966d232fdd389d0dbed37 - sha256: b045faba7130ab263db6a8fdc96b1a3de5fcf85c4a607c5f11a49e76851500b5 + md5: e7d8df6509ba635247ff9aea31134262 + sha256: fd517b7dd3a61eca34f8a6f9f92f306397149cae1204fce72ac3d227107dafdc category: main optional: false - name: jinja2 - version: 3.1.2 + version: 3.1.3 manager: conda platform: osx-arm64 dependencies: python: '>=3.7' markupsafe: '>=2.0' - url: 
https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.3-pyhd8ed1ab_0.conda hash: - md5: c8490ed5c70966d232fdd389d0dbed37 - sha256: b045faba7130ab263db6a8fdc96b1a3de5fcf85c4a607c5f11a49e76851500b5 + md5: e7d8df6509ba635247ff9aea31134262 + sha256: fd517b7dd3a61eca34f8a6f9f92f306397149cae1204fce72ac3d227107dafdc category: main optional: false - name: jmespath @@ -6006,7 +6100,7 @@ package: category: main optional: false - name: jsonschema - version: 4.20.0 + version: 4.21.0 manager: conda platform: linux-64 dependencies: @@ -6017,14 +6111,14 @@ package: python: '>=3.8' referencing: '>=0.28.4' rpds-py: '>=0.7.1' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.20.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.21.0-pyhd8ed1ab_0.conda hash: - md5: 1116d79def5268414fb0917520b2bbf1 - sha256: 77aae609097d06deedb8ef8407a44b23d5fef95962ba6fe1c959ac7bd6195296 + md5: 63dd555524e29934d1d3c9f7001ec4bd + sha256: 3562f0b6abaab7547ac3d86c5cd3eec30f0dc7072e136556ec168c8652911af7 category: main optional: false - name: jsonschema - version: 4.20.0 + version: 4.21.0 manager: conda platform: osx-arm64 dependencies: @@ -6035,42 +6129,42 @@ package: jsonschema-specifications: '>=2023.03.6' referencing: '>=0.28.4' rpds-py: '>=0.7.1' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.20.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.21.0-pyhd8ed1ab_0.conda hash: - md5: 1116d79def5268414fb0917520b2bbf1 - sha256: 77aae609097d06deedb8ef8407a44b23d5fef95962ba6fe1c959ac7bd6195296 + md5: 63dd555524e29934d1d3c9f7001ec4bd + sha256: 3562f0b6abaab7547ac3d86c5cd3eec30f0dc7072e136556ec168c8652911af7 category: main optional: false - name: jsonschema-specifications - version: 2023.11.2 + version: 2023.12.1 manager: conda platform: linux-64 dependencies: importlib_resources: '>=1.4.0' python: '>=3.8' 
referencing: '>=0.31.0' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.11.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda hash: - md5: 73884ca36d6d96cbce498cde99fab40f - sha256: e26115d02dc208a05b557c8dd670923270803b9b3b8af4e22b93d659d1ec77ec + md5: a0e4efb5f35786a05af4809a2fb1f855 + sha256: a9630556ddc3121c0be32f4cbf792dd9102bd380d5cd81d57759d172cf0c2da2 category: main optional: false - name: jsonschema-specifications - version: 2023.11.2 + version: 2023.12.1 manager: conda platform: osx-arm64 dependencies: python: '>=3.8' importlib_resources: '>=1.4.0' referencing: '>=0.31.0' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.11.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda hash: - md5: 73884ca36d6d96cbce498cde99fab40f - sha256: e26115d02dc208a05b557c8dd670923270803b9b3b8af4e22b93d659d1ec77ec + md5: a0e4efb5f35786a05af4809a2fb1f855 + sha256: a9630556ddc3121c0be32f4cbf792dd9102bd380d5cd81d57759d172cf0c2da2 category: main optional: false - name: jsonschema-with-format-nongpl - version: 4.20.0 + version: 4.21.0 manager: conda platform: linux-64 dependencies: @@ -6078,20 +6172,20 @@ package: idna: '' isoduration: '' jsonpointer: '>1.13' - jsonschema: '>=4.20.0,<4.20.1.0a0' + jsonschema: '>=4.21.0,<4.21.1.0a0' python: '' rfc3339-validator: '' rfc3986-validator: '>0.1.0' uri-template: '' webcolors: '>=1.11' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.20.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.21.0-pyhd8ed1ab_0.conda hash: - md5: a168c5f84010711f6d4ae650bc22b480 - sha256: 03558b25daa57137fdf98e92731ba50ff5506f265294ac2eef5ec465c76ecf57 + md5: 790538f1e539e9d44584cf49422053ca + sha256: 
657a5597ff1060ca74ea5431d6f9743792fa62d59c4541ac83ebf105b80e1011 category: main optional: false - name: jsonschema-with-format-nongpl - version: 4.20.0 + version: 4.21.0 manager: conda platform: osx-arm64 dependencies: @@ -6104,11 +6198,11 @@ package: jsonpointer: '>1.13' webcolors: '>=1.11' rfc3986-validator: '>0.1.0' - jsonschema: '>=4.20.0,<4.20.1.0a0' - url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.20.0-pyhd8ed1ab_0.conda + jsonschema: '>=4.21.0,<4.21.1.0a0' + url: https://conda.anaconda.org/conda-forge/noarch/jsonschema-with-format-nongpl-4.21.0-pyhd8ed1ab_0.conda hash: - md5: a168c5f84010711f6d4ae650bc22b480 - sha256: 03558b25daa57137fdf98e92731ba50ff5506f265294ac2eef5ec465c76ecf57 + md5: 790538f1e539e9d44584cf49422053ca + sha256: 657a5597ff1060ca74ea5431d6f9743792fa62d59c4541ac83ebf105b80e1011 category: main optional: false - name: jupyter @@ -6176,8 +6270,8 @@ package: click: '' importlib-metadata: '' tabulate: '' - nbformat: '' attrs: '' + nbformat: '' python: '>=3.8' sqlalchemy: '>=1.3.12,<3' nbclient: '>=0.2,<0.8' @@ -6188,31 +6282,31 @@ package: category: main optional: false - name: jupyter-lsp - version: 2.2.1 + version: 2.2.2 manager: conda platform: linux-64 dependencies: importlib-metadata: '>=4.8.3' jupyter_server: '>=1.1.2' python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.2-pyhd8ed1ab_0.conda hash: - md5: d1a5efc65bfabc3bfebf4d3a204da897 - sha256: 0f995f60609fb50db74bed3637165ad202cf091ec0804519c11b6cffce901e88 + md5: ed56b103cac2db68f22909e9f5cca6b6 + sha256: d8ab253be3df67be1b31fe040a8386e071ff065ef4442b94a722a45fa3562fbe category: main optional: false - name: jupyter-lsp - version: 2.2.1 + version: 2.2.2 manager: conda platform: osx-arm64 dependencies: python: '>=3.8' importlib-metadata: '>=4.8.3' jupyter_server: '>=1.1.2' - url: 
https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter-lsp-2.2.2-pyhd8ed1ab_0.conda hash: - md5: d1a5efc65bfabc3bfebf4d3a204da897 - sha256: 0f995f60609fb50db74bed3637165ad202cf091ec0804519c11b6cffce901e88 + md5: ed56b103cac2db68f22909e9f5cca6b6 + sha256: d8ab253be3df67be1b31fe040a8386e071ff065ef4442b94a722a45fa3562fbe category: main optional: false - name: jupyter_client @@ -6292,7 +6386,7 @@ package: category: main optional: false - name: jupyter_core - version: 5.5.0 + version: 5.7.1 manager: conda platform: linux-64 dependencies: @@ -6300,14 +6394,14 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* traitlets: '>=5.3' - url: https://conda.anaconda.org/conda-forge/linux-64/jupyter_core-5.5.0-py39hf3d152e_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/jupyter_core-5.7.1-py39hf3d152e_0.conda hash: - md5: 87274099cd45c591edd6ed59bcc96095 - sha256: 23e7d01036b8c25bb054b28e4ee299a2cac7ae62af672bdda5f814c7b6ff470d + md5: 0195c150e0768bc4caccdff46a12075c + sha256: 7527ebd9196d4d22c428854e065dc336a8d184bbd1f7264fa1139af493d3f7d6 category: main optional: false - name: jupyter_core - version: 5.5.0 + version: 5.7.1 manager: conda platform: osx-arm64 dependencies: @@ -6315,10 +6409,10 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* traitlets: '>=5.3' - url: https://conda.anaconda.org/conda-forge/osx-arm64/jupyter_core-5.5.0-py39h2804cbe_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/jupyter_core-5.7.1-py39h2804cbe_0.conda hash: - md5: 42a8bd9a68a56049e0e4b29209c48c6d - sha256: 71c4ff910e60afa6a336e7d3b6d9332e615411e1810197d9076629314c208999 + md5: 7a804a3d9c0a04e3b0e0ce1a9f3fdc03 + sha256: e4cb996a1878b0cace1a8e4d05543099fe0a9c691c3932662c04709a4197ce95 category: main optional: false - name: jupyter_events @@ -6360,7 +6454,7 @@ package: category: main optional: false - name: jupyter_server - version: 2.12.1 + version: 2.12.5 manager: conda 
platform: linux-64 dependencies: @@ -6383,14 +6477,14 @@ package: tornado: '>=6.2.0' traitlets: '>=5.6.0' websocket-client: '' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.12.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.12.5-pyhd8ed1ab_0.conda hash: - md5: e9781be1e6c93b5df2c180a9f9242420 - sha256: c4aabe2041afb8fde1f049549c2e292265612d07dd4d1156f3e183ba6a6f007b + md5: 755177a956fa6dd90d5cfcbbb5084de2 + sha256: 43dcd238c656c7ecf3228be8735def530cad5181f990c042ba202b9e383d2b1f category: main optional: false - name: jupyter_server - version: 2.12.1 + version: 2.12.5 manager: conda platform: osx-arm64 dependencies: @@ -6404,49 +6498,49 @@ package: python: '>=3.8' terminado: '>=0.8.3' jupyter_core: '>=4.12,!=5.0.*' - nbconvert-core: '>=6.4.4' tornado: '>=6.2.0' + nbconvert-core: '>=6.4.4' + pyzmq: '>=24' jupyter_client: '>=7.4.4' nbformat: '>=5.3.0' - pyzmq: '>=24' traitlets: '>=5.6.0' anyio: '>=3.1.0' send2trash: '>=1.8.2' jupyter_events: '>=0.9.0' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.12.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server-2.12.5-pyhd8ed1ab_0.conda hash: - md5: e9781be1e6c93b5df2c180a9f9242420 - sha256: c4aabe2041afb8fde1f049549c2e292265612d07dd4d1156f3e183ba6a6f007b + md5: 755177a956fa6dd90d5cfcbbb5084de2 + sha256: 43dcd238c656c7ecf3228be8735def530cad5181f990c042ba202b9e383d2b1f category: main optional: false - name: jupyter_server_terminals - version: 0.5.0 + version: 0.5.1 manager: conda platform: linux-64 dependencies: python: '>=3.8' terminado: '>=0.8.3' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.1-pyhd8ed1ab_0.conda hash: - md5: 37a8b4098d428ecd40e58f8ec8a8e77d - sha256: b2c769977c258e5a81d541fd526d01083fc6b8c8dfdd4822795a898626bc81e6 + md5: 
919e6d570f8b3839f3a1ed99b25088af + sha256: 488676cc34049a8a80002d323c4d83c03e6188a8f31ebfb02d43e20d183b3662 category: main optional: false - name: jupyter_server_terminals - version: 0.5.0 + version: 0.5.1 manager: conda platform: osx-arm64 dependencies: python: '>=3.8' terminado: '>=0.8.3' - url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyter_server_terminals-0.5.1-pyhd8ed1ab_0.conda hash: - md5: 37a8b4098d428ecd40e58f8ec8a8e77d - sha256: b2c769977c258e5a81d541fd526d01083fc6b8c8dfdd4822795a898626bc81e6 + md5: 919e6d570f8b3839f3a1ed99b25088af + sha256: 488676cc34049a8a80002d323c4d83c03e6188a8f31ebfb02d43e20d183b3662 category: main optional: false - name: jupyterlab - version: 4.0.9 + version: 4.0.10 manager: conda platform: linux-64 dependencies: @@ -6465,14 +6559,14 @@ package: tomli: '' tornado: '>=6.2.0' traitlets: '' - url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.0.9-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.0.10-pyhd8ed1ab_0.conda hash: - md5: 7da6e874b0904e411ec2fd8e6082841e - sha256: 1c55e63e4b84810796c8827370ebd597ad3f45bcd0c1fa9975a363bc6a895f23 + md5: a2a505f332f32914004f9b058fd9d0c2 + sha256: 056abf47ec7c6bfb32f5e01eedca32ac881d85cc6e648c0b86dce65f64ceb06c category: main optional: false - name: jupyterlab - version: 4.0.9 + version: 4.0.10 manager: conda platform: osx-arm64 dependencies: @@ -6491,10 +6585,10 @@ package: async-lru: '>=1.0.0' jupyterlab_server: '>=2.19.0,<3' notebook-shim: '>=0.2' - url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.0.9-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupyterlab-4.0.10-pyhd8ed1ab_0.conda hash: - md5: 7da6e874b0904e411ec2fd8e6082841e - sha256: 1c55e63e4b84810796c8827370ebd597ad3f45bcd0c1fa9975a363bc6a895f23 + md5: a2a505f332f32914004f9b058fd9d0c2 + sha256: 
056abf47ec7c6bfb32f5e01eedca32ac881d85cc6e648c0b86dce65f64ceb06c category: main optional: false - name: jupyterlab_pygments @@ -6588,7 +6682,7 @@ package: category: main optional: false - name: jupytext - version: 1.16.0 + version: 1.16.1 manager: conda platform: linux-64 dependencies: @@ -6599,14 +6693,14 @@ package: python: '>=3.8' pyyaml: '' toml: '' - url: https://conda.anaconda.org/conda-forge/noarch/jupytext-1.16.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupytext-1.16.1-pyhd8ed1ab_0.conda hash: - md5: 562071ea5a2172022aadf6c3ddbf22eb - sha256: 9050a4c3ef58c61e0f6f8fff270b8c20617a35a4d14f651cc76af52665563a4a + md5: 14a45070afec994235a23ae09b098cce + sha256: 450d03ec711a5cbd643f99f4fb2f08aa167db7a0cb54dcbb53700c81b290c316 category: main optional: false - name: jupytext - version: 1.16.0 + version: 1.16.1 manager: conda platform: osx-arm64 dependencies: @@ -6617,61 +6711,61 @@ package: mdit-py-plugins: '' python: '>=3.8' markdown-it-py: '>=1.0' - url: https://conda.anaconda.org/conda-forge/noarch/jupytext-1.16.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/jupytext-1.16.1-pyhd8ed1ab_0.conda hash: - md5: 562071ea5a2172022aadf6c3ddbf22eb - sha256: 9050a4c3ef58c61e0f6f8fff270b8c20617a35a4d14f651cc76af52665563a4a + md5: 14a45070afec994235a23ae09b098cce + sha256: 450d03ec711a5cbd643f99f4fb2f08aa167db7a0cb54dcbb53700c81b290c316 category: main optional: false - name: kealib - version: 1.5.2 + version: 1.5.3 manager: conda platform: linux-64 dependencies: - hdf5: '>=1.14.2,<1.14.4.0a0' + hdf5: '>=1.14.3,<1.14.4.0a0' libgcc-ng: '>=12' libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.2-hcd42e92_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-h2f55d51_0.conda hash: - md5: b04c039f0bd511533a0d8bc8a7b6835e - sha256: 1278aaba7bfd9a143a58a2d5e13296702b6bd77f7b43f6ecace555a55579bdad + md5: f7e7077802927590efc8bf7328208f12 + sha256: 
ee0934ff426d3cab015055808bed33eb9d20f635ec14bc421c596f4b70927102 category: main optional: false - name: kealib - version: 1.5.2 + version: 1.5.3 manager: conda platform: osx-arm64 dependencies: - hdf5: '>=1.14.2,<1.14.4.0a0' - libcxx: '>=15.0.7' - url: https://conda.anaconda.org/conda-forge/osx-arm64/kealib-1.5.2-h47b5e36_1.conda + hdf5: '>=1.14.3,<1.14.4.0a0' + libcxx: '>=15' + url: https://conda.anaconda.org/conda-forge/osx-arm64/kealib-1.5.3-h210d843_0.conda hash: - md5: 88abe34211296bbc0ba1871fd2b13962 - sha256: 93e9b03cd9035766c43e5f7f851fc07a4f68b79fd48c1306280f17093a8ae746 + md5: 0153b4907333b9005f48d19584e4153e + sha256: f9bae19e49eda17d32b1ca6cabe501e09b00ba10f6d061fc8a14086a8455710e category: main optional: false - name: keras - version: 2.14.0 + version: 2.15.0 manager: conda platform: linux-64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/keras-2.14.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/keras-2.15.0-pyhd8ed1ab_0.conda hash: - md5: 1f549fcd82b651b879789fd922e32e1f - sha256: 1e45448fd8da68a5cc27e6a8f01e0bddbefbe3e33cb638fd3a65149b64c69393 + md5: 91e789823c9a5577a0a6979d7e594159 + sha256: 7a1144e42f7815a216c46038e3c71b004feb3082fb4e9b9cf9abc5da725d8448 category: main optional: false - name: keras - version: 2.14.0 + version: 2.15.0 manager: conda platform: osx-arm64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/keras-2.14.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/keras-2.15.0-pyhd8ed1ab_0.conda hash: - md5: 1f549fcd82b651b879789fd922e32e1f - sha256: 1e45448fd8da68a5cc27e6a8f01e0bddbefbe3e33cb638fd3a65149b64c69393 + md5: 91e789823c9a5577a0a6979d7e594159 + sha256: 7a1144e42f7815a216c46038e3c71b004feb3082fb4e9b9cf9abc5da725d8448 category: main optional: false - name: keyring @@ -6943,28 +7037,28 @@ package: category: main optional: false - name: libabseil - version: '20230125.3' + version: '20230802.1' manager: conda 
platform: linux-64 dependencies: libgcc-ng: '>=12' libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20230125.3-cxx17_h59595ed_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libabseil-20230802.1-cxx17_h59595ed_0.conda hash: - md5: d1db1b8be7c3a8983dcbbbfe4f0765de - sha256: 3c6fab31ed4dc8428605588454596b307b1bd59d33b0c7073c407ab51408b011 + md5: 2785ddf4cb0e7e743477991d64353947 + sha256: 8729021a93e67bb93b4e73ef0a132499db516accfea11561b667635bcd0507e7 category: main optional: false - name: libabseil - version: '20230125.3' + version: '20230802.1' manager: conda platform: osx-arm64 dependencies: libcxx: '>=15.0.7' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20230125.3-cxx17_h13dd4ca_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libabseil-20230802.1-cxx17_h13dd4ca_0.conda hash: - md5: e9edfc273c30153b3427332e90110422 - sha256: dacf281442b411eb4a4dece69618c247dbaacaa71669ca6631fc924be86ceab8 + md5: fb6dfadc1898666616dfda242d8aea10 + sha256: 459a58f36607246b4483d7a370c2d9a03e7f824e79da2c6e3e9d62abf80393e7 category: main optional: false - name: libaec @@ -6999,7 +7093,7 @@ package: dependencies: bzip2: '>=1.0.8,<2.0a0' libgcc-ng: '>=12' - libxml2: '>=2.12.2,<2.13.0a0' + libxml2: '>=2.12.2,<3.0.0a0' libzlib: '>=1.2.13,<1.3.0a0' lz4-c: '>=1.9.3,<1.10.0a0' lzo: '>=2.10,<3.0a0' @@ -7019,7 +7113,7 @@ package: dependencies: bzip2: '>=1.0.8,<2.0a0' libiconv: '>=1.17,<2.0a0' - libxml2: '>=2.12.2,<2.13.0a0' + libxml2: '>=2.12.2,<3.0.0a0' libzlib: '>=1.2.13,<1.3.0a0' lz4-c: '>=1.9.3,<1.10.0a0' lzo: '>=2.10,<3.0a0' @@ -7033,67 +7127,69 @@ package: category: main optional: false - name: libarrow - version: 12.0.1 + version: 13.0.0 manager: conda platform: linux-64 dependencies: - aws-crt-cpp: '>=0.21.0,<0.21.1.0a0' - aws-sdk-cpp: '>=1.10.57,<1.10.58.0a0' + aws-crt-cpp: '>=0.26.0,<0.26.1.0a0' + aws-sdk-cpp: '>=1.11.210,<1.11.211.0a0' bzip2: '>=1.0.8,<2.0a0' glog: '>=0.6.0,<0.7.0a0' - libabseil: 
'>=20230125.3,<20230126.0a0' - libbrotlidec: '>=1.0.9,<1.1.0a0' - libbrotlienc: '>=1.0.9,<1.1.0a0' + libabseil: '>=20230802.1,<20230803.0a0' + libbrotlidec: '>=1.1.0,<1.2.0a0' + libbrotlienc: '>=1.1.0,<1.2.0a0' libgcc-ng: '>=12' libgoogle-cloud: '>=2.12.0,<2.13.0a0' - libgrpc: '>=1.54.3,<1.55.0a0' - libprotobuf: '>=3.21.12,<3.22.0a0' + libgrpc: '>=1.59.3,<1.60.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' + libre2-11: '>=2023.6.2,<2024.0a0' libstdcxx-ng: '>=12' - libthrift: '>=0.18.1,<0.18.2.0a0' + libthrift: '>=0.19.0,<0.19.1.0a0' libutf8proc: '>=2.8.0,<3.0a0' libzlib: '>=1.2.13,<1.3.0a0' lz4-c: '>=1.9.3,<1.10.0a0' - openssl: '>=3.1.2,<4.0a0' - orc: '>=1.9.0,<1.9.1.0a0' - re2: '>=2023.3.2,<2023.3.3.0a0' + openssl: '>=3.2.0,<4.0a0' + orc: '>=1.9.2,<1.9.3.0a0' + re2: '' snappy: '>=1.1.10,<2.0a0' - ucx: '>=1.14.0,<1.15.0a0' - zstd: '>=1.5.2,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-12.0.1-hb87d912_8_cpu.conda + ucx: '>=1.15.0,<1.16.0a0' + zstd: '>=1.5.5,<1.6.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libarrow-13.0.0-h75ae66a_23_cpu.conda hash: - md5: 3f3b11398fe79b578e3c44dd00a44e4a - sha256: 046cc9a10999c58d351e3778ba7de5f694f90f46384726f26e6ab0e2826ccff7 + md5: a474ac3e22c8f4c7540533e11e3ea820 + sha256: cd40afed871226bdfb5bc87f92899269006d02373f8b853efc2c4d2bde96c371 category: main optional: false - name: libarrow - version: 12.0.1 + version: 13.0.0 manager: conda platform: osx-arm64 dependencies: - aws-crt-cpp: '>=0.21.0,<0.21.1.0a0' - aws-sdk-cpp: '>=1.10.57,<1.10.58.0a0' + aws-crt-cpp: '>=0.26.0,<0.26.1.0a0' + aws-sdk-cpp: '>=1.11.210,<1.11.211.0a0' bzip2: '>=1.0.8,<2.0a0' glog: '>=0.6.0,<0.7.0a0' - libabseil: '>=20230125.3,<20230126.0a0' - libbrotlidec: '>=1.0.9,<1.1.0a0' - libbrotlienc: '>=1.0.9,<1.1.0a0' - libcxx: '>=15.0.7' + libabseil: '>=20230802.1,<20230803.0a0' + libbrotlidec: '>=1.1.0,<1.2.0a0' + libbrotlienc: '>=1.1.0,<1.2.0a0' + libcxx: '>=14' libgoogle-cloud: '>=2.12.0,<2.13.0a0' - libgrpc: 
'>=1.54.3,<1.55.0a0' - libprotobuf: '>=3.21.12,<3.22.0a0' - libthrift: '>=0.18.1,<0.18.2.0a0' + libgrpc: '>=1.59.3,<1.60.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' + libre2-11: '>=2023.6.2,<2024.0a0' + libthrift: '>=0.19.0,<0.19.1.0a0' libutf8proc: '>=2.8.0,<3.0a0' libzlib: '>=1.2.13,<1.3.0a0' lz4-c: '>=1.9.3,<1.10.0a0' - openssl: '>=3.1.2,<4.0a0' - orc: '>=1.9.0,<1.9.1.0a0' - re2: '>=2023.3.2,<2023.3.3.0a0' + openssl: '>=3.2.0,<4.0a0' + orc: '>=1.9.2,<1.9.3.0a0' + re2: '' snappy: '>=1.1.10,<2.0a0' - zstd: '>=1.5.2,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-12.0.1-h0b136c2_8_cpu.conda + zstd: '>=1.5.5,<1.6.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libarrow-13.0.0-h7c288cf_23_cpu.conda hash: - md5: bfd29a8b76d92468d98d473f4d587796 - sha256: 7852b48ec223c71371f4dac033e5823c64c80bf887a432cdddfafe9a6c91dfce + md5: 9fed5c58b68f1cd832dca7b5fcd1be2a + sha256: 30846fd0da328e5f2027feb3eebcc89b2b75ff7f506ab6f52a423d237a2faa6f category: main optional: false - name: libblas @@ -7121,98 +7217,98 @@ package: category: main optional: false - name: libboost-headers - version: 1.83.0 + version: 1.84.0 manager: conda platform: linux-64 dependencies: {} - url: https://conda.anaconda.org/conda-forge/linux-64/libboost-headers-1.83.0-ha770c72_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libboost-headers-1.84.0-ha770c72_0.conda hash: - md5: 1fc57b3ba24d18cc75f431d7feb2c785 - sha256: aaa194e8b7ba401e6507a2f6dc0714d2f8f5a9951f8be18b96c250b0a1175982 + md5: 9c595e87653a36aa4d8c71b4e2f7e586 + sha256: f6a6eb40a33b32eaab5d9ab36567b126f54c0b2112fe53131cf7b55afff2d0d9 category: main optional: false - name: libboost-headers - version: 1.83.0 + version: 1.84.0 manager: conda platform: osx-arm64 dependencies: {} - url: https://conda.anaconda.org/conda-forge/osx-arm64/libboost-headers-1.83.0-hce30654_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libboost-headers-1.84.0-hce30654_0.conda hash: - md5: 
ff53ab92ecb249691bb72d0561ff13ed - sha256: e02286d2a73f70f0fd71e1db41d0feac2276dbc9a5479d7f0e8074f4b459faac + md5: 284cebe4165a186462c4471c11f5ee96 + sha256: ca1bcaac05608ed420650c1cb329bbab7bb0a059dce7f7803a592fcc414d4604 category: main optional: false - name: libbrotlicommon - version: 1.0.9 + version: 1.1.0 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_9.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hd590300_1.conda hash: - md5: 61641e239f96eae2b8492dc7e755828c - sha256: fc57c0876695c5b4ab7173438580c1d7eaa7dccaf14cb6467ca9e0e97abe0cf0 + md5: aec6c91c7371c26392a06708a73c70e5 + sha256: 40f29d1fab92c847b083739af86ad2f36d8154008cf99b64194e4705a1725d78 category: main optional: false - name: libbrotlicommon - version: 1.0.9 + version: 1.1.0 manager: conda platform: osx-arm64 dependencies: {} - url: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.0.9-h1a8c8d9_9.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlicommon-1.1.0-hb547adb_1.conda hash: - md5: 82354022c67480c61419b6e47377af89 - sha256: 53f4a6cc4f5795adf33fda00b86a0e91513c534ae44859754e9c07954d3a7148 + md5: cd68f024df0304be41d29a9088162b02 + sha256: 556f0fddf4bd4d35febab404d98cb6862ce3b7ca843e393da0451bfc4654cf07 category: main optional: false - name: libbrotlidec - version: 1.0.9 + version: 1.1.0 manager: conda platform: linux-64 dependencies: - libbrotlicommon: 1.0.9 + libbrotlicommon: 1.1.0 libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_9.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hd590300_1.conda hash: - md5: 081aa22f4581c08e4372b0b6c2f8478e - sha256: 564f301430c3c61bc5e149e74157ec181ed2a758befc89f7c38466d515a0f614 + md5: f07002e225d7a60a694d42a7bf5ff53f + sha256: 86fc861246fbe5ad85c1b6b3882aaffc89590a48b42d794d3d5c8e6d99e5f926 category: 
main optional: false - name: libbrotlidec - version: 1.0.9 + version: 1.1.0 manager: conda platform: osx-arm64 dependencies: - libbrotlicommon: 1.0.9 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.0.9-h1a8c8d9_9.conda + libbrotlicommon: 1.1.0 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlidec-1.1.0-hb547adb_1.conda hash: - md5: af03c66e8cb688221bdc9e2b0faaa2bf - sha256: 2de613dcccbbe40f90a256784ab23f7292aaa0985642ca35496cb9c177d8220b + md5: ee1a519335cc10d0ec7e097602058c0a + sha256: c1c85937828ad3bc434ac60b7bcbde376f4d2ea4ee42d15d369bf2a591775b4a category: main optional: false - name: libbrotlienc - version: 1.0.9 + version: 1.1.0 manager: conda platform: linux-64 dependencies: - libbrotlicommon: 1.0.9 + libbrotlicommon: 1.1.0 libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_9.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hd590300_1.conda hash: - md5: 1f0a03af852a9659ed2bf08f2f1704fd - sha256: d27bc2562ea3f3b2bfd777f074f1cac6bfa4a737233dad288cd87c4634a9bb3a + md5: 5fc11c6020d421960607d821310fcd4d + sha256: f751b8b1c4754a2a8dfdc3b4040fa7818f35bbf6b10e905a47d3a194b746b071 category: main optional: false - name: libbrotlienc - version: 1.0.9 + version: 1.1.0 manager: conda platform: osx-arm64 dependencies: - libbrotlicommon: 1.0.9 - url: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.0.9-h1a8c8d9_9.conda + libbrotlicommon: 1.1.0 + url: https://conda.anaconda.org/conda-forge/osx-arm64/libbrotlienc-1.1.0-hb547adb_1.conda hash: - md5: 8231f81e72b1113eb2ed8d2586c82691 - sha256: 37e766c0b87d06637bdfc68e072c227ce2dac82b81d26b5f9ac57fb948e2e2b7 + md5: d7e077f326a98b2cc60087eaff7c730b + sha256: 690dfc98e891ee1871c54166d30f6e22edfc2d7d6b29e7988dde5f1ce271c81a category: main optional: false - name: libcblas @@ -7516,7 +7612,7 @@ package: category: main optional: false - name: libgdal - version: 3.8.1 + version: 3.8.3 manager: conda 
platform: linux-64 dependencies: @@ -7530,7 +7626,7 @@ package: hdf4: '>=4.2.15,<4.2.16.0a0' hdf5: '>=1.14.3,<1.14.4.0a0' json-c: '>=0.17,<0.18.0a0' - kealib: '>=1.5.2,<1.6.0a0' + kealib: '>=1.5.3,<1.6.0a0' lerc: '>=4.0.0,<5.0a0' libaec: '>=1.1.2,<2.0a0' libarchive: '>=3.7.2,<3.8.0a0' @@ -7550,7 +7646,7 @@ package: libtiff: '>=4.6.0,<4.7.0a0' libuuid: '>=2.38.1,<3.0a0' libwebp-base: '>=1.3.2,<2.0a0' - libxml2: '>=2.12.2,<2.13.0a0' + libxml2: '>=2.12.3,<3.0.0a0' libzlib: '>=1.2.13,<1.3.0a0' lz4-c: '>=1.9.3,<1.10.0a0' openjpeg: '>=2.5.0,<3.0a0' @@ -7559,22 +7655,21 @@ package: poppler: '>=23.12.0,<23.13.0a0' postgresql: '' proj: '>=9.3.1,<9.3.2.0a0' - tiledb: '>=2.18.2,<2.19.0a0' - xerces-c: '>=3.2.4,<3.3.0a0' + tiledb: '>=2.19.0,<2.20.0a0' + xerces-c: '>=3.2.5,<3.3.0a0' xz: '>=5.2.6,<6.0a0' zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.8.1-hed8bd54_4.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.8.3-hcd1fc54_0.conda hash: - md5: 32e453fb234a3534069396da161b145b - sha256: 8461bd176f6b526d856c28ad42c0899683211104c0609bbba5b897466597a34c + md5: ef5ae0528509a7987cf29e8827f46938 + sha256: 70b40ec4c171010895920000bf877b7454474df0d7473117277b22a0727b7aa4 category: main optional: false - name: libgdal - version: 3.8.1 + version: 3.8.3 manager: conda platform: osx-arm64 dependencies: - __osx: '>=10.9' blosc: '>=1.21.5,<2.0a0' cfitsio: '>=4.3.1,<4.3.2.0a0' freexl: '>=2.0.0,<3.0a0' @@ -7584,12 +7679,12 @@ package: hdf4: '>=4.2.15,<4.2.16.0a0' hdf5: '>=1.14.3,<1.14.4.0a0' json-c: '>=0.17,<0.18.0a0' - kealib: '>=1.5.2,<1.6.0a0' + kealib: '>=1.5.3,<1.6.0a0' lerc: '>=4.0.0,<5.0a0' libaec: '>=1.1.2,<2.0a0' libarchive: '>=3.7.2,<3.8.0a0' libcurl: '>=8.5.0,<9.0a0' - libcxx: '>=16.0.6' + libcxx: '>=15' libdeflate: '>=1.19,<1.20.0a0' libexpat: '>=2.5.0,<3.0a0' libiconv: '>=1.17,<2.0a0' @@ -7602,7 +7697,7 @@ package: libsqlite: '>=3.44.2,<4.0a0' libtiff: '>=4.6.0,<4.7.0a0' libwebp-base: '>=1.3.2,<2.0a0' - libxml2: 
'>=2.12.2,<2.13.0a0' + libxml2: '>=2.12.3,<3.0.0a0' libzlib: '>=1.2.13,<1.3.0a0' lz4-c: '>=1.9.3,<1.10.0a0' openjpeg: '>=2.5.0,<3.0a0' @@ -7611,14 +7706,14 @@ package: poppler: '>=23.12.0,<23.13.0a0' postgresql: '' proj: '>=9.3.1,<9.3.2.0a0' - tiledb: '>=2.18.2,<2.19.0a0' - xerces-c: '>=3.2.4,<3.3.0a0' + tiledb: '>=2.19.0,<2.20.0a0' + xerces-c: '>=3.2.5,<3.3.0a0' xz: '>=5.2.6,<6.0a0' zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-3.8.1-h8e72e65_4.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libgdal-3.8.3-h7e86f1f_0.conda hash: - md5: 241eb423b481554904b3d78648443cf8 - sha256: c0515ef678b73f7332ad6aa4857aef35e2290194ea8bb4badbf40b0a4e784af7 + md5: dd42aa63e28b0e8c5d4af6d7995ab151 + sha256: fa94cfe093975c61b426c5d9bcde7e0d52d9623cd515a21bc14b941f024eec31 category: main optional: false - name: libgfortran @@ -7710,18 +7805,18 @@ package: manager: conda platform: linux-64 dependencies: - libabseil: '>=20230125.3,<20230126.0a0' + libabseil: '>=20230802.1,<20230803.0a0' libcrc32c: '>=1.1.2,<1.2.0a0' - libcurl: '>=8.1.2,<9.0a0' + libcurl: '>=8.4.0,<9.0a0' libgcc-ng: '>=12' - libgrpc: '>=1.54.2,<1.55.0a0' - libprotobuf: '>=3.21.12,<3.22.0a0' + libgrpc: '>=1.59.2,<1.60.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' libstdcxx-ng: '>=12' - openssl: '>=3.1.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.12.0-hac9eb74_1.conda + openssl: '>=3.1.4,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.12.0-h5206363_4.conda hash: - md5: 0dee716254497604762957076ac76540 - sha256: d133fdd36924ac5068ebfec493ec6a888aeb328060b4bc1c1928340047ebce26 + md5: b5eb63d2683102be45d17c55021282f6 + sha256: 82a7d211d0df165b073f9e8ba6d789c4b1c7c4882d546ca12d40f201fc3496fc category: main optional: false - name: libgoogle-cloud @@ -7729,56 +7824,72 @@ package: manager: conda platform: osx-arm64 dependencies: - libabseil: '>=20230125.3,<20230126.0a0' + __osx: '>=10.9' + libabseil: 
'>=20230802.1,<20230803.0a0' libcrc32c: '>=1.1.2,<1.2.0a0' - libcurl: '>=8.1.2,<9.0a0' - libcxx: '>=15.0.7' - libgrpc: '>=1.54.2,<1.55.0a0' - libprotobuf: '>=3.21.12,<3.22.0a0' - openssl: '>=3.1.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-2.12.0-he22f4c0_1.conda + libcurl: '>=8.4.0,<9.0a0' + libcxx: '>=16.0.6' + libgrpc: '>=1.59.2,<1.60.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' + openssl: '>=3.1.4,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libgoogle-cloud-2.12.0-hfb399a7_4.conda hash: - md5: 0f0853e2cdd835d56c6e2d3e78e08692 - sha256: bbf6428fdc0428ba01f88b9ef3af75bab31e17eae113bc235615e70151940a40 + md5: d62901188ab756c841cbb9a80c6c3f3c + sha256: 22122939a462f64a82ca2f305c43e5e5cf5a55f1ae12979c2445f9dc196b7047 category: main optional: false - name: libgrpc - version: 1.54.3 + version: 1.59.3 manager: conda platform: linux-64 dependencies: - c-ares: '>=1.19.1,<2.0a0' - libabseil: '>=20230125.3,<20230126.0a0' + c-ares: '>=1.21.0,<2.0a0' + libabseil: '>=20230802.1,<20230803.0a0' libgcc-ng: '>=12' - libprotobuf: '>=3.21.12,<3.22.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' + libre2-11: '>=2023.6.2,<2024.0a0' libstdcxx-ng: '>=12' libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.1,<4.0a0' - re2: '>=2023.3.2,<2023.3.3.0a0' - zlib: '' - url: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.54.3-hb20ce57_0.conda + openssl: '>=3.1.4,<4.0a0' + re2: '' + url: https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.59.3-hd6c4280_0.conda hash: - md5: 7af7c59ab24db007dfd82e0a3a343f66 - sha256: f5fea0c2eececb010529ac5863fbede05a2413ea8dc1a1c419db861f68ed66d7 + md5: 896c137eaf0c22f2fef58332eb4a4b83 + sha256: 3f95a2792e565b628cb284de92017a37a1cddc4a3f83453b8f75d9adc9f8cfdd category: main optional: false - name: libgrpc - version: 1.54.3 + version: 1.59.3 manager: conda platform: osx-arm64 dependencies: - c-ares: '>=1.19.1,<2.0a0' - libabseil: '>=20230125.3,<20230126.0a0' - libcxx: '>=15.0.7' - libprotobuf: 
'>=3.21.12,<3.22.0a0' + __osx: '>=10.9' + c-ares: '>=1.21.0,<2.0a0' + libabseil: '>=20230802.1,<20230803.0a0' + libcxx: '>=16.0.6' + libprotobuf: '>=4.24.4,<4.24.5.0a0' + libre2-11: '>=2023.6.2,<2024.0a0' libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.1,<4.0a0' - re2: '>=2023.3.2,<2023.3.3.0a0' - zlib: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libgrpc-1.54.3-h0a338ca_0.conda + openssl: '>=3.1.4,<4.0a0' + re2: '' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libgrpc-1.59.3-hbcf6334_0.conda hash: - md5: 181d9f230758e567c2a5c947ecc5c813 - sha256: 8340196ab92b0e4ca136d0ab9278e96caf8594c5065ef33dc9f5c3a4e659521d + md5: e9c7cbc84af929dd47501629a5e19713 + sha256: 54cacd1fc7503d48c135301a775568f15089b537b3c56804767c627a89a20c30 + category: main + optional: false +- name: libhwloc + version: 2.9.3 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + libxml2: '>=2.11.5,<3.0.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.9.3-default_h554bfaf_1009.conda + hash: + md5: f36ddc11ca46958197a45effdd286e45 + sha256: 6950fee24766d03406e0f6f965262a5d98829c71eed8d1004f313892423b559b category: main optional: false - name: libiconv @@ -7787,10 +7898,10 @@ package: platform: linux-64 dependencies: libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda hash: - md5: 4b06b43d0eca61db2899e4d7a289c302 - sha256: a9364735ef2542558ed59aa5f404707dab674df465cbdf312edeaf5e827b55ed + md5: d66573916ffcf376178462f1b61c941e + sha256: 8ac2f6a9f186e76539439e50505d98581472fedb347a20e7d1f36429849f05c9 category: main optional: false - name: libiconv @@ -7798,10 +7909,10 @@ package: manager: conda platform: osx-arm64 dependencies: {} - url: https://conda.anaconda.org/conda-forge/osx-arm64/libiconv-1.17-h0d3ecfb_1.conda + url: 
https://conda.anaconda.org/conda-forge/osx-arm64/libiconv-1.17-h0d3ecfb_2.conda hash: - md5: df3fbfc1fddc8fa40122206a4e47ea4e - sha256: d407ebd1e72ebb20716ea325cdebdd018bdc3c3d3424e67825db3eaa8809164e + md5: 69bda57310071cf6d2b86caf11573d2d + sha256: bc7de5097b97bcafcf7deaaed505f7ce02f648aac8eccc0d5a47cc599a1d0304 category: main optional: false - name: libjpeg-turbo @@ -7897,7 +8008,7 @@ package: libcurl: '>=8.5.0,<9.0a0' libgcc-ng: '>=12' libstdcxx-ng: '>=12' - libxml2: '>=2.12.2,<2.13.0a0' + libxml2: '>=2.12.2,<3.0.0a0' libzip: '>=1.10.1,<2.0a0' libzlib: '>=1.2.13,<1.3.0a0' openssl: '>=3.2.0,<4.0a0' @@ -7922,7 +8033,7 @@ package: libaec: '>=1.1.2,<2.0a0' libcurl: '>=8.5.0,<9.0a0' libcxx: '>=16.0.6' - libxml2: '>=2.12.2,<2.13.0a0' + libxml2: '>=2.12.2,<3.0.0a0' libzip: '>=1.10.1,<2.0a0' libzlib: '>=1.2.13,<1.3.0a0' openssl: '>=3.2.0,<4.0a0' @@ -7968,6 +8079,18 @@ package: sha256: fc97aaaf0c6d0f508be313d86c2705b490998d382560df24be918b8e977802cd category: main optional: false +- name: libnl + version: 3.9.0 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libnl-3.9.0-hd590300_0.conda + hash: + md5: d27c451db4f1d3c983c78167d2fdabc2 + sha256: aae03117811e704c3f3666e8374dd2e632f1d78bef0c27330e7298b24004819e + category: main + optional: false - name: libnsl version: 2.0.1 manager: conda @@ -8068,35 +8191,66 @@ package: openssl: '>=3.2.0,<4.0a0' url: https://conda.anaconda.org/conda-forge/osx-arm64/libpq-16.1-h0f8b458_7.conda hash: - md5: c94283997b390fc897936edf2c1f0d55 - sha256: 2e71c5efc57ec7da59efcb747b615ccde1f70d12eb25128720817a3f3482d622 + md5: c94283997b390fc897936edf2c1f0d55 + sha256: 2e71c5efc57ec7da59efcb747b615ccde1f70d12eb25128720817a3f3482d622 + category: main + optional: false +- name: libprotobuf + version: 4.24.4 + manager: conda + platform: linux-64 + dependencies: + libabseil: '>=20230802.1,<20230803.0a0' + libgcc-ng: '>=12' + libstdcxx-ng: '>=12' + libzlib: 
'>=1.2.13,<1.3.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.24.4-hf27288f_0.conda + hash: + md5: 1a0287ab734591ad63603734f923016b + sha256: 3e0f6454190abb27edd2aeb724688ee440de133edb02cbb17d5609ba36aa8be0 + category: main + optional: false +- name: libprotobuf + version: 4.24.4 + manager: conda + platform: osx-arm64 + dependencies: + __osx: '>=10.9' + libabseil: '>=20230802.1,<20230803.0a0' + libcxx: '>=16.0.6' + libzlib: '>=1.2.13,<1.3.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libprotobuf-4.24.4-hc9861d8_0.conda + hash: + md5: ac5438d981e105e053b341eb30c44273 + sha256: 2e81e023f463ef239e2fb7f56a4e8eed61a1d8e9ca3f2f07bec1668cc369b2ce category: main optional: false -- name: libprotobuf - version: 3.21.12 +- name: libre2-11 + version: 2023.06.02 manager: conda platform: linux-64 dependencies: + libabseil: '>=20230802.1,<20230803.0a0' libgcc-ng: '>=12' libstdcxx-ng: '>=12' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-3.21.12-hfc55251_2.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.06.02-h7a70373_0.conda hash: - md5: e3a7d4ba09b8dc939b98fef55f539220 - sha256: 2df8888c51c23dedc831ba4378bad259e95c3a20a6408f54926a6a6f629f6153 + md5: c0e7eacd9694db3ef5ef2979a7deea70 + sha256: 22b0b2169c80b65665ba0d6418bd5d3d4c7d89915ee0f9613403efe871c27db8 category: main optional: false -- name: libprotobuf - version: 3.21.12 +- name: libre2-11 + version: 2023.06.02 manager: conda platform: osx-arm64 dependencies: - libcxx: '>=15.0.7' - libzlib: '>=1.2.13,<1.3.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libprotobuf-3.21.12-ha614eb4_2.conda + __osx: '>=10.9' + libabseil: '>=20230802.1,<20230803.0a0' + libcxx: '>=16.0.6' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libre2-11-2023.06.02-h1753957_0.conda hash: - md5: 440548f3f8e573a427553ee877c5f6c3 - sha256: 3910b38529c44119a293fccc93021cab6fe73ebbdaa8194dcf43a711a433bc55 + md5: 
3b8652db4bf4e27fa1446526f7a78498 + sha256: 8bafee8f8ef27f4cb0afffe5404dd1abfc5fd6eac1ee9b4847a756d440bd7aa7 category: main optional: false - name: librsvg @@ -8109,7 +8263,7 @@ package: gettext: '>=0.21.1,<1.0a0' libgcc-ng: '>=12' libglib: '>=2.78.1,<3.0a0' - libxml2: '>=2.12.1,<2.13.0a0' + libxml2: '>=2.12.1,<3.0.0a0' pango: '>=1.50.14,<2.0a0' url: https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-he3f83f7_1.conda hash: @@ -8126,7 +8280,7 @@ package: gdk-pixbuf: '>=2.42.10,<3.0a0' gettext: '>=0.21.1,<1.0a0' libglib: '>=2.78.1,<3.0a0' - libxml2: '>=2.12.1,<2.13.0a0' + libxml2: '>=2.12.1,<3.0.0a0' pango: '>=1.50.14,<2.0a0' url: https://conda.anaconda.org/conda-forge/osx-arm64/librsvg-2.56.3-h55a2576_1.conda hash: @@ -8221,7 +8375,7 @@ package: librttopo: '>=1.1.0,<1.2.0a0' libsqlite: '>=3.44.2,<4.0a0' libstdcxx-ng: '>=12' - libxml2: '>=2.12.2,<2.13.0a0' + libxml2: '>=2.12.2,<3.0.0a0' libzlib: '>=1.2.13,<1.3.0a0' proj: '>=9.3.1,<9.3.2.0a0' sqlite: '' @@ -8244,7 +8398,7 @@ package: libiconv: '>=1.17,<2.0a0' librttopo: '>=1.1.0,<1.2.0a0' libsqlite: '>=3.44.2,<4.0a0' - libxml2: '>=2.12.2,<2.13.0a0' + libxml2: '>=2.12.2,<3.0.0a0' libzlib: '>=1.2.13,<1.3.0a0' proj: '>=9.3.1,<9.3.2.0a0' sqlite: '' @@ -8319,7 +8473,7 @@ package: category: main optional: false - name: libthrift - version: 0.18.1 + version: 0.19.0 manager: conda platform: linux-64 dependencies: @@ -8327,26 +8481,26 @@ package: libgcc-ng: '>=12' libstdcxx-ng: '>=12' libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.18.1-h8fd135c_2.conda + openssl: '>=3.1.3,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.19.0-hb90f79a_1.conda hash: - md5: bbf65f7688512872f063810623b755dc - sha256: 06cd0ccd95d19389d0b0146402ac5536e4bb0abd08a88650f95dd18debd62677 + md5: 8cdb7d41faa0260875ba92414c487e2d + sha256: 719add2cf20d144ef9962c57cd0f77178259bdb3aae1cded2e2b2b7c646092f5 category: main optional: false - name: 
libthrift - version: 0.18.1 + version: 0.19.0 manager: conda platform: osx-arm64 dependencies: libcxx: '>=15.0.7' libevent: '>=2.1.12,<2.1.13.0a0' libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.1,<4.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libthrift-0.18.1-ha061701_2.conda + openssl: '>=3.1.3,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libthrift-0.19.0-h026a170_1.conda hash: - md5: c1a4bb91d705cc903de58a95aa35ab5b - sha256: 4b3cbe168c7fb070a79960fd20bdc60b309d79805619ed93cdf93f71bdc88bf6 + md5: 4b8b21eb00d9019e9fa351141da2a6ac + sha256: b2c1b30d36f0412c0c0313db76a0236d736f3a9b887b8ed16182f531e4b7cb80 category: main optional: false - name: libtiff @@ -8388,6 +8542,47 @@ package: sha256: b18ef36eb90f190db22c56ae5a080bccc16669c8f5b795a6211d7b0c00c18ff7 category: main optional: false +- name: libtorch + version: 2.1.0 + manager: conda + platform: linux-64 + dependencies: + __glibc: '>=2.17,<3.0.a0' + _openmp_mutex: '>=4.5' + libcblas: '>=3.9.0,<4.0a0' + libgcc-ng: '>=12' + libprotobuf: '>=4.24.4,<4.24.5.0a0' + libstdcxx-ng: '>=12' + libuv: '>=1.46.0,<2.0a0' + mkl: '>=2023.2.0,<2024.0a0' + sleef: '>=3.5.1,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/libtorch-2.1.0-cpu_mkl_hadc400e_102.conda + hash: + md5: bfb8dc68afc5fff1d3123851b9514a30 + sha256: 1121cc2ba89fea59b5619a59baf81c328bc29e5ba42edb53895c590c6eca00be + category: main + optional: false +- name: libtorch + version: 2.1.0 + manager: conda + platform: osx-arm64 + dependencies: + libcblas: '>=3.9.0,<4.0a0' + libcxx: '>=14' + liblapack: '>=3.9.0,<4.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' + libuv: '>=1.46.0,<2.0a0' + llvm-openmp: '>=16.0.6' + numpy: '>=1.22.4,<2.0a0' + python: '>=3.9,<3.10.0a0' + python_abi: 3.9.* + sleef: '>=3.5.1,<4.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/libtorch-2.1.0-cpu_generic_hd9c544a_2.conda + hash: + md5: c7a4103694d0b4c2005339c6c56970cc + sha256: 
091c22fbbcb178a7e8e84cd3597bca98224398d2e5f24a6ca0cdf38f8f544aac + category: main + optional: false - name: libutf8proc version: 2.8.0 manager: conda @@ -8531,8 +8726,20 @@ package: sha256: 6eaa87760ff3e91bb5524189700139db46f8946ff6331f4e571e4a9356edbb0d category: main optional: false +- name: libxcrypt + version: 4.4.36 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + url: https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda + hash: + md5: 5aa797f8787fe7a17d1b0821485b5adc + sha256: 6ae68e0b86423ef188196fff6207ed0c8195dd84273cb5623b85aa08033a410c + category: main + optional: false - name: libxml2 - version: 2.12.3 + version: 2.12.4 manager: conda platform: linux-64 dependencies: @@ -8541,14 +8748,14 @@ package: libiconv: '>=1.17,<2.0a0' libzlib: '>=1.2.13,<1.3.0a0' xz: '>=5.2.6,<6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.3-h232c23b_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.4-h232c23b_1.conda hash: - md5: bc6ac4c0cea148d924f621985bc3892b - sha256: 31dd757689a1a28e42021208b6c740e84bcfdfee213a39c9bcc0dfbc33acf7a5 + md5: 53e951fab78d7e3bab40745f7b3d1620 + sha256: f6828b44da29bbfbf367ddbc72902e84ea5f5de933be494d6aac4a35826afed0 category: main optional: false - name: libxml2 - version: 2.12.3 + version: 2.12.4 manager: conda platform: osx-arm64 dependencies: @@ -8556,10 +8763,10 @@ package: libiconv: '>=1.17,<2.0a0' libzlib: '>=1.2.13,<1.3.0a0' xz: '>=5.2.6,<6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.12.3-h0d0cfa8_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/libxml2-2.12.4-h0d0cfa8_1.conda hash: - md5: 84e330ed40b5bf8e95a65529ccb94a14 - sha256: c23c34af9037abc6749d59271fbdbb584f0cdcd87e17c2f011f099e63e0d7a7d + md5: 2ce68362b6ba7e78a066abce22811df7 + sha256: 70863a5554cbdd573cf852571a6ef015e5376f8969068725523a01dff7ff4de3 category: main optional: false - name: libzip @@ -8663,7 +8870,7 @@ package: 
category: main optional: false - name: lz4 - version: 4.3.2 + version: 4.3.3 manager: conda platform: linux-64 dependencies: @@ -8671,24 +8878,24 @@ package: lz4-c: '>=1.9.3,<1.10.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.2-py39h79d96da_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py39h79d96da_0.conda hash: - md5: 098feecfa5d91726caf654b67c9e7190 - sha256: e99b7cab72a80429d6231149c18266a2b27318aa838aa1b29c0944dbe5efc69c + md5: 5605c39626be9ccfd3e5ad557d909450 + sha256: 85b0f58f4ab75bbfbf16036487f66aac0d577f2894b52ee2e86a82c0d04e8131 category: main optional: false - name: lz4 - version: 4.3.2 + version: 4.3.3 manager: conda platform: osx-arm64 dependencies: lz4-c: '>=1.9.3,<1.10.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.3.2-py39h0d94542_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/lz4-4.3.3-py39hf99b9d6_0.conda hash: - md5: 53809d9356529ecf1d2ccfde9dbf1c64 - sha256: aa0c9d0b4fee03e0e05a2e3767c30ff3fdb7716e80a3eb22d29a778bec7e3134 + md5: 2d222ce85e5756600f76046e8d2ec3ef + sha256: b326eb7233dda33b191952cbfd30b3eef0eb499c049c0cb320cabe21eaf203e8 category: main optional: false - name: lz4-c @@ -8826,29 +9033,29 @@ package: category: main optional: false - name: markdown - version: 3.5.1 + version: 3.5.2 manager: conda platform: linux-64 dependencies: importlib-metadata: '>=4.4' python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/markdown-3.5.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/markdown-3.5.2-pyhd8ed1ab_0.conda hash: - md5: 323495027ffa625701129acebf861412 - sha256: 35e8990504cf8dc7e2bb63efd855fb0a0d5c0d77bf79403235e757c24a83ec1d + md5: db7b48fa4eeb0c21b2f3f5b1f7d9ebcf + sha256: fbc70dc01b361fe46e7b4e102e725f99ba60bf9903c2fa86b53ad6b70ded677a category: main optional: false - name: markdown - version: 3.5.1 + version: 3.5.2 manager: 
conda platform: osx-arm64 dependencies: python: '>=3.6' importlib-metadata: '>=4.4' - url: https://conda.anaconda.org/conda-forge/noarch/markdown-3.5.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/markdown-3.5.2-pyhd8ed1ab_0.conda hash: - md5: 323495027ffa625701129acebf861412 - sha256: 35e8990504cf8dc7e2bb63efd855fb0a0d5c0d77bf79403235e757c24a83ec1d + md5: db7b48fa4eeb0c21b2f3f5b1f7d9ebcf + sha256: fbc70dc01b361fe46e7b4e102e725f99ba60bf9903c2fa86b53ad6b70ded677a category: main optional: false - name: markdown-it-py @@ -8907,29 +9114,29 @@ package: category: main optional: false - name: marshmallow - version: 3.20.1 + version: 3.20.2 manager: conda platform: linux-64 dependencies: packaging: '>=17.0' python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/marshmallow-3.20.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/marshmallow-3.20.2-pyhd8ed1ab_0.conda hash: - md5: 0a33d141ff5f5dfbf9306f8408e9b520 - sha256: ff761f3a2dc70b8451e482b38222da195143d75c83f7607b72eddda57b5eec12 + md5: 1c5717a601c50f58452839773f3a1485 + sha256: 9f9eb62e87eae91de3763d519475cc883ebe2fdc3e429283cd2abf8cba6120d0 category: main optional: false - name: marshmallow - version: 3.20.1 + version: 3.20.2 manager: conda platform: osx-arm64 dependencies: python: '>=3.7' packaging: '>=17.0' - url: https://conda.anaconda.org/conda-forge/noarch/marshmallow-3.20.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/marshmallow-3.20.2-pyhd8ed1ab_0.conda hash: - md5: 0a33d141ff5f5dfbf9306f8408e9b520 - sha256: ff761f3a2dc70b8451e482b38222da195143d75c83f7607b72eddda57b5eec12 + md5: 1c5717a601c50f58452839773f3a1485 + sha256: 9f9eb62e87eae91de3763d519475cc883ebe2fdc3e429283cd2abf8cba6120d0 category: main optional: false - name: marshmallow-enum @@ -9118,31 +9325,31 @@ package: category: main optional: false - name: mdurl - version: 0.1.0 + version: 0.1.2 manager: conda platform: linux-64 dependencies: python: '>=3.6' - url: 
https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.0-pyhd8ed1ab_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_0.conda hash: - md5: f8dab71fdc13b1bf29a01248b156d268 - sha256: c678b9194e025b1fb665bec30ee20aab93399203583875b1dcc0a3b52a8f5523 + md5: 776a8dd9e824f77abac30e6ef43a8f7a + sha256: 64073dfb6bb429d52fff30891877b48c7ec0f89625b1bf844905b66a81cce6e1 category: main optional: false - name: mdurl - version: 0.1.0 + version: 0.1.2 manager: conda platform: osx-arm64 dependencies: python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.0-pyhd8ed1ab_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_0.conda hash: - md5: f8dab71fdc13b1bf29a01248b156d268 - sha256: c678b9194e025b1fb665bec30ee20aab93399203583875b1dcc0a3b52a8f5523 + md5: 776a8dd9e824f77abac30e6ef43a8f7a + sha256: 64073dfb6bb429d52fff30891877b48c7ec0f89625b1bf844905b66a81cce6e1 category: main optional: false - name: minizip - version: 4.0.3 + version: 4.0.4 manager: conda platform: linux-64 dependencies: @@ -9151,32 +9358,31 @@ package: libiconv: '>=1.17,<2.0a0' libstdcxx-ng: '>=12' libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.4,<4.0a0' + openssl: '>=3.2.0,<4.0a0' xz: '>=5.2.6,<6.0a0' zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.3-h0ab5242_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.4-h0ab5242_0.conda hash: - md5: 3f9b5f4400be3cee11b426a8cd653b7c - sha256: cf33c24fa8375d17fad4e1da631b4c2e8ed9a109480fa45c82fbfa2a7c5bdd41 + md5: 813bc75d9c33ddd9c9d5b8d9c560e152 + sha256: e25d24c4841aa85ed2153f826ae58e56ae4d12704fd9e52005a3d7edfeb3b95a category: main optional: false - name: minizip - version: 4.0.3 + version: 4.0.4 manager: conda platform: osx-arm64 dependencies: - __osx: '>=10.9' bzip2: '>=1.0.8,<2.0a0' - libcxx: '>=16.0.6' + libcxx: '>=15' libiconv: '>=1.17,<2.0a0' libzlib: '>=1.2.13,<1.3.0a0' - openssl: '>=3.1.4,<4.0a0' + 
openssl: '>=3.2.0,<4.0a0' xz: '>=5.2.6,<6.0a0' zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/minizip-4.0.3-hd5cad61_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/minizip-4.0.4-hc35e051_0.conda hash: - md5: 8f1bf9ea12bca129b7a3d49eec9efd76 - sha256: 9db88831aa3485d98cad155d989d4de45edfec13e6cbe81b0093ba7e6ba8817d + md5: 293ad87f065d0e1dc011ccafeb1bb0be + sha256: 0fbf65095148cfe9dab8b32b533b3d2752a66bbf459816345773ed73844a448b category: main optional: false - name: mistune @@ -9204,17 +9410,17 @@ package: category: main optional: false - name: mkl - version: 2022.2.1 + version: 2023.2.0 manager: conda platform: linux-64 dependencies: _openmp_mutex: '>=4.5' - llvm-openmp: '>=15.0.6' + llvm-openmp: '>=17.0.3' tbb: 2021.* - url: https://conda.anaconda.org/conda-forge/linux-64/mkl-2022.2.1-h84fe81f_16997.conda + url: https://conda.anaconda.org/conda-forge/linux-64/mkl-2023.2.0-h84fe81f_50496.conda hash: - md5: a7ce56d5757f5b57e7daabe703ade5bb - sha256: 5322750d5e96ff5d96b1457db5fb6b10300f2bc4030545e940e17b57c4e96d00 + md5: 81d4a1a57d618adf0152db973d93b2ad + sha256: 046073737bf73153b0c39e343b197cdf0b7867d336962369407465a17ea5979a category: main optional: false - name: ml_dtypes @@ -9394,27 +9600,27 @@ package: category: main optional: false - name: more-itertools - version: 10.1.0 + version: 10.2.0 manager: conda platform: linux-64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/more-itertools-10.1.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/more-itertools-10.2.0-pyhd8ed1ab_0.conda hash: - md5: 8549fafed0351bbfaa1ddaa15fdf9b4e - sha256: 07ce65497dec537e490992758934ddbc4fb5ed9285b41387a7cca966f1a98a0f + md5: d5c98e9706fdc5328d49a9bf2ce5fb42 + sha256: 9e49e9484ff279453f0b55323a3f0c7cb97440c74f69eecda1f4ad29fae5cd3c category: main optional: false - name: more-itertools - version: 10.1.0 + version: 10.2.0 manager: conda platform: osx-arm64 dependencies: 
python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/more-itertools-10.1.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/more-itertools-10.2.0-pyhd8ed1ab_0.conda hash: - md5: 8549fafed0351bbfaa1ddaa15fdf9b4e - sha256: 07ce65497dec537e490992758934ddbc4fb5ed9285b41387a7cca966f1a98a0f + md5: d5c98e9706fdc5328d49a9bf2ce5fb42 + sha256: 9e49e9484ff279453f0b55323a3f0c7cb97440c74f69eecda1f4ad29fae5cd3c category: main optional: false - name: mpc @@ -9824,35 +10030,35 @@ package: category: main optional: false - name: nbconvert - version: 7.12.0 + version: 7.14.2 manager: conda platform: linux-64 dependencies: - nbconvert-core: 7.12.0 - nbconvert-pandoc: 7.12.0 + nbconvert-core: 7.14.2 + nbconvert-pandoc: 7.14.2 python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.12.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.14.2-pyhd8ed1ab_0.conda hash: - md5: 364e28ab12477494e72839aaa588073d - sha256: 0137330ab16bddf1fcaf60c0501c6145705b775fd547823708ed84364c934b76 + md5: 0ac429dbe14800f8cb97b892b107bc85 + sha256: 3fd752441d18a2f7fe19a327a767bbfa71092e66bef6eac0bf74de42166bc19e category: main optional: false - name: nbconvert - version: 7.12.0 + version: 7.14.2 manager: conda platform: osx-arm64 dependencies: python: '>=3.8' - nbconvert-core: 7.12.0 - nbconvert-pandoc: 7.12.0 - url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.12.0-pyhd8ed1ab_0.conda + nbconvert-core: 7.14.2 + nbconvert-pandoc: 7.14.2 + url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.14.2-pyhd8ed1ab_0.conda hash: - md5: 364e28ab12477494e72839aaa588073d - sha256: 0137330ab16bddf1fcaf60c0501c6145705b775fd547823708ed84364c934b76 + md5: 0ac429dbe14800f8cb97b892b107bc85 + sha256: 3fd752441d18a2f7fe19a327a767bbfa71092e66bef6eac0bf74de42166bc19e category: main optional: false - name: nbconvert-core - version: 7.12.0 + version: 7.14.2 manager: conda platform: linux-64 dependencies: @@ 
-9873,14 +10079,14 @@ package: python: '>=3.8' tinycss2: '' traitlets: '>=5.0' - url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.12.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.14.2-pyhd8ed1ab_0.conda hash: - md5: 4d67c68fd0d130091ada039bc2d81b33 - sha256: 04c3ac88701d98d58139569e4899c3254bf99908179a898cc3dcadd8c0ef44b4 + md5: 631800aa8cc7ccf61e70087355d95827 + sha256: ba3e9a42ea03411d77ac28f14ce9bc6f70710ce1a8f4a970322b2daa91e47ce5 category: main optional: false - name: nbconvert-core - version: 7.12.0 + version: 7.14.2 manager: conda platform: osx-arm64 dependencies: @@ -9901,38 +10107,38 @@ package: pygments: '>=2.4.1' nbclient: '>=0.5.0' mistune: '>=2.0.3,<4' - url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.12.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.14.2-pyhd8ed1ab_0.conda hash: - md5: 4d67c68fd0d130091ada039bc2d81b33 - sha256: 04c3ac88701d98d58139569e4899c3254bf99908179a898cc3dcadd8c0ef44b4 + md5: 631800aa8cc7ccf61e70087355d95827 + sha256: ba3e9a42ea03411d77ac28f14ce9bc6f70710ce1a8f4a970322b2daa91e47ce5 category: main optional: false - name: nbconvert-pandoc - version: 7.12.0 + version: 7.14.2 manager: conda platform: linux-64 dependencies: - nbconvert-core: 7.12.0 + nbconvert-core: 7.14.2 pandoc: '' python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.12.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.14.2-pyhd8ed1ab_0.conda hash: - md5: 460d7cac50322a39b61a833885a6a8d5 - sha256: ebf25caef387ec79f8d8f6771240d451ffaebcc2cdd127c0b152c6697d661d10 + md5: 27bb156a0e59f9c3b656e3e289475fbd + sha256: f6e7154d9bd74a9c174996b450943571842ffba61d7b2ccead9015a32a795dff category: main optional: false - name: nbconvert-pandoc - version: 7.12.0 + version: 7.14.2 manager: conda platform: osx-arm64 dependencies: pandoc: '' python: '>=3.8' - nbconvert-core: 7.12.0 
- url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.12.0-pyhd8ed1ab_0.conda + nbconvert-core: 7.14.2 + url: https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.14.2-pyhd8ed1ab_0.conda hash: - md5: 460d7cac50322a39b61a833885a6a8d5 - sha256: ebf25caef387ec79f8d8f6771240d451ffaebcc2cdd127c0b152c6697d661d10 + md5: 27bb156a0e59f9c3b656e3e289475fbd + sha256: f6e7154d9bd74a9c174996b450943571842ffba61d7b2ccead9015a32a795dff category: main optional: false - name: nbformat @@ -9992,27 +10198,27 @@ package: category: main optional: false - name: nest-asyncio - version: 1.5.8 + version: 1.5.9 manager: conda platform: linux-64 dependencies: python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.5.8-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.5.9-pyhd8ed1ab_0.conda hash: - md5: a4f0e4519bc50eee4f53f689be9607f7 - sha256: d7b795b4e754136841c6da3f9fa1a0f7ec37bc7167e7dd68c5b45e657133e008 + md5: 6c59cb840d511a1a997998d55e68516c + sha256: 4f28afd67be5cabcc584fe51ce2dbefe15d6886c7a6bb862aae2e1f3d9239e5f category: main optional: false - name: nest-asyncio - version: 1.5.8 + version: 1.5.9 manager: conda platform: osx-arm64 dependencies: python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.5.8-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.5.9-pyhd8ed1ab_0.conda hash: - md5: a4f0e4519bc50eee4f53f689be9607f7 - sha256: d7b795b4e754136841c6da3f9fa1a0f7ec37bc7167e7dd68c5b45e657133e008 + md5: 6c59cb840d511a1a997998d55e68516c + sha256: 4f28afd67be5cabcc584fe51ce2dbefe15d6886c7a6bb862aae2e1f3d9239e5f category: main optional: false - name: networkx @@ -10259,40 +10465,41 @@ package: category: main optional: false - name: onnx - version: 1.14.0 + version: 1.15.0 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' - libprotobuf: '>=3.21.12,<3.22.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' libstdcxx-ng: '>=12' - 
numpy: '>=1.21.6,<2.0a0' + numpy: '>=1.22.4,<2.0a0' protobuf: '' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing-extensions: '>=3.6.2.1' - url: https://conda.anaconda.org/conda-forge/linux-64/onnx-1.14.0-py39h5cf8293_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/onnx-1.15.0-py39h868dac8_0.conda hash: - md5: f328acd6bba2c62d2751ba1821c7fa6a - sha256: a8da023239f0028f486ea5b3bbd0e96bda75ddedd56a650af3ec105a2b163209 + md5: b7daae3b7f6ac91b5d5a9c046b7adbdf + sha256: 83370d694316b40036270969cf8c3cd3bd3340ddcdc72a79cc4208f43a402536 category: main optional: false - name: onnx - version: 1.14.0 + version: 1.15.0 manager: conda platform: osx-arm64 dependencies: - libcxx: '>=15.0.7' - libprotobuf: '>=3.21.12,<3.22.0a0' - numpy: '>=1.21.6,<2.0a0' + __osx: '>=10.9' + libcxx: '>=16.0.6' + libprotobuf: '>=4.24.4,<4.24.5.0a0' + numpy: '>=1.22.4,<2.0a0' protobuf: '' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing-extensions: '>=3.6.2.1' - url: https://conda.anaconda.org/conda-forge/osx-arm64/onnx-1.14.0-py39h09fa150_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/onnx-1.15.0-py39h0aa7e6e_0.conda hash: - md5: 7408be0d67077506377d7f98fe12c211 - sha256: 5dba0d4bc4cb7cc9ab2969112c95ed85641550d7f1c5687864249f77b9b37c85 + md5: 2d86c4daacb5e33742f2b4bcaaccdb8d + sha256: fcfcdd5d11a5b39a9225b569befd354b1e76bbbc6d6c754206cd52738f5b0d7f category: main optional: false - name: onnxconverter-common @@ -10410,38 +10617,39 @@ package: category: main optional: false - name: orc - version: 1.9.0 + version: 1.9.2 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' - libprotobuf: '>=3.21.12,<3.22.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' libstdcxx-ng: '>=12' libzlib: '>=1.2.13,<1.3.0a0' lz4-c: '>=1.9.3,<1.10.0a0' snappy: '>=1.1.10,<2.0a0' - zstd: '>=1.5.2,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/orc-1.9.0-h2f23424_1.conda + zstd: '>=1.5.5,<1.6.0a0' + url: 
https://conda.anaconda.org/conda-forge/linux-64/orc-1.9.2-h4b38347_0.conda hash: - md5: 9571eb3eb0f7fe8b59956a7786babbcd - sha256: 0948e8ce1b13f9e351df91996141fea60a0ace4d347667ed3dc59315427e0b8c + md5: 6e6f990b097d3e237e18a8e321d08484 + sha256: a06dd76bc0f2f99f5db5e348298c906007c3aa9e31b963f71d16e63f770b900b category: main optional: false - name: orc - version: 1.9.0 + version: 1.9.2 manager: conda platform: osx-arm64 dependencies: - libcxx: '>=15.0.7' - libprotobuf: '>=3.21.12,<3.22.0a0' + __osx: '>=10.9' + libcxx: '>=16.0.6' + libprotobuf: '>=4.24.4,<4.24.5.0a0' libzlib: '>=1.2.13,<1.3.0a0' lz4-c: '>=1.9.3,<1.10.0a0' snappy: '>=1.1.10,<2.0a0' - zstd: '>=1.5.2,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/orc-1.9.0-ha98e9e8_1.conda + zstd: '>=1.5.5,<1.6.0a0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/orc-1.9.2-h7c018df_0.conda hash: - md5: 951e399cfb5abae00231ad5e4f50e933 - sha256: 908d626a5d13d79ffb056841e1dde0823caac043a2c1d9d3ca7d3f7e6de7a68c + md5: 1ef4159e9686d95ce8ea9f1d4d999f29 + sha256: b1ad0f09dc69a8956079371d9853534f991f8311352e4e21503e6e5d20e4017b category: main optional: false - name: overrides @@ -10728,7 +10936,7 @@ package: category: main optional: false - name: paramiko - version: 3.3.1 + version: 3.4.0 manager: conda platform: linux-64 dependencies: @@ -10736,14 +10944,14 @@ package: cryptography: '>=3.3' pynacl: '>=1.5' python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/paramiko-3.3.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/paramiko-3.4.0-pyhd8ed1ab_0.conda hash: - md5: 8d4563992b27cdb8e673d1ca16962c9d - sha256: 9139c13cb7ea5729af862db5fb0523daa5900b9b4fa36637cd7f9c01be665f80 + md5: a5e792523b028b06d7ce6e65a6cd4a33 + sha256: 2e66359261954a79b66858c30e69ea6dd4380bf8bd733940527386b25e31dd13 category: main optional: false - name: paramiko - version: 3.3.1 + version: 3.4.0 manager: conda platform: osx-arm64 dependencies: @@ -10751,10 +10959,10 @@ package: 
cryptography: '>=3.3' bcrypt: '>=3.2' pynacl: '>=1.5' - url: https://conda.anaconda.org/conda-forge/noarch/paramiko-3.3.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/paramiko-3.4.0-pyhd8ed1ab_0.conda hash: - md5: 8d4563992b27cdb8e673d1ca16962c9d - sha256: 9139c13cb7ea5729af862db5fb0523daa5900b9b4fa36637cd7f9c01be665f80 + md5: a5e792523b028b06d7ce6e65a6cd4a33 + sha256: 2e66359261954a79b66858c30e69ea6dd4380bf8bd733940527386b25e31dd13 category: main optional: false - name: parso @@ -10936,12 +11144,12 @@ package: category: main optional: false - name: pillow - version: 10.1.0 + version: 10.2.0 manager: conda platform: linux-64 dependencies: freetype: '>=2.12.1,<3.0a0' - lcms2: '>=2.15,<3.0a0' + lcms2: '>=2.16,<3.0a0' libgcc-ng: '>=12' libjpeg-turbo: '>=3.0.0,<4.0a0' libtiff: '>=4.6.0,<4.7.0a0' @@ -10952,19 +11160,19 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* tk: '>=8.6.13,<8.7.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.1.0-py39had0adad_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/pillow-10.2.0-py39had0adad_0.conda hash: - md5: eeaa413fddccecb2ab7f747bdb55b07f - sha256: b474fbd1cb38a6c29372248809bbf339a2d2eff72c11601f05b19a3188dd372e + md5: 2972754dc054bb079d1d121918b5126f + sha256: 6936d54f9830ac66bee7b26187eb2297d80febe110e978cd9ae6a54e62ec6aaf category: main optional: false - name: pillow - version: 10.1.0 + version: 10.2.0 manager: conda platform: osx-arm64 dependencies: freetype: '>=2.12.1,<3.0a0' - lcms2: '>=2.15,<3.0a0' + lcms2: '>=2.16,<3.0a0' libjpeg-turbo: '>=3.0.0,<4.0a0' libtiff: '>=4.6.0,<4.7.0a0' libwebp-base: '>=1.3.2,<2.0a0' @@ -10974,63 +11182,63 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* tk: '>=8.6.13,<8.7.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-10.1.0-py39h755f0b7_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/pillow-10.2.0-py39h755f0b7_0.conda hash: - md5: aa7743f5af2e30366316554982dd2e3f - sha256: 
706b71b8b4eed61593ee6adaf7e52a7ae690895cd138d8b73e1f0e5ba1707bd7 + md5: 9309cdb83d6c1617a58e57b61c556105 + sha256: 0e91b5467c3994760832fca4a45ed084853061b5c42a8015da4e19fed7e5b7aa category: main optional: false - name: pip - version: 23.3.1 + version: 23.3.2 manager: conda platform: linux-64 dependencies: python: '>=3.7' setuptools: '' wheel: '' - url: https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/pip-23.3.2-pyhd8ed1ab_0.conda hash: - md5: 2400c0b86889f43aa52067161e1fb108 - sha256: 435829a03e1c6009f013f29bb83de8b876c388820bf8cf69a7baeec25f6a3563 + md5: 8591c748f98dcc02253003533bc2e4b1 + sha256: 29096d1d53c61aeef518729add2f405df86b3629d1d738a35b15095e6a02eeed category: main optional: false - name: pip - version: 23.3.1 + version: 23.3.2 manager: conda platform: osx-arm64 dependencies: setuptools: '' wheel: '' python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/pip-23.3.2-pyhd8ed1ab_0.conda hash: - md5: 2400c0b86889f43aa52067161e1fb108 - sha256: 435829a03e1c6009f013f29bb83de8b876c388820bf8cf69a7baeec25f6a3563 + md5: 8591c748f98dcc02253003533bc2e4b1 + sha256: 29096d1d53c61aeef518729add2f405df86b3629d1d738a35b15095e6a02eeed category: main optional: false - name: pixman - version: 0.42.2 + version: 0.43.0 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.0-h59595ed_0.conda hash: - md5: 700edd63ccd5fc66b70b1c028cea9a68 - sha256: ae917851474eb3b08812b02c9e945d040808523ec53f828aa74a90b0cdf15f57 + md5: 6b4b43013628634b6cfdee6b74fd696b + sha256: 07a5ffcd34e241f900433af4c6d4904518aab76add4e1e40a2c4bad93ae43f2b category: main optional: false - name: pixman - version: 0.42.2 + version: 0.43.0 manager: conda 
platform: osx-arm64 dependencies: - libcxx: '>=15.0.7' - url: https://conda.anaconda.org/conda-forge/osx-arm64/pixman-0.42.2-h13dd4ca_0.conda + libcxx: '>=15' + url: https://conda.anaconda.org/conda-forge/osx-arm64/pixman-0.43.0-hebf3989_0.conda hash: - md5: f96347021db6f33ccabe314ddeab62d4 - sha256: 90e60dc5604e356d47ef97b23b13759ef3d8b70fa2c637f4809d29851435d7d7 + md5: 6becbd8db3b0aa168259018c3806b814 + sha256: 4587c34eb9ada1ba82c3e9f8ffe7354dff6c385f0e8c94fe1bbc775f4ddba5c9 category: main optional: false - name: pkgutil-resolve-name @@ -11112,7 +11320,7 @@ package: category: main optional: false - name: polars - version: 0.19.19 + version: 0.20.5 manager: conda platform: linux-64 dependencies: @@ -11121,14 +11329,14 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing_extensions: '>=4.0.0' - url: https://conda.anaconda.org/conda-forge/linux-64/polars-0.19.19-py39h90d8ae4_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/polars-0.20.5-py39h927a070_0.conda hash: - md5: 9cefe0d7ce9208c3afbbac29951aff59 - sha256: 862ab4acd653e75da36a371c5c50fdb295b28b07d496b54b3d0f908858639968 + md5: 739c3045e502a5d8f4a2e9aba4756cde + sha256: 566669f4c2c5dd3af41ae5fb6d6ca1647cdf9b4facc2e4e606ec04aaa171a20e category: main optional: false - name: polars - version: 0.19.19 + version: 0.20.5 manager: conda platform: osx-arm64 dependencies: @@ -11136,10 +11344,10 @@ package: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* typing_extensions: '>=4.0.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/polars-0.19.19-py39hf9b72a1_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/polars-0.20.5-py39he979254_0.conda hash: - md5: 64dc65feee192e9e9e9312ed63ba0460 - sha256: 5978bd41bda03050fe9578e3950afd746cf6473d6efc31d50892b76d1dd7b919 + md5: 54bc76cb1099824a7fbbbd5e1d5b4f34 + sha256: 05548cf53731537d518ab748fb2fc35670a6191ca3b626d1b023c2a6094a4a8f category: main optional: false - name: poppler @@ -11257,7 +11465,7 @@ package: krb5: '>=1.21.2,<1.22.0a0' 
libgcc-ng: '>=12' libpq: '16.1' - libxml2: '>=2.12.2,<2.13.0a0' + libxml2: '>=2.12.2,<3.0.0a0' libzlib: '>=1.2.13,<1.3.0a0' openssl: '>=3.2.0,<4.0a0' readline: '>=8.2,<9.0a0' @@ -11276,16 +11484,16 @@ package: dependencies: krb5: '>=1.21.2,<1.22.0a0' libpq: '16.1' - libxml2: '>=2.12.2,<2.13.0a0' + libxml2: '>=2.11.6,<3.0.0a0' libzlib: '>=1.2.13,<1.3.0a0' openssl: '>=3.2.0,<4.0a0' readline: '>=8.2,<9.0a0' tzcode: '' tzdata: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/postgresql-16.1-h1d0603d_7.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/postgresql-16.1-hc6ab77f_7.conda hash: - md5: 69c2cda2fa0234a7c1f823f696f8180d - sha256: 1b527b6d23626837879e9fd832f225d27366188d05880f598ea17f7495f21994 + md5: bba5c8dd372e5e2cf81bf71e6104fa7b + sha256: 5830151c5b72e95ecbff45b36a88405875514ea27cfce1b5381e98d6cee9974a category: main optional: false - name: pre-commit @@ -11323,29 +11531,29 @@ package: category: main optional: false - name: progressbar2 - version: 4.2.0 + version: 4.3.2 manager: conda platform: linux-64 dependencies: - python: '>=3.7' - python-utils: '>=3.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/progressbar2-4.2.0-pyhd8ed1ab_0.tar.bz2 + python: '>=3.8' + python-utils: '>=3.8.1' + url: https://conda.anaconda.org/conda-forge/noarch/progressbar2-4.3.2-pyhd8ed1ab_0.conda hash: - md5: d883564cf1e9ba190f6b285036c5f949 - sha256: c7a0307fbe83694f84c4024b176aaa484be9277ba5fde82e24a02bc72f24a53f + md5: d55cd7adfc98beaef0c717c61fa3d294 + sha256: 349bfed6f822309cf10f1a42f637b9c417759a87a1d282b7f36a49c4394a3393 category: main optional: false - name: progressbar2 - version: 4.2.0 + version: 4.3.2 manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - python-utils: '>=3.0.0' - url: https://conda.anaconda.org/conda-forge/noarch/progressbar2-4.2.0-pyhd8ed1ab_0.tar.bz2 + python: '>=3.8' + python-utils: '>=3.8.1' + url: https://conda.anaconda.org/conda-forge/noarch/progressbar2-4.3.2-pyhd8ed1ab_0.conda hash: - md5: 
d883564cf1e9ba190f6b285036c5f949 - sha256: c7a0307fbe83694f84c4024b176aaa484be9277ba5fde82e24a02bc72f24a53f + md5: d55cd7adfc98beaef0c717c61fa3d294 + sha256: 349bfed6f822309cf10f1a42f637b9c417759a87a1d282b7f36a49c4394a3393 category: main optional: false - name: proj @@ -11511,36 +11719,39 @@ package: category: main optional: false - name: protobuf - version: 4.21.12 + version: 4.24.4 manager: conda platform: linux-64 dependencies: + libabseil: '>=20230802.1,<20230803.0a0' libgcc-ng: '>=12' - libprotobuf: '>=3.21.12,<3.22.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' libstdcxx-ng: '>=12' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* setuptools: '' - url: https://conda.anaconda.org/conda-forge/linux-64/protobuf-4.21.12-py39h227be39_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/protobuf-4.24.4-py39h60f6b12_0.conda hash: - md5: 984b4ee0c3241d7ce715f8a731421073 - sha256: 22c02a0ae705170d169642056ce2cdf17a9c64b394dc24ed5133e9761bd4486f + md5: c25d58de8c0408f157ce2937bf6d2043 + sha256: 191a019f70fa8819842a93e6e1d4a371c3e3926740a47c51a6b840d95a61f601 category: main optional: false - name: protobuf - version: 4.21.12 + version: 4.24.4 manager: conda platform: osx-arm64 dependencies: - libcxx: '>=14.0.6' - libprotobuf: '>=3.21.12,<3.22.0a0' + __osx: '>=10.9' + libabseil: '>=20230802.1,<20230803.0a0' + libcxx: '>=16.0.6' + libprotobuf: '>=4.24.4,<4.24.5.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* setuptools: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/protobuf-4.21.12-py39h23fbdae_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/protobuf-4.24.4-py39h6b23cf8_0.conda hash: - md5: 0b221c03b2823acffb6365b8cc31e887 - sha256: 54929bb589a9273e62eeefc1d2940716d1c3465b375222cd00e4b510d2844a3b + md5: 4a8be10067b23676d37dc60dceb2864a + sha256: f39a877accd33d477a973e211c105d81e5bc5ce0087f87b3fe0940c9d79eb78f category: main optional: false - name: protoc-gen-swagger @@ -11570,30 +11781,30 @@ package: category: main optional: false - name: 
psutil - version: 5.9.5 + version: 5.9.7 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py39hd1e30aa_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.7-py39hd1e30aa_0.conda hash: - md5: c2e412b0f11e5983bcfc35d9beb91ecb - sha256: 02df40691ebe8632ad18ca641c70b74302b538922ccc86c3afee5de83f4298f0 + md5: 34d2731732bc7de6269657d5d9fd6e79 + sha256: 1f5e5d4ce98df5dbfc8478a3339e3848891fed2f26405676ee39010777245894 category: main optional: false - name: psutil - version: 5.9.5 + version: 5.9.7 manager: conda platform: osx-arm64 dependencies: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/psutil-5.9.5-py39h0f82c59_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/psutil-5.9.7-py39h17cfd9d_0.conda hash: - md5: 1eb70daf1eb8478c5ccccf680a2dc025 - sha256: de53e5cd7ab4e14a957fcba7fa467beca916e4461f621f32a77dce331b95237c + md5: d3a5f34ecbce3ec8d4ba09f638416d32 + sha256: 0f83365c68837be6f5b861ced13ecef64245f2977d7c7059d149576242832236 category: main optional: false - name: psycopg2 @@ -11772,36 +11983,36 @@ package: category: main optional: false - name: pyarrow - version: 12.0.1 + version: 13.0.0 manager: conda platform: linux-64 dependencies: - libarrow: 12.0.1 + libarrow: 13.0.0 libgcc-ng: '>=12' libstdcxx-ng: '>=12' - numpy: '>=1.21.6,<2.0a0' + numpy: '>=1.22.4,<2.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-12.0.1-py39hfbd5978_8_cpu.conda + url: https://conda.anaconda.org/conda-forge/linux-64/pyarrow-13.0.0-py39h6925388_23_cpu.conda hash: - md5: fd98f7cc88572cff805a9576c8448c65 - sha256: 36036e1b71b85b218b542f996ddf8b25e8d4dd0411855c9bfad21f79d7dfac36 + md5: fc0f30899de0ebba12091adbee386f94 + sha256: 0b723965361c9b2ecd31369a314aa9667e2482ce8f0912a6dd87fa70f7d2126a category: main optional: 
false - name: pyarrow - version: 12.0.1 + version: 13.0.0 manager: conda platform: osx-arm64 dependencies: - libarrow: 12.0.1 - libcxx: '>=15.0.7' - numpy: '>=1.21.6,<2.0a0' + libarrow: 13.0.0 + libcxx: '>=14' + numpy: '>=1.22.4,<2.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-12.0.1-py39hf40061a_8_cpu.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/pyarrow-13.0.0-py39hb25a2a0_23_cpu.conda hash: - md5: 63896ce85b0d0afd38363fdb00e10d9f - sha256: d469979b28263e75fb7c64fdee8878ab02dff0d246004573e11b9bdb301856a4 + md5: 652a9fa809d6944921ca49c6ff1472a1 + sha256: acb21e3792c47c7b57dece0da175a2083f00186a86e9f1414b17969b4d896f8c category: main optional: false - name: pyarrow-hotfix @@ -12192,20 +12403,21 @@ package: ld_impl_linux-64: '>=2.36.1' libffi: '>=3.4,<4.0a0' libgcc-ng: '>=12' - libnsl: '>=2.0.0,<2.1.0a0' - libsqlite: '>=3.43.0,<4.0a0' + libnsl: '>=2.0.1,<2.1.0a0' + libsqlite: '>=3.44.2,<4.0a0' libuuid: '>=2.38.1,<3.0a0' + libxcrypt: '>=4.4.36' libzlib: '>=1.2.13,<1.3.0a0' ncurses: '>=6.4,<7.0a0' - openssl: '>=3.1.2,<4.0a0' + openssl: '>=3.2.0,<4.0a0' readline: '>=8.2,<9.0a0' - tk: '>=8.6.12,<8.7.0a0' + tk: '>=8.6.13,<8.7.0a0' tzdata: '' xz: '>=5.2.6,<6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda + url: https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_1_cpython.conda hash: - md5: 3ede353bc605068d9677e700b1847382 - sha256: 18db2220328abee8eb19f51c8df88bcfdf3a557b8181e7f5bda291deb067e40f + md5: 255a7002aeec7a067ff19b545aca6328 + sha256: c0e800d255a771926007043d2859cbbbdb1387477ec813f085640c8887b391a2 category: main optional: false - name: python @@ -12215,18 +12427,18 @@ package: dependencies: bzip2: '>=1.0.8,<2.0a0' libffi: '>=3.4,<4.0a0' - libsqlite: '>=3.43.0,<4.0a0' + libsqlite: '>=3.44.2,<4.0a0' libzlib: '>=1.2.13,<1.3.0a0' ncurses: '>=6.4,<7.0a0' - openssl: '>=3.1.2,<4.0a0' + openssl: '>=3.2.0,<4.0a0' 
readline: '>=8.2,<9.0a0' - tk: '>=8.6.12,<8.7.0a0' + tk: '>=8.6.13,<8.7.0a0' tzdata: '' xz: '>=5.2.6,<6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.9.18-hfa1ae8a_0_cpython.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/python-3.9.18-hd7ebdb9_1_cpython.conda hash: - md5: 7672472bbab6419a1211403e7c041116 - sha256: d6a4c49354b73248741b8b8cbfb0516ef6ceb3d6b3a4d110946195c7df228672 + md5: c48f67fd7147f37c941037de0a328560 + sha256: 7336d2cfefff2466f1f60144f10f5bf98f9d176a8c72ca85336baa5dc32299ef category: main optional: false - name: python-dateutil @@ -12256,27 +12468,27 @@ package: category: main optional: false - name: python-fastjsonschema - version: 2.19.0 + version: 2.19.1 manager: conda platform: linux-64 dependencies: python: '>=3.3' - url: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.19.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.19.1-pyhd8ed1ab_0.conda hash: - md5: e4dbdb3585c0266b4710467fe7b75cf4 - sha256: fdfe3f387c5ebde803605e1e90871c424519d2bfe2eb3bf9caad1c5a07f4c462 + md5: 4d3ceee3af4b0f9a1f48f57176bf8625 + sha256: 38b2db169d65cc5595e3ce63294c4fdb6a242ecf71f70b3ad8cad3bd4230d82f category: main optional: false - name: python-fastjsonschema - version: 2.19.0 + version: 2.19.1 manager: conda platform: osx-arm64 dependencies: python: '>=3.3' - url: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.19.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.19.1-pyhd8ed1ab_0.conda hash: - md5: e4dbdb3585c0266b4710467fe7b75cf4 - sha256: fdfe3f387c5ebde803605e1e90871c424519d2bfe2eb3bf9caad1c5a07f4c462 + md5: 4d3ceee3af4b0f9a1f48f57176bf8625 + sha256: 38b2db169d65cc5595e3ce63294c4fdb6a242ecf71f70b3ad8cad3bd4230d82f category: main optional: false - name: python-flatbuffers @@ -12328,7 +12540,7 @@ package: category: main optional: false - name: python-kubernetes - version: 28.1.0 + 
version: 29.0.0 manager: conda platform: linux-64 dependencies: @@ -12343,14 +12555,14 @@ package: six: '>=1.9.0' urllib3: '>=1.24.2,<2.0' websocket-client: '>=0.32.0,!=0.40.0,!=0.41.*,!=0.42.*' - url: https://conda.anaconda.org/conda-forge/noarch/python-kubernetes-28.1.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/python-kubernetes-29.0.0-pyhd8ed1ab_0.conda hash: - md5: 19e0767d01e052487650c0341c186219 - sha256: 559098188048ea52739815b2311b06bf381bf4c7a7924baaf2e99de0c01b2113 + md5: a94f4c6a1cff1e9837a8b62ae35c673f + sha256: 30e268b5e3299ae77f5bc34a2e290e47d5663f72be2ae3451ae7c24936670a74 category: main optional: false - name: python-kubernetes - version: 28.1.0 + version: 29.0.0 manager: conda platform: osx-arm64 dependencies: @@ -12365,10 +12577,10 @@ package: websocket-client: '>=0.32.0,!=0.40.0,!=0.41.*,!=0.42.*' oauthlib: '>=3.2.2' urllib3: '>=1.24.2,<2.0' - url: https://conda.anaconda.org/conda-forge/noarch/python-kubernetes-28.1.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/python-kubernetes-29.0.0-pyhd8ed1ab_0.conda hash: - md5: 19e0767d01e052487650c0341c186219 - sha256: 559098188048ea52739815b2311b06bf381bf4c7a7924baaf2e99de0c01b2113 + md5: a94f4c6a1cff1e9837a8b62ae35c673f + sha256: 30e268b5e3299ae77f5bc34a2e290e47d5663f72be2ae3451ae7c24936670a74 category: main optional: false - name: python-slugify @@ -12499,58 +12711,62 @@ package: category: main optional: false - name: pytorch - version: 2.0.0 + version: 2.1.0 manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' _openmp_mutex: '>=4.5' filelock: '' + fsspec: '' jinja2: '' libcblas: '>=3.9.0,<4.0a0' libgcc-ng: '>=12' - libprotobuf: '>=3.21.12,<3.22.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' libstdcxx-ng: '>=12' - libuv: '>=1.44.2,<2.0a0' - mkl: '>=2022.2.1,<2023.0a0' + libtorch: 2.1.0.* + libuv: '>=1.46.0,<2.0a0' + mkl: '>=2023.2.0,<2024.0a0' networkx: '' - numpy: '>=1.21.6,<2.0a0' + numpy: '>=1.22.4,<2.0a0' python: 
'>=3.9,<3.10.0a0' python_abi: 3.9.* sleef: '>=3.5.1,<4.0a0' sympy: '' typing_extensions: '' - url: https://conda.anaconda.org/conda-forge/linux-64/pytorch-2.0.0-cpu_mkl_py39h9f63279_101.conda + url: https://conda.anaconda.org/conda-forge/linux-64/pytorch-2.1.0-cpu_mkl_py39h9c325db_102.conda hash: - md5: 75e2cdb9b19d38367cad1c14b66fc3f5 - sha256: b775669b515de43cf2de62fd3ab35a1a4e77f34cef932debcb8a3769718b8229 + md5: d2c82847c1510b059a2dbf99661bedf4 + sha256: 87bbe2b919af09acf697b53749aeda1e004c4bb58224508ce0b3c1f8e1cf2c26 category: main optional: false - name: pytorch - version: 2.0.0 + version: 2.1.0 manager: conda platform: osx-arm64 dependencies: filelock: '' + fsspec: '' jinja2: '' libcblas: '>=3.9.0,<4.0a0' - libcxx: '>=15.0.7' + libcxx: '>=14' liblapack: '>=3.9.0,<4.0a0' - libprotobuf: '>=3.21.12,<3.22.0a0' - libuv: '>=1.44.2,<2.0a0' - llvm-openmp: '>=15.0.7' + libprotobuf: '>=4.24.4,<4.24.5.0a0' + libtorch: 2.1.0.* + libuv: '>=1.46.0,<2.0a0' + llvm-openmp: '>=16.0.6' networkx: '' nomkl: '' - numpy: '>=1.21.6,<2.0a0' + numpy: '>=1.22.4,<2.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* sleef: '>=3.5.1,<4.0a0' sympy: '' typing_extensions: '' - url: https://conda.anaconda.org/conda-forge/osx-arm64/pytorch-2.0.0-cpu_generic_py39hb35d202_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/pytorch-2.1.0-cpu_generic_py39h2c040b9_2.conda hash: - md5: 03e438278d6510f7b56d89a39d20f0ff - sha256: 6305cf96428d35275b7ea20cd8f9e9ece4b2f4f4d029e244cfd1b8a837d8f661 + md5: d7341f6a60401e53af264d0b9b835c88 + sha256: 2acf7b4577cd169dde3eccc2a696ce0dcee893837baaae68e0bef3efd8e4cf32 category: main optional: false - name: pytz @@ -12785,42 +13001,42 @@ package: category: main optional: false - name: rdma-core - version: '28.9' + version: '49.0' manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' libgcc-ng: '>=12' + libnl: '>=3.9.0,<4.0a0' libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/rdma-core-28.9-h59595ed_1.conda + 
url: https://conda.anaconda.org/conda-forge/linux-64/rdma-core-49.0-hd3aeb46_2.conda hash: - md5: aeffb7c06b5f65e55e6c637408dc4100 - sha256: 832f9393ab3144ce6468c6f150db9d398fad4451e96a8879afb3059f0c9902f6 + md5: 855579013120ad3078e7d5dcadb643e1 + sha256: 3f541042f7b51484a996eab53493b9242969bc406614eba76ceecfa4f4a0822a category: main optional: false - name: re2 - version: 2023.03.02 + version: 2023.06.02 manager: conda platform: linux-64 dependencies: - libgcc-ng: '>=12' - libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.03.02-h8c504da_0.conda + libre2-11: 2023.06.02 + url: https://conda.anaconda.org/conda-forge/linux-64/re2-2023.06.02-h2873b5e_0.conda hash: - md5: 206f8fa808748f6e90599c3368a1114e - sha256: 1727f893a352ca735fb96b09f9edf6fe18c409d65550fd37e8a192919e8c827b + md5: bb2d5e593ef13fe4aff0bc9440f945ae + sha256: 3e0bfb04b6d43312d711c5b49dbc3c7660b2e6e681ed504b1b322794462a1bcd category: main optional: false - name: re2 - version: 2023.03.02 + version: 2023.06.02 manager: conda platform: osx-arm64 dependencies: - libcxx: '>=14.0.6' - url: https://conda.anaconda.org/conda-forge/osx-arm64/re2-2023.03.02-hc5e2d97_0.conda + libre2-11: 2023.06.02 + url: https://conda.anaconda.org/conda-forge/osx-arm64/re2-2023.06.02-h6135d0a_0.conda hash: - md5: 7a851c0ab05247e3246eca2c3b243b9a - sha256: 39bc32dcef3b699e6f748cc51d5e6b05ab788334d5787c64f069f0122e74c0c5 + md5: 8f23674174b155300696a2be8b5c1407 + sha256: 963847258a82d9647311c5eb8829a49ac2161df12a304d5d6e61f788f0563442 category: main optional: false - name: readline @@ -12879,31 +13095,31 @@ package: category: main optional: false - name: referencing - version: 0.32.0 + version: 0.32.1 manager: conda platform: linux-64 dependencies: attrs: '>=22.2.0' python: '>=3.8' rpds-py: '>=0.7.0' - url: https://conda.anaconda.org/conda-forge/noarch/referencing-0.32.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/referencing-0.32.1-pyhd8ed1ab_0.conda hash: - md5: 
a7b5a535cd614e384594530aee7e6061 - sha256: dfd40282910a45e58882ed94b502b2a09f475efb04eaaa3bd8b3b5a9b21a19c3 + md5: 753a592b4e99d7d2cde6a8fd0797f414 + sha256: 658beff40c6355af0eeec624bbe4e892b4c68c0af2d8ff4c06677e6547140506 category: main optional: false - name: referencing - version: 0.32.0 + version: 0.32.1 manager: conda platform: osx-arm64 dependencies: python: '>=3.8' attrs: '>=22.2.0' rpds-py: '>=0.7.0' - url: https://conda.anaconda.org/conda-forge/noarch/referencing-0.32.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/referencing-0.32.1-pyhd8ed1ab_0.conda hash: - md5: a7b5a535cd614e384594530aee7e6061 - sha256: dfd40282910a45e58882ed94b502b2a09f475efb04eaaa3bd8b3b5a9b21a19c3 + md5: 753a592b4e99d7d2cde6a8fd0797f414 + sha256: 658beff40c6355af0eeec624bbe4e892b4c68c0af2d8ff4c06677e6547140506 category: main optional: false - name: requests @@ -13075,58 +13291,58 @@ package: category: main optional: false - name: rich-click - version: 1.7.2 + version: 1.7.3 manager: conda platform: linux-64 dependencies: click: '>=7,<9' python: '>=3.7' rich: '>=10' - url: https://conda.anaconda.org/conda-forge/noarch/rich-click-1.7.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/rich-click-1.7.3-pyhd8ed1ab_0.conda hash: - md5: fe1eae76a71246959cefd09b5b00869b - sha256: 4bfddbc5716a09bfea1c3345b1eed0ddb36d0e55f357afb60ca3e73c9231cc68 + md5: dc858795a3dcc70135ae2c61b75a251a + sha256: d6265dbdbc2c1bda6dc96af0edc482a75e03e9f531cd8f6ff77619becdf02378 category: main optional: false - name: rich-click - version: 1.7.2 + version: 1.7.3 manager: conda platform: osx-arm64 dependencies: python: '>=3.7' click: '>=7,<9' rich: '>=10' - url: https://conda.anaconda.org/conda-forge/noarch/rich-click-1.7.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/rich-click-1.7.3-pyhd8ed1ab_0.conda hash: - md5: fe1eae76a71246959cefd09b5b00869b - sha256: 4bfddbc5716a09bfea1c3345b1eed0ddb36d0e55f357afb60ca3e73c9231cc68 + md5: 
dc858795a3dcc70135ae2c61b75a251a + sha256: d6265dbdbc2c1bda6dc96af0edc482a75e03e9f531cd8f6ff77619becdf02378 category: main optional: false - name: rpds-py - version: 0.13.2 + version: 0.17.1 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.13.2-py39h9fdd4d6_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.17.1-py39h9fdd4d6_0.conda hash: - md5: 75f11bc5ee4f3cf1d12bc17066b6a367 - sha256: f7e4dc74a7c8928edd8bf63316f09443943faf723487310c0f4ab76a5c62a5e6 + md5: 601e09c9de429baaabce5f1283c51fdf + sha256: 0ec45efa1c5599a9779ac34cf5bcb7602a2294f79b8959a7b15b92b4275979ee category: main optional: false - name: rpds-py - version: 0.13.2 + version: 0.17.1 manager: conda platform: osx-arm64 dependencies: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.13.2-py39h8fec3ad_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/rpds-py-0.17.1-py39h8fec3ad_0.conda hash: - md5: b728c70cc4b7e617535a3e92769608a9 - sha256: fdfada05bed7be977e070ce0e4b4f1d478b5c4c71ee04508529cd34a8cc072dd + md5: 07873cbc88530a1210d61027ec3d4801 + sha256: 3ed6823681d74b465804d7db3aae81630a4db3c919297c6634aac2939eaf9d6d category: main optional: false - name: rsa @@ -13242,16 +13458,16 @@ package: category: main optional: false - name: s2n - version: 1.3.49 + version: 1.4.1 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' - openssl: '>=3.1.2,<4.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.49-h06160fa_0.conda + openssl: '>=3.2.0,<4.0a0' + url: https://conda.anaconda.org/conda-forge/linux-64/s2n-1.4.1-h06160fa_0.conda hash: - md5: 1d78349eb26366ecc034a4afe70a8534 - sha256: 7ef7af04a626003737ad311fe45165addcb53928117f1a892b78114f74be724c + md5: 54ae57d17d038b6a7aa7fdb55350d338 + sha256: 6f21a270e5fcf824d71b637ea26e389e469b3dc44a7e51062c27556c6e771b37 
category: main optional: false - name: s3fs @@ -13285,12 +13501,12 @@ package: category: main optional: false - name: scikit-learn - version: 1.3.2 + version: 1.4.0 manager: conda platform: linux-64 dependencies: _openmp_mutex: '>=4.5' - joblib: '>=1.1.1' + joblib: '>=1.2.0' libgcc-ng: '>=12' libstdcxx-ng: '>=12' numpy: '>=1.22.4,<2.0a0' @@ -13298,30 +13514,29 @@ package: python_abi: 3.9.* scipy: '' threadpoolctl: '>=2.0.0' - url: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.3.2-py39ha22ef79_2.conda + url: https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.4.0-py39ha22ef79_0.conda hash: - md5: bdd0cdcd58ced4168aeef7046663f2f6 - sha256: 32e66143b566e558f414e461f99011e8ccddbfc87c4bb3de77f86450f807f54c + md5: cb8da9a55b56e7fda3553c919f89431b + sha256: 29080f3b936839256acbd3a686e912f2d00e97c84e79357e26ad7a7cfb67a4e9 category: main optional: false - name: scikit-learn - version: 1.3.2 + version: 1.4.0 manager: conda platform: osx-arm64 dependencies: - __osx: '>=10.9' - joblib: '>=1.1.1' - libcxx: '>=16.0.6' + joblib: '>=1.2.0' + libcxx: '>=15' llvm-openmp: '>=17.0.6' numpy: '>=1.22.4,<2.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* scipy: '' threadpoolctl: '>=2.0.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/scikit-learn-1.3.2-py39h172c841_2.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/scikit-learn-1.4.0-py39h6dd658b_0.conda hash: - md5: 3f6ead0da40ab93f9d20b624bc3b1c87 - sha256: 4b81346f420e09990dd799acf89376a27d383927663c2dab968c59aa9a6f4212 + md5: 177d0e427f7fe3e8fd0d8aa5c9a32de4 + sha256: 0b18ab4449e9bb1b18c84becdf6dbb4982fedd502e683dfe49299fde2c20e701 category: main optional: false - name: scipy @@ -13410,15 +13625,15 @@ package: category: main optional: false - name: setuptools - version: 68.2.2 + version: 69.0.3 manager: conda platform: linux-64 dependencies: python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda + url: 
https://conda.anaconda.org/conda-forge/noarch/setuptools-69.0.3-pyhd8ed1ab_0.conda hash: - md5: fc2166155db840c634a1291a5c35a709 - sha256: 851901b1f8f2049edb36a675f0c3f9a98e1495ef4eb214761b048c6f696a06f7 + md5: 40695fdfd15a92121ed2922900d0308b + sha256: 0fe2a0473ad03dac6c7f5c42ef36a8e90673c88a0350dfefdea4b08d43803db2 category: main optional: false - name: setuptools @@ -13804,9 +14019,9 @@ package: sphinxcontrib-qthelp: '' imagesize: '' python: '>=3.6' + importlib-metadata: '>=4.4' pygments: '>=2.0' alabaster: '>=0.7,<0.8' - importlib-metadata: '>=4.4' requests: '>=2.5.0' jinja2: '>=2.3' snowballstemmer: '>=1.1' @@ -14386,7 +14601,7 @@ package: category: main optional: false - name: sqlalchemy - version: 2.0.23 + version: 2.0.25 manager: conda platform: linux-64 dependencies: @@ -14394,26 +14609,26 @@ package: libgcc-ng: '>=12' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - typing-extensions: '>=4.2.0' - url: https://conda.anaconda.org/conda-forge/linux-64/sqlalchemy-2.0.23-py39hd1e30aa_0.conda + typing-extensions: '>=4.6.0' + url: https://conda.anaconda.org/conda-forge/linux-64/sqlalchemy-2.0.25-py39hd1e30aa_0.conda hash: - md5: ad585ef3085a52e4890fe7ea1d2a0606 - sha256: 45ce1f68861b61e21638624f99017c16c53b3abab60691d9c48bf8586681c4e2 + md5: 36930c67bcfd8c1f1cf6cb5b9622e0fe + sha256: c4be30fbf84bc00f1d3f19eb03a2a105fa05183095d0f48fbc8a808e525c65b8 category: main optional: false - name: sqlalchemy - version: 2.0.23 + version: 2.0.25 manager: conda platform: osx-arm64 dependencies: greenlet: '!=0.4.17' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - typing-extensions: '>=4.2.0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/sqlalchemy-2.0.23-py39h17cfd9d_0.conda + typing-extensions: '>=4.6.0' + url: https://conda.anaconda.org/conda-forge/osx-arm64/sqlalchemy-2.0.25-py39h17cfd9d_0.conda hash: - md5: 8facab2c48a6eebe53d4664e86461e1b - sha256: a020f91d478a9b71080c7d31c9759d43418cf04d438d6b8ed9ef0d7351a1179a + md5: a11c96827d0355cca921e77ad1973348 + sha256: 
8c9f02958249dbe89433b93a921ec121821c970f1a53b63982d2c6359696d7e0 category: main optional: false - name: sqlite @@ -14604,16 +14819,17 @@ package: category: main optional: false - name: tbb - version: 2021.7.0 + version: 2021.11.0 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' + libhwloc: '>=2.9.3,<2.9.4.0a0' libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.7.0-h924138e_0.tar.bz2 + url: https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.11.0-h00ab1b0_0.conda hash: - md5: 819421f81b127a5547bf96ad57eccdd9 - sha256: 452a6def24912d2a118d863095c3f9cb05fe5c997357431a0ca4452eb7f65f08 + md5: fde515afbbe6e36eb4564965c20b1058 + sha256: 05f2282cb204eeb62dbc698e14475f0bbb7f1eb07081d7f5da37d61ad3c4acb3 category: main optional: false - name: tblib @@ -14665,13 +14881,13 @@ package: category: main optional: false - name: tensorboard - version: 2.14.1 + version: 2.15.1 manager: conda platform: linux-64 dependencies: absl-py: '>=0.4' google-auth: '>=1.6.3,<3' - google-auth-oauthlib: '>=0.5,<1.1' + google-auth-oauthlib: '>=0.5,<2' grpcio: '>=1.48.2' markdown: '>=2.6.8' numpy: '>=1.12.0' @@ -14682,14 +14898,14 @@ package: six: '>=1.9' tensorboard-data-server: '>=0.7.0,<0.8.0' werkzeug: '>=1.0.1' - url: https://conda.anaconda.org/conda-forge/noarch/tensorboard-2.14.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/tensorboard-2.15.1-pyhd8ed1ab_0.conda hash: - md5: fd77b4ee10b417788c0026de58568d01 - sha256: 19d91a20089855c613f3cf73bd72facab9f666539971bb69280c075c97c6491a + md5: 0bba0cc8e5247bca0411ee63e9b0bb2f + sha256: 652c97025f21fdb6473ccaa5b2c53ca8ae816d550a596a4544566cfcf5273591 category: main optional: false - name: tensorboard - version: 2.14.1 + version: 2.15.1 manager: conda platform: osx-arm64 dependencies: @@ -14705,11 +14921,11 @@ package: grpcio: '>=1.48.2' protobuf: '>=3.19.6' tensorboard-data-server: '>=0.7.0,<0.8.0' - google-auth-oauthlib: '>=0.5,<1.1' - url: 
https://conda.anaconda.org/conda-forge/noarch/tensorboard-2.14.1-pyhd8ed1ab_0.conda + google-auth-oauthlib: '>=0.5,<2' + url: https://conda.anaconda.org/conda-forge/noarch/tensorboard-2.15.1-pyhd8ed1ab_0.conda hash: - md5: fd77b4ee10b417788c0026de58568d01 - sha256: 19d91a20089855c613f3cf73bd72facab9f666539971bb69280c075c97c6491a + md5: 0bba0cc8e5247bca0411ee63e9b0bb2f + sha256: 652c97025f21fdb6473ccaa5b2c53ca8ae816d550a596a4544566cfcf5273591 category: main optional: false - name: tensorboard-data-server @@ -14742,37 +14958,37 @@ package: category: main optional: false - name: tensorflow - version: 2.14.0 + version: 2.15.0 manager: conda platform: linux-64 dependencies: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - tensorflow-base: 2.14.0 - tensorflow-estimator: 2.14.0 - url: https://conda.anaconda.org/conda-forge/linux-64/tensorflow-2.14.0-cpu_py39h4655687_0.conda + tensorflow-base: 2.15.0 + tensorflow-estimator: 2.15.0 + url: https://conda.anaconda.org/conda-forge/linux-64/tensorflow-2.15.0-cpu_py39h433cda9_1.conda hash: - md5: 2b69c2c80e61a9bfe86a9e9b78ff4ba7 - sha256: 140178c0a7ad4323a3a03a8fbc5f3228dd8d45d6c21661a30a385356f852d0b3 + md5: 38968dee79b3677fdc693a562c66495b + sha256: 4373911c6cde755f5a518770cf70d083927cd440fca14359ff731cc0c6eb226a category: main optional: false - name: tensorflow - version: 2.14.0 + version: 2.15.0 manager: conda platform: osx-arm64 dependencies: python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - tensorflow-base: 2.14.0 - tensorflow-estimator: 2.14.0 - url: https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-2.14.0-cpu_py39h2839aeb_0.conda + tensorflow-base: 2.15.0 + tensorflow-estimator: 2.15.0 + url: https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-2.15.0-cpu_py39h89c7d27_1.conda hash: - md5: b493f70acef9bd0b5883a3cdc3a308f6 - sha256: 2fb43d6c4f168ff4e51b3b5258db49b3c47c4da8984e398e317a137975f77ae2 + md5: bfefaaa2093af733d1fa8dc22f27c26e + sha256: 81f3e4cbea6eeead17a756f0cca55aafb817baec817caca1f8d5c15f74a5b5be 
category: main optional: false - name: tensorflow-base - version: 2.14.0 + version: 2.15.0 manager: conda platform: linux-64 dependencies: @@ -14782,23 +14998,23 @@ package: gast: '>=0.2.1,!=0.5.0,!=0.5.1,!=0.5.2' giflib: '>=5.2.1,<5.3.0a0' google-pasta: '>=0.1.1' - grpcio: 1.54.* + grpcio: 1.59.* h5py: '>=2.9.0' icu: '>=73.2,<74.0a0' - keras: '>=2.14,<2.15' - libabseil: '>=20230125.3,<20230126.0a0' - libcurl: '>=8.4.0,<9.0a0' + keras: '>=2.15,<2.16' + libabseil: '>=20230802.1,<20230803.0a0' + libcurl: '>=8.5.0,<9.0a0' libgcc-ng: '>=12' - libgrpc: '>=1.54.3,<1.55.0a0' + libgrpc: '>=1.59.3,<1.60.0a0' libjpeg-turbo: '>=3.0.0,<4.0a0' libpng: '>=1.6.39,<1.7.0a0' - libprotobuf: '>=3.21.12,<3.22.0a0' - libsqlite: '>=3.44.0,<4.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' + libsqlite: '>=3.44.2,<4.0a0' libstdcxx-ng: '>=12' libzlib: '>=1.2.13,<1.3.0a0' ml_dtypes: 0.2.0.* numpy: '>=1.22.4,<2.0a0' - openssl: '>=3.1.4,<4.0a0' + openssl: '>=3.2.0,<4.0a0' opt_einsum: '>=2.3.2' packaging: '' protobuf: '>=3.20.3,<5,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' @@ -14807,18 +15023,18 @@ package: python_abi: 3.9.* six: '>=1.12' snappy: '>=1.1.10,<2.0a0' - tensorboard: '>=2.14,<2.15' + tensorboard: '>=2.15,<2.16' termcolor: '>=1.1.0' typing_extensions: '>=3.6.6' wrapt: '>=1.11.0,<1.15' - url: https://conda.anaconda.org/conda-forge/linux-64/tensorflow-base-2.14.0-cpu_py39h65ed569_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/tensorflow-base-2.15.0-cpu_py39he1df281_1.conda hash: - md5: da26d8c9430738ef95213bf143467748 - sha256: a13ee6d37ea23cc2c31b8bbec609d9384280fd1545eda02f5dfb46ce69a3d6f0 + md5: 4232ae33223a83cf9a67751c93f3600d + sha256: 225295a7e957e1871ba550aef8ff09ff56c38b34ef2aa68197c1eeaa099fe8c1 category: main optional: false - name: tensorflow-base - version: 2.14.0 + version: 2.15.0 manager: conda platform: osx-arm64 dependencies: @@ -14828,22 +15044,22 @@ package: gast: '>=0.2.1,!=0.5.0,!=0.5.1,!=0.5.2' giflib: '>=5.2.1,<5.3.0a0' google-pasta: 
'>=0.1.1' - grpcio: 1.54.* + grpcio: 1.59.* h5py: '>=2.9.0' icu: '>=73.2,<74.0a0' - keras: '>=2.14,<2.15' - libabseil: '>=20230125.3,<20230126.0a0' - libcurl: '>=8.4.0,<9.0a0' - libcxx: '>=16.0.6' - libgrpc: '>=1.54.3,<1.55.0a0' + keras: '>=2.15,<2.16' + libabseil: '>=20230802.1,<20230803.0a0' + libcurl: '>=8.5.0,<9.0a0' + libcxx: '>=15' + libgrpc: '>=1.59.3,<1.60.0a0' libjpeg-turbo: '>=3.0.0,<4.0a0' libpng: '>=1.6.39,<1.7.0a0' - libprotobuf: '>=3.21.12,<3.22.0a0' - libsqlite: '>=3.44.0,<4.0a0' + libprotobuf: '>=4.24.4,<4.24.5.0a0' + libsqlite: '>=3.44.2,<4.0a0' libzlib: '>=1.2.13,<1.3.0a0' ml_dtypes: 0.2.0.* numpy: '>=1.22.4,<2.0a0' - openssl: '>=3.1.4,<4.0a0' + openssl: '>=3.2.0,<4.0a0' opt_einsum: '>=2.3.2' packaging: '' protobuf: '>=3.20.3,<5,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5' @@ -14852,71 +15068,71 @@ package: python_abi: 3.9.* six: '>=1.12' snappy: '>=1.1.10,<2.0a0' - tensorboard: '>=2.14,<2.15' + tensorboard: '>=2.15,<2.16' termcolor: '>=1.1.0' typing_extensions: '>=3.6.6' wrapt: '>=1.11.0,<1.15' - url: https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-base-2.14.0-cpu_py39ha6a491e_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-base-2.15.0-cpu_py39ha39414a_1.conda hash: - md5: 8400feabaf44f32247a9bc82b659afb7 - sha256: e2cefe1d85c5b9e10b35515adfd408df5585f64920dc84aa9bf62fba4c1cb4d9 + md5: d43097583588f87202f400e7cd26f2b8 + sha256: 220204350f31420504f9b7b7c532fff84fdc14dbc3f2202ee83baa89a4bd40f3 category: main optional: false - name: tensorflow-estimator - version: 2.14.0 + version: 2.15.0 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' libstdcxx-ng: '>=12' - openssl: '>=3.1.4,<4.0a0' + openssl: '>=3.2.0,<4.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - tensorflow-base: 2.14.0 - url: https://conda.anaconda.org/conda-forge/linux-64/tensorflow-estimator-2.14.0-cpu_py39hd56b5fd_0.conda + tensorflow-base: 2.15.0 + url: 
https://conda.anaconda.org/conda-forge/linux-64/tensorflow-estimator-2.15.0-cpu_py39hd56b5fd_1.conda hash: - md5: 01e077379807b9b0d703c68cd0b38c50 - sha256: 24593e6b59600e1a0c9340eeaa406d9b77ee04d7c9d349aef1020f625f872f09 + md5: 6da35921a24a8acde27b34936839c2ba + sha256: f02e785bcc451fbdc6267cba106ddb5dbc3ec4dd4281abd82702bab9fc355e34 category: main optional: false - name: tensorflow-estimator - version: 2.14.0 + version: 2.15.0 manager: conda platform: osx-arm64 dependencies: - libcxx: '>=16.0.6' - openssl: '>=3.1.4,<4.0a0' + libcxx: '>=15' + openssl: '>=3.2.0,<4.0a0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - tensorflow-base: 2.14.0 - url: https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-estimator-2.14.0-cpu_py39ha1daed2_0.conda + tensorflow-base: 2.15.0 + url: https://conda.anaconda.org/conda-forge/osx-arm64/tensorflow-estimator-2.15.0-cpu_py39ha1daed2_1.conda hash: - md5: 080a34a0c9508f7b877e4e4bed648f7f - sha256: a370bf33e2309f0e98b298ed6a8bd90f8cf2b7662fe85c03822dbef627f18618 + md5: 9fc315e012c9c336239e115e66e9a300 + sha256: 8c0da8e1398bb85023865085e1bdc8f3293e82a0bb75525136b2a3e357f1d1af category: main optional: false - name: termcolor - version: 2.3.0 + version: 2.4.0 manager: conda platform: linux-64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/termcolor-2.3.0-pyhd8ed1ab_0.conda + python: '>=3.8' + url: https://conda.anaconda.org/conda-forge/noarch/termcolor-2.4.0-pyhd8ed1ab_0.conda hash: - md5: 440d508f025b1692168caaf436504af3 - sha256: db8c38777093d87fc05dc3f57d26c171c4e4896f17a013c6f839050c989a759e + md5: a5033708ad9283907c3b1bc1f90d0d0d + sha256: 59588d41f2c02d599fd6528583013d85bd47d17b1acec11edbb29deadd81fbca category: main optional: false - name: termcolor - version: 2.3.0 + version: 2.4.0 manager: conda platform: osx-arm64 dependencies: - python: '>=3.7' - url: https://conda.anaconda.org/conda-forge/noarch/termcolor-2.3.0-pyhd8ed1ab_0.conda + python: '>=3.8' + url: 
https://conda.anaconda.org/conda-forge/noarch/termcolor-2.4.0-pyhd8ed1ab_0.conda hash: - md5: 440d508f025b1692168caaf436504af3 - sha256: db8c38777093d87fc05dc3f57d26c171c4e4896f17a013c6f839050c989a759e + md5: a5033708ad9283907c3b1bc1f90d0d0d + sha256: 59588d41f2c02d599fd6528583013d85bd47d17b1acec11edbb29deadd81fbca category: main optional: false - name: terminado @@ -14998,7 +15214,7 @@ package: category: main optional: false - name: tf2onnx - version: 1.15.1 + version: 1.16.1 manager: conda platform: linux-64 dependencies: @@ -15009,14 +15225,14 @@ package: requests: '' six: '' tensorflow: '>=2.6' - url: https://conda.anaconda.org/conda-forge/noarch/tf2onnx-1.15.1-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/tf2onnx-1.16.1-pyhd8ed1ab_0.conda hash: - md5: 41a891ad34703e0ebc2d36098b8bbe56 - sha256: 403f37c0c4881d11fbb63573c542105d9d1165294f35b1491143085c76aa5160 + md5: 3882e49e3d01c69231a7e48d09ca0ec6 + sha256: 17b0ed248ed0a5ac822ed6a4fa55b122c7c4ab01e069d4dacec5606f6d655233 category: main optional: false - name: tf2onnx - version: 1.15.1 + version: 1.16.1 manager: conda platform: osx-arm64 dependencies: @@ -15027,10 +15243,10 @@ package: tensorflow: '>=2.6' python-flatbuffers: '>=1.12' onnx: '>=1.4.1' - url: https://conda.anaconda.org/conda-forge/noarch/tf2onnx-1.15.1-pyhd8ed1ab_1.conda + url: https://conda.anaconda.org/conda-forge/noarch/tf2onnx-1.16.1-pyhd8ed1ab_0.conda hash: - md5: 41a891ad34703e0ebc2d36098b8bbe56 - sha256: 403f37c0c4881d11fbb63573c542105d9d1165294f35b1491143085c76aa5160 + md5: 3882e49e3d01c69231a7e48d09ca0ec6 + sha256: 17b0ed248ed0a5ac822ed6a4fa55b122c7c4ab01e069d4dacec5606f6d655233 category: main optional: false - name: threadpoolctl @@ -15058,41 +15274,52 @@ package: category: main optional: false - name: tiledb - version: 2.18.2 + version: 2.19.0 manager: conda platform: linux-64 dependencies: + azure-core-cpp: '>=1.10.3,<2.0a0' + azure-storage-blobs-cpp: '>=12.10.0,<13.0a0' + azure-storage-common-cpp: 
'>=12.5.0,<13.0a0' bzip2: '>=1.0.8,<2.0a0' + libabseil: '>=20230802.1,<20230803.0a0' + libcurl: '>=8.5.0,<9.0a0' libgcc-ng: '>=12' + libgoogle-cloud: '>=2.12.0,<2.13.0a0' libstdcxx-ng: '>=12' - libxml2: '>=2.12.1,<2.13.0a0' + libxml2: '>=2.12.3,<3.0.0a0' libzlib: '>=1.2.13,<1.3.0a0' lz4-c: '>=1.9.3,<1.10.0a0' openssl: '>=3.2.0,<4.0a0' zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.18.2-h99f50a1_1.conda + url: https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.19.0-hc1131af_0.conda hash: - md5: 68c1b90224a38f6a8926140340c28bb3 - sha256: 59b8ffdff6ed696cd1a7cc84f3fe0bc43f540ae45032b654fc3c7edd6bce2ddf + md5: 51f6de9ca86bdeeb92e89e4abccd03e8 + sha256: b2896839b41289205562b211a6e45ebdbb97018e076c1ae4faecbfcc421810d0 category: main optional: false - name: tiledb - version: 2.18.2 + version: 2.19.0 manager: conda platform: osx-arm64 dependencies: - __osx: '>=10.9' + azure-core-cpp: '>=1.10.3,<2.0a0' + azure-storage-blobs-cpp: '>=12.10.0,<13.0a0' + azure-storage-common-cpp: '>=12.5.0,<13.0a0' bzip2: '>=1.0.8,<2.0a0' - libcxx: '>=16.0.6' - libxml2: '>=2.12.1,<2.13.0a0' + libabseil: '>=20230802.1,<20230803.0a0' + libcurl: '>=8.5.0,<9.0a0' + libcxx: '>=15' + libgoogle-cloud: '>=2.12.0,<2.13.0a0' + libxml2: '>=2.12.3,<3.0.0a0' libzlib: '>=1.2.13,<1.3.0a0' lz4-c: '>=1.9.3,<1.10.0a0' openssl: '>=3.2.0,<4.0a0' zstd: '>=1.5.5,<1.6.0a0' - url: https://conda.anaconda.org/conda-forge/osx-arm64/tiledb-2.18.2-hcd9d348_1.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/tiledb-2.19.0-h567544c_0.conda hash: - md5: 5acc6437b79888d44e51f9d5b5dcc22a - sha256: 5d6b8c514f4a116d76b33b48e4cc09568711ef03753158e4a7ec1bbb8c8e255c + md5: 60e4d67eedc2f19bfbdf964187f52d15 + sha256: beeeefd593a5f0fa70b25d7d9563dd54c3840e85f6c5d982e2066f7904077191 category: main optional: false - name: tinycss2 @@ -15296,27 +15523,27 @@ package: category: main optional: false - name: traitlets - version: 5.14.0 + version: 5.14.1 manager: conda platform: 
linux-64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.1-pyhd8ed1ab_0.conda hash: - md5: 886f4a84ddb49b943b1697ac314e85b3 - sha256: c32412029033264140926be474d327d7fd57c0d11db9b1745396b3d4db78a799 + md5: 1c6acfdc7ecbfe09954c4216da99c146 + sha256: fa78d68f74ec8aae5c93f135140bfdbbf0ab60a79c6062b55d73c316068545ec category: main optional: false - name: traitlets - version: 5.14.0 + version: 5.14.1 manager: conda platform: osx-arm64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.0-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.1-pyhd8ed1ab_0.conda hash: - md5: 886f4a84ddb49b943b1697ac314e85b3 - sha256: c32412029033264140926be474d327d7fd57c0d11db9b1745396b3d4db78a799 + md5: 1c6acfdc7ecbfe09954c4216da99c146 + sha256: fa78d68f74ec8aae5c93f135140bfdbbf0ab60a79c6062b55d73c316068545ec category: main optional: false - name: typeguard @@ -15348,27 +15575,27 @@ package: category: main optional: false - name: types-python-dateutil - version: 2.8.19.14 + version: 2.8.19.20240106 manager: conda platform: linux-64 dependencies: python: '>=3.6' - url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.8.19.14-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.8.19.20240106-pyhd8ed1ab_0.conda hash: - md5: 4df15c51a543e806d439490b862be1c6 - sha256: 7b0129c72d371fa7a06ed5dd1d701844c20d03bb4641a38a88a982b347d087e2 + md5: c9096a546660b9079dce531c0039e074 + sha256: 09ef8cc587bdea80a83b6f820dbae24daadcf82be088fb0a9f6495781653e300 category: main optional: false - name: types-python-dateutil - version: 2.8.19.14 + version: 2.8.19.20240106 manager: conda platform: osx-arm64 dependencies: python: '>=3.6' - url: 
https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.8.19.14-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.8.19.20240106-pyhd8ed1ab_0.conda hash: - md5: 4df15c51a543e806d439490b862be1c6 - sha256: 7b0129c72d371fa7a06ed5dd1d701844c20d03bb4641a38a88a982b347d087e2 + md5: c9096a546660b9079dce531c0039e074 + sha256: 09ef8cc587bdea80a83b6f820dbae24daadcf82be088fb0a9f6495781653e300 category: main optional: false - name: typing-extensions @@ -15472,49 +15699,49 @@ package: category: main optional: false - name: tzcode - version: 2023c + version: 2023d manager: conda platform: linux-64 dependencies: __glibc: '>=2.17,<3.0.a0' libgcc-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/tzcode-2023c-h0b41bf4_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/tzcode-2023d-h3f72095_0.conda hash: - md5: 0c0533894f21c3d35697cb8378d390e2 - sha256: 62b0d3eee4260d310f578015305834b8a588377f796e5e290ec267da8a51a027 + md5: 1c63518899838477ebd497e3e3327f81 + sha256: 0eab7ec2f4c983efb365bacc2e7bd6620f516a50d2e9b183ba1c9c243601cce3 category: main optional: false - name: tzcode - version: 2023c + version: 2023d manager: conda platform: osx-arm64 dependencies: {} - url: https://conda.anaconda.org/conda-forge/osx-arm64/tzcode-2023c-h1a8c8d9_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/tzcode-2023d-h93a5062_0.conda hash: - md5: 96779d3be996d78411b083f99a51199c - sha256: 0a60ff53272547a0f80862f0a1969a5d1cec16bd2e9098ed5b07d317682a4361 + md5: 8f6c1eef62c660bfb43897fe14b2ca95 + sha256: a4bed39a41b26d5f8134fd0cb0df15bc0b9e645e3ee6f88ee81c8d24651eb8cd category: main optional: false - name: tzdata - version: 2023c + version: 2023d manager: conda platform: linux-64 dependencies: {} - url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023d-h0c530f3_0.conda hash: - md5: 939e3e74d8be4dac89ce83b20de2492a - 
sha256: 0449138224adfa125b220154408419ec37c06b0b49f63c5954724325903ecf55 + md5: 8dee24b8be2d9ff81e7bd4d7d97ff1b0 + sha256: 04f2ab3e36f2015841551415bf16bf62933bd94b7085d4be5493b388e95a9c3d category: main optional: false - name: tzdata - version: 2023c + version: 2023d manager: conda platform: osx-arm64 dependencies: {} - url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/tzdata-2023d-h0c530f3_0.conda hash: - md5: 939e3e74d8be4dac89ce83b20de2492a - sha256: 0449138224adfa125b220154408419ec37c06b0b49f63c5954724325903ecf55 + md5: 8dee24b8be2d9ff81e7bd4d7d97ff1b0 + sha256: 04f2ab3e36f2015841551415bf16bf62933bd94b7085d4be5493b388e95a9c3d category: main optional: false - name: tzlocal @@ -15544,18 +15771,18 @@ package: category: main optional: false - name: ucx - version: 1.14.1 + version: 1.15.0 manager: conda platform: linux-64 dependencies: libgcc-ng: '>=12' libnuma: '>=2.0.16,<3.0a0' libstdcxx-ng: '>=12' - rdma-core: '>=28.9,<29.0a0' - url: https://conda.anaconda.org/conda-forge/linux-64/ucx-1.14.1-h64cca9d_5.conda + rdma-core: '>=49' + url: https://conda.anaconda.org/conda-forge/linux-64/ucx-1.15.0-h75e419f_2.conda hash: - md5: 39aa3b356d10d7e5add0c540945a0944 - sha256: a62f3fb56849dc37270f9078e1c8ba32328bc3ba4d32cf1f7dace48b431d5abe + md5: 5798de4587dc70fa6db81663e79f176a + sha256: 81fdea1a5ee5075bf4cbdc665b08d0e2ee9d3476f9d16acc58e2bbd913e9197b category: main optional: false - name: ukkonen @@ -15617,27 +15844,27 @@ package: category: main optional: false - name: unidecode - version: 1.3.7 + version: 1.3.8 manager: conda platform: linux-64 dependencies: python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/unidecode-1.3.7-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/unidecode-1.3.8-pyhd8ed1ab_0.conda hash: - md5: 06532129755273a6d5a0e0cd179330a5 - sha256: 6f6012c9103324680b8cfa666dd42b9de1da942520b3dc223fda3a43021d6189 + md5: 
913724e0dfe2708b7b7d4e35b8cc2e0f + sha256: 3f29636a555736983ac2bdeb6e41a5cd85b572fa4f6cc2270d6c6543d8eb8c0b category: main optional: false - name: unidecode - version: 1.3.7 + version: 1.3.8 manager: conda platform: osx-arm64 dependencies: python: '>=3.5' - url: https://conda.anaconda.org/conda-forge/noarch/unidecode-1.3.7-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/unidecode-1.3.8-pyhd8ed1ab_0.conda hash: - md5: 06532129755273a6d5a0e0cd179330a5 - sha256: 6f6012c9103324680b8cfa666dd42b9de1da942520b3dc223fda3a43021d6189 + md5: 913724e0dfe2708b7b7d4e35b8cc2e0f + sha256: 3f29636a555736983ac2bdeb6e41a5cd85b572fa4f6cc2270d6c6543d8eb8c0b category: main optional: false - name: uri-template @@ -15815,27 +16042,27 @@ package: category: main optional: false - name: wcwidth - version: 0.2.12 + version: 0.2.13 manager: conda platform: linux-64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.12-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda hash: - md5: bf4a1d1a97ca27b0b65bacd9e238b484 - sha256: ca757d0fc2dbd422af9d3238a8b4b630a6e11df3707a447bd89540656770d1d7 + md5: 68f0738df502a14213624b288c60c9ad + sha256: b6cd2fee7e728e620ec736d8dfee29c6c9e2adbd4e695a31f1d8f834a83e57e3 category: main optional: false - name: wcwidth - version: 0.2.12 + version: 0.2.13 manager: conda platform: osx-arm64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.12-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda hash: - md5: bf4a1d1a97ca27b0b65bacd9e238b484 - sha256: ca757d0fc2dbd422af9d3238a8b4b630a6e11df3707a447bd89540656770d1d7 + md5: 68f0738df502a14213624b288c60c9ad + sha256: b6cd2fee7e728e620ec736d8dfee29c6c9e2adbd4e695a31f1d8f834a83e57e3 category: main optional: false - name: webcolors @@ -16012,33 +16239,33 @@ package: category: main optional: false - name: 
xerces-c - version: 3.2.4 + version: 3.2.5 manager: conda platform: linux-64 dependencies: icu: '>=73.2,<74.0a0' - libcurl: '>=8.2.1,<9.0a0' + libcurl: '>=8.5.0,<9.0a0' libgcc-ng: '>=12' - libnsl: '>=2.0.0,<2.1.0a0' + libnsl: '>=2.0.1,<2.1.0a0' libstdcxx-ng: '>=12' - url: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.4-hac6953d_3.conda + url: https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-hac6953d_0.conda hash: - md5: 297e6a75dc1b6a440cd341a85eab8a00 - sha256: faf1c8f0e625466efec442e987737057ca304f1fcf79055da4d9e93e49f14ffa + md5: 63b80ca78d29380fe69e69412dcbe4ac + sha256: 75d06ca406f03f653d7a3183f2a1ccfdb3a3c6c830493933ec4c3c98e06a32bb category: main optional: false - name: xerces-c - version: 3.2.4 + version: 3.2.5 manager: conda platform: osx-arm64 dependencies: icu: '>=73.2,<74.0a0' - libcurl: '>=8.2.1,<9.0a0' - libcxx: '>=15.0.7' - url: https://conda.anaconda.org/conda-forge/osx-arm64/xerces-c-3.2.4-hd886eac_3.conda + libcurl: '>=8.5.0,<9.0a0' + libcxx: '>=15' + url: https://conda.anaconda.org/conda-forge/osx-arm64/xerces-c-3.2.5-hf393695_0.conda hash: - md5: 916e77cb0be0040410881fba8e28b5bb - sha256: 5ecc3322ddcad0a002a44bd4dddfe898b9e02951c629f6962c23b3bcf6014c9f + md5: 5e4741a1e687aee5fc9c409a0476bef2 + sha256: 8ad901a5fe535ebd16b469cf8e46cf174f7e6e4d9b432cc8cc02666a87e7e2ee category: main optional: false - name: xorg-kbproto @@ -16299,7 +16526,7 @@ package: category: main optional: false - name: yarl - version: 1.9.3 + version: 1.9.4 manager: conda platform: linux-64 dependencies: @@ -16308,14 +16535,14 @@ package: multidict: '>=4.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/linux-64/yarl-1.9.3-py39hd1e30aa_0.conda + url: https://conda.anaconda.org/conda-forge/linux-64/yarl-1.9.4-py39hd1e30aa_0.conda hash: - md5: c8654dea9ff4b633fbdfddff7e321bb9 - sha256: 871782b22363b3fb449e2a29a3620045a1c40643e3726767f5b9cd4455ec36c7 + md5: 7288bccf99dd979dfcf80bb372c3de3f + sha256: 
a0370c724d347103ae1a7c8a49166cc69359d80055c11bc5d7222d259efd8f12 category: main optional: false - name: yarl - version: 1.9.3 + version: 1.9.4 manager: conda platform: osx-arm64 dependencies: @@ -16323,10 +16550,10 @@ package: multidict: '>=4.0' python: '>=3.9,<3.10.0a0' python_abi: 3.9.* - url: https://conda.anaconda.org/conda-forge/osx-arm64/yarl-1.9.3-py39h17cfd9d_0.conda + url: https://conda.anaconda.org/conda-forge/osx-arm64/yarl-1.9.4-py39h17cfd9d_0.conda hash: - md5: b882e8dbbb5792b54bc8d9a44d742047 - sha256: 761a23146d980333d8ec0676ca9044f5f104804fc2ea753a0ba4ab1d3b6cda37 + md5: 28614a7a52d6907a1e79ef6706e07400 + sha256: be088ed67246f51deb9a025617408bc8c80845d861f5695d9633aaca3b9fc555 category: main optional: false - name: zeromq @@ -16688,6 +16915,54 @@ package: sha256: fb1ab7b0548fe44b3d538041f0a374505b7f990d448a935ea36649c5ccab5acf category: main optional: false +- name: grpcio + version: 1.43.0 + manager: pip + platform: linux-64 + dependencies: + six: '>=1.5.2' + url: https://files.pythonhosted.org/packages/81/e7/72ce7de706f5969359ff5a8ff27521f1d0fa63dbe08ac8546674ace0709d/grpcio-1.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl + hash: + sha256: 594aaa0469f4fca7773e80d8c27bf1298e7bbce5f6da0f084b07489a708f16ab + category: main + optional: false +- name: grpcio + version: 1.43.0 + manager: pip + platform: osx-arm64 + dependencies: + six: '>=1.5.2' + url: https://files.pythonhosted.org/packages/c6/6b/5f7cd38ff3ac80f47cbe56618fe45502f90b41a56f5d9e248ee574e14687/grpcio-1.43.0.tar.gz + hash: + sha256: 735d9a437c262ab039d02defddcb9f8f545d7009ae61c0114e19dda3843febe5 + category: main + optional: false +- name: grpcio-status + version: 1.43.0 + manager: pip + platform: linux-64 + dependencies: + protobuf: '>=3.6.0' + grpcio: '>=1.43.0' + googleapis-common-protos: '>=1.5.5' + url: https://files.pythonhosted.org/packages/a3/6b/4d49846da5b5a496658fde7c1315c2ed48b3d98f7460122a19f94af39248/grpcio_status-1.43.0-py3-none-any.whl + hash: + sha256: 
9036b24f5769adafdc3e91d9434c20e9ede0b30f50cc6bff105c0f414bb9e0e0 + category: main + optional: false +- name: grpcio-status + version: 1.43.0 + manager: pip + platform: osx-arm64 + dependencies: + protobuf: '>=3.6.0' + grpcio: '>=1.43.0' + googleapis-common-protos: '>=1.5.5' + url: https://files.pythonhosted.org/packages/a3/6b/4d49846da5b5a496658fde7c1315c2ed48b3d98f7460122a19f94af39248/grpcio_status-1.43.0-py3-none-any.whl + hash: + sha256: 9036b24f5769adafdc3e91d9434c20e9ede0b30f50cc6bff105c0f414bb9e0e0 + category: main + optional: false - name: htmlmin version: 0.1.12 manager: pip @@ -16778,6 +17053,21 @@ package: sha256: bf14aa0eb22b58c231243dccf7e7f42f7beec48970f2549b3a6acc737d1a4ba4 category: main optional: false +- name: modin + version: 0.22.3 + manager: pip + platform: osx-arm64 + dependencies: + pandas: 1.5.3 + packaging: '*' + numpy: '>=1.18.5' + fsspec: '*' + psutil: '*' + url: https://files.pythonhosted.org/packages/79/68/e560890fe0b8b89eb55e30bf7c11c4f43d1975da578e516a0996a7fce562/modin-0.22.3-py3-none-any.whl + hash: + sha256: e438ead6eb8dc536dbf8c74365e007d88d6187196f2d0756b77d6551bf35a686 + category: main + optional: false - name: numba version: 0.58.1 manager: pip @@ -16802,6 +17092,19 @@ package: sha256: 9e9356e943617f5e35a74bf56ff6e7cc83e6b1865d5e13cee535d79bf2cae954 category: main optional: false +- name: pandas + version: 1.5.3 + manager: pip + platform: osx-arm64 + dependencies: + python-dateutil: '>=2.8.1' + pytz: '>=2020.1' + numpy: '>=1.20.3' + url: https://files.pythonhosted.org/packages/a7/2b/c71df8794e8e75ba1ec9da1c1a2efc946590aa79a05148a4138405ef5f72/pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl + hash: + sha256: a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a + category: main + optional: false - name: patsy version: 0.5.4 manager: pip diff --git a/monodocs-environment.yaml b/monodocs-environment.yaml index 6a707295ea..140552aa2d 100644 --- a/monodocs-environment.yaml +++ b/monodocs-environment.yaml @@ -6,7 +6,7 @@ 
dependencies: - pip - codespell - furo - - flytekit + - flytekit>=1.10.2 - gitpython - ipython!=8.7.0 - graphviz @@ -67,6 +67,7 @@ dependencies: - flytekitplugins-kfpytorch - flytekitplugins-sqlalchemy - dolt_integrations + - modin - google-cloud - ydata_profiling - whylogs==1.3.3 # whylogs diff --git a/rsts/Makefile b/rsts/Makefile deleted file mode 100644 index a761edf1c9..0000000000 --- a/rsts/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -SPHINXPROJ = flyte -SOURCEDIR = . -BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/rsts/_static/custom.css b/rsts/_static/custom.css deleted file mode 100644 index 9744524445..0000000000 --- a/rsts/_static/custom.css +++ /dev/null @@ -1,33 +0,0 @@ -.getting-started-panels div.card-body { - padding: 0; -} - -.getting-started-panels a.btn-primary { - color: white !important; - background-color: var(--color-link); - border-color: var(--color-link); -} - -.getting-started-panels a.btn-outline-primary { - color: white !important; - background-color: var(--color-link); - opacity: 0.5; - border-color: var(--color-link); -} - -html .tabbed-set > label { - color: var(--color-foreground-border); -} - -html .tabbed-set > input:checked + label { - border-color: var(--color-link); - color: var(--color-link); -} - -html .tabbed-set > label:hover { - color: var(--color-link); -} - -html .tabbed-content { - box-shadow: 0 -.0625rem var(--color-background-border),0 .0625rem var(--color-background-border); -} \ No newline at 
end of file diff --git a/rsts/community/contribute.rst b/rsts/community/contribute.rst deleted file mode 100644 index b34416ff4b..0000000000 --- a/rsts/community/contribute.rst +++ /dev/null @@ -1,734 +0,0 @@ -.. _contribute_Flyte: - -##################### -Contributing to Flyte -##################### - -.. tags:: Contribute, Basic - -Thank you for taking the time to contribute to Flyte! -Please read our `Code of Conduct `__ before contributing to Flyte. - -Here are some guidelines for you to follow, which will make your first and follow-up contributions easier. - -TL;DR: Find the repo-specific contribution guidelines in the `Component Reference <#component-reference>`__ section. - -💻 Becoming a contributor -========================= - -An issue tagged with `good first issue `__ is the best place to start for first-time contributors. - -To assign an issue to yourself, comment ``#self-assign`` or ``#take``. - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/contribution_guide/self-assign.png - :alt: self-assign - -**Appetizer for every repo: Fork and clone the concerned repository. Create a new branch on your fork and make the required changes. Create a pull request once your work is ready for review.** - -.. note:: - To open a pull request, refer to `GitHub's guide `__ for detailed instructions. - -Example PR for your reference: `GitHub PR `__. -A couple of checks are introduced to help maintain the robustness of the project. - -#. To get through DCO, sign off on every commit (`Reference `__) -#. To improve code coverage, write unit tests to test your code -#. Make sure all the tests pass. If you face any issues, please let us know - -On a side note, format your Go code with ``golangci-lint`` followed by ``goimports`` (use ``make lint`` and ``make goimports``), and Python code with ``black`` and ``isort`` (use ``make fmt``). -If make targets are not available, you can manually format the code. 
-Refer to `Effective Go `__, `Black `__, and `Isort `__ for full coding standards. - -As you become more involved with the project, you may be able to be added as a contributor to the repos you're working on, -but there is a medium term effort to move all development to forks. - -📃 Documentation -================ - -Flyte uses Sphinx for documentation. ``protoc-gen-doc`` is used to generate the documentation from ``.proto`` files. - -Sphinx spans multiple repositories under `flyteorg `__. It uses reStructured Text (rst) files to store the documentation content. -For API- and code-related content, it extracts docstrings from the code files. - -To get started, refer to the `reStructuredText reference `__. - -For minor edits that don't require a local setup, you can edit the GitHub page in the documentation to propose improvements. - -Intersphinx -*********** - -`Intersphinx `__ can generate automatic links to the documentation of objects in other projects. - -To establish a reference to any other documentation from Flyte or within it, use Intersphinx. - -To do so, create an ``intersphinx_mapping`` in the ``conf.py`` file which should be present in the respective ``docs`` repository. -For example, ``rsts`` is the docs repository for the ``flyte`` repo. - -For example: - -.. code-block:: python - - intersphinx_mapping = { - "python": ("https://docs.python.org/3", None), - "flytekit": ("https://flyte.readthedocs.io/projects/flytekit/en/master/", None), - } - -The key refers to the name used to refer to the file (while referencing the documentation), and the URL denotes the precise location. - -Here is an example using ``:std:doc``: - -* Direct reference - - .. code-block:: text - - Task: :std:doc:`generated/flytekit.task` - - Output: - - Task: :std:doc:`generated/flytekit.task` - -* Custom name - - .. code-block:: text - - :std:doc:`Using custom words ` - - Output: - - :std:doc:`Using custom words ` - -| - -You can cross-reference multiple Python objects. 
Check out this `section `__ to learn more. - -| - -For instance, `task` decorator in flytekit uses the ``func`` role. - -.. code-block:: text - - Link to flytekit code :py:func:`flytekit:flytekit.task` - -Output: - -Link to flytekit code :py:func:`flytekit:flytekit.task` - -| - -Here are a couple more examples. - -.. code-block:: text - - :py:mod:`Module ` - :py:class:`Class ` - :py:data:`Data ` - :py:func:`Function ` - :py:meth:`Method ` - -Output: - -:py:mod:`Module ` - -:py:class:`Class ` - -:py:data:`Data ` - -:py:func:`Function ` - -:py:meth:`Method ` - -🧱 Component reference -====================== - -To understand how the below components interact with each other, refer to :ref:`Understand the lifecycle of a workflow `. - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/contribution_guide/dependency_graph.png - :alt: Dependency graph between various flyteorg repos - :align: center - :figclass: align-center - - The dependency graph between various flyte repos - -``flyte`` -********* - -.. list-table:: - - * - `Repo `__ - * - **Purpose**: Deployment, Documentation, and Issues - * - **Languages**: Kustomize & RST - -.. note:: - For the ``flyte`` repo, run the following command in the repo's root to generate documentation locally. - - .. code-block:: console - - make -C rsts html - -``flyteidl`` -************ - -.. list-table:: - - * - `Repo `__ - * - **Purpose**: Flyte workflow specification is in `protocol buffers `__ which forms the core of Flyte - * - **Language**: Protobuf - * - **Guidelines**: Refer to the `README `__ - -``flytepropeller`` -****************** - -.. list-table:: - - * - `Repo `__ | `Code Reference `__ - * - **Purpose**: Kubernetes-native operator - * - **Language**: Go - * - **Guidelines:** - - * Check for Makefile in the root repo - * Run the following commands: - * ``make generate`` - * ``make test_unit`` - * ``make link`` - * To compile, run ``make compile`` - -``flyteadmin`` -************** - -.. 
list-table:: - - * - `Repo `__ | `Code Reference `__ - * - **Purpose**: Control Plane - * - **Language**: Go - * - **Guidelines**: - - * Check for Makefile in the root repo - * If the service code has to be tested, run it locally: - * ``make compile`` - * ``make server`` - * To seed data locally: - * ``make compile`` - * ``make seed_projects`` - * ``make migrate`` - * To run integration tests locally: - * ``make integration`` - * (or to run in containerized dockernetes): ``make k8s_integration`` - -``flytekit`` -************ - -.. list-table:: - - * - `Repo `__ - * - **Purpose**: Python SDK & Tools - * - **Language**: Python - * - **Guidelines**: Refer to the `Flytekit Contribution Guide `__ - -``flyteconsole`` -**************** - -.. list-table:: - - * - `Repo `__ - * - **Purpose**: Admin Console - * - **Language**: Typescript - * - **Guidelines**: Refer to the `README `__ - -``datacatalog`` -*************** - -.. list-table:: - - * - `Repo `__ | `Code Reference `__ - * - **Purpose**: Manage Input & Output Artifacts - * - **Language**: Go - -``flyteplugins`` -**************** - -.. list-table:: - - * - `Repo `__ | `Code Reference `__ - * - **Purpose**: Flyte Plugins - * - **Language**: Go - * - **Guidelines**: - - * Check for Makefile in the root repo - * Run the following commands: - * ``make generate`` - * ``make test_unit`` - * ``make link`` - -``flytestdlib`` -*************** - -.. list-table:: - - * - `Repo `__ - * - **Purpose**: Standard Library for Shared Components - * - **Language**: Go - -``flytesnacks`` -*************** - -.. list-table:: - - * - `Repo `__ - * - **Purpose**: Examples, Tips, and Tricks to use Flytekit SDKs - * - **Language**: Python (In the future, Java examples will be added) - * - **Guidelines**: Refer to the `Flytesnacks Contribution Guide `__ - -``flytectl`` -************ - -.. 
list-table:: - - * - `Repo `__ - * - **Purpose**: A standalone Flyte CLI - * - **Language**: Go - * - **Guidelines**: Refer to the `FlyteCTL Contribution Guide `__ - - -🔮 Development Environment Setup Guide -====================================== - -This guide provides a step-by-step approach to setting up a local development environment for -`flyteidl `_, `flyteadmin `_, -`flyteplugins `_, `flytepropeller `_, -`flytekit `_ , `flyteconsole `_, -`datacatalog `_, and `flytestdlib `_. - -The video below is a tutorial on how to set up a local development environment for Flyte. - -.. youtube:: V-KlVQmQAjE - -Requirements -************ - -This guide has been tested and used on AWS EC2 with an Ubuntu 22.04 -image. The following tools are required: - -- `Docker `__ -- `Kubectl `__ -- `Go `__ - -Content -******* - -- `How to setup dev environment for flyteidl, flyteadmin, flyteplugins, - flytepropeller, datacatalog and flytestdlib? <#how-to-setup-dev-environment-for-flyteidl-flyteadmin-flyteplugins-flytepropeller-datacatalog-and-flytestdlib>`__ - -- `How to setup dev environment for - flytekit? <#how-to-setup-dev-environment-for-flytekit>`__ - -- `How to setup dev environment for - flyteconsole? <#how-to-setup-dev-environment-for-flyteconsole>`__ - -- `How to access Flyte UI, minio, postgres, k3s, and endpoints? - <#how-to-access-flyte-ui-minio-postgres-k3s-and-endpoints>`__ - -How to setup dev environment for flyteidl, flyteadmin, flyteplugins, flytepropeller, datacatalog and flytestdlib? -****************************************************************************************************************************** - -**1. Install flytectl** - - -`Flytectl `__ is a portable and lightweight command-line interface to work with Flyte. - -.. 
code:: shell - - # Step1: Install the latest version of flytectl - curl -sL https://ctl.flyte.org/install | bash - # flyteorg/flytectl info checking GitHub for latest tag - # flyteorg/flytectl info found version: 0.6.39 for v0.6.39/Linux/x86_64 - # flyteorg/flytectl info installed ./bin/flytectl - - # Step2: Export flytectl path based on the previous log "flyteorg/flytectl info installed ./bin/flytectl" - export PATH=$PATH:/home/ubuntu/bin # replace with your path - -**2. Build a k3s cluster that runs minio and postgres Pods.** - - -| `Minio `__ is an S3-compatible object store that will be used later to store task output, input, etc. -| `Postgres `__ is an open-source object-relational database that will later be used by flyteadmin/dataCatalog to - store all Flyte information. - -.. code:: shell - - # Step1: Start k3s cluster, create Pods for postgres and minio. Note: We cannot access Flyte UI yet! but we can access the minio console now. - flytectl demo start --dev - # 👨‍💻 Flyte is ready! Flyte UI is available at http://localhost:30080/console 🚀 🚀 🎉 - # ❇️ Run the following command to export demo environment variables for accessing flytectl - # export FLYTECTL_CONFIG=/home/ubuntu/.flyte/config-sandbox.yaml - # 🐋 Flyte sandbox ships with a Docker registry. Tag and push custom workflow images to localhost:30000 - # 📂 The Minio API is hosted on localhost:30002. Use http://localhost:30080/minio/login for Minio console - - # Step2: Export FLYTECTL_CONFIG as the previous log indicated. - FLYTECTL_CONFIG=/home/ubuntu/.flyte/config-sandbox.yaml - - # Step3: The kubeconfig will be automatically copied to the user's main kubeconfig (default is `/.kube/config`) with "flyte-sandbox" as the context name. - # Check that we can access the K3s cluster. Verify that postgres and minio are running. 
- kubectl get pod -n flyte - # NAME READY STATUS RESTARTS AGE - # flyte-sandbox-docker-registry-85745c899d-dns8q 1/1 Running 0 5m - # flyte-sandbox-kubernetes-dashboard-6757db879c-wl4wd 1/1 Running 0 5m - # flyte-sandbox-proxy-d95874857-2wc5n 1/1 Running 0 5m - # flyte-sandbox-minio-645c8ddf7c-sp6cc 1/1 Running 0 5m - # flyte-sandbox-postgresql-0 1/1 Running 0 5m - - -**3. Run all Flyte components (flyteadmin, flytepropeller, datacatalog, flyteconsole, etc) in a single binary.** - -The `Flyte repository `__ includes Go code -that integrates all Flyte components into a single binary. - -.. code:: shell - - # Step1: Clone flyte repo - git clone https://github.com/flyteorg/flyte.git - cd flyte - - # Step2: Build a single binary that bundles all the Flyte components. - # The version of each component/library used to build the single binary are defined in `go.mod`. - sudo apt-get -y install jq # You may need to install jq - go mod tidy - make compile - - # Step3: Prepare a namespace template for the cluster resource controller. - # The configuration file "flyte-single-binary-local.yaml" has an entry named cluster_resources.templatePath. - # This entry needs to direct to a directory containing the templates for the cluster resource controller to use. - # We will now create a simple template that allows the automatic creation of required namespaces for projects. - # For example, with Flyte's default project "flytesnacks", the controller will auto-create the following namespaces: - # flytesnacks-staging, flytesnacks-development, and flytesnacks-production. - mkdir $HOME/.flyte/cluster-resource-templates/ - echo "apiVersion: v1 - kind: Namespace - metadata: - name: '{{ namespace }}'" > $HOME/.flyte/cluster-resource-templates/namespace.yaml - - # Step4: Running the single binary. - # The POD_NAMESPACE environment variable is necessary for the webhook to function correctly. - # You may encounter an error due to `ERROR: duplicate key value violates unique constraint`. 
Running the command again will solve the problem. - POD_NAMESPACE=flyte ./flyte start --config flyte-single-binary-local.yaml - # All logs from flyteadmin, flyteplugins, flytepropeller, etc. will appear in the terminal. - - -**4. Build single binary with your own code.** - - -The following instructions provide guidance on how to build single binary with your customized code under the ``flyteadmin`` as an example. - - -- **Note** Although we'll use ``flyteadmin`` as an example, these steps can be applied to other Flyte components or libraries as well. - ``{flyteadmin}`` below can be substituted with other Flyte components/libraries: ``flyteidl``, ``flyteplugins``, ``flytepropeller``, ``datacatalog``, or ``flytestdlib``. -- **Note** If you want to learn how flyte compiles those components and replace the repositories, you can study how ``go mod edit`` works. - -.. code:: shell - - # Step1: Install Go. Flyte uses Go 1.19, so make sure to switch to Go 1.19. - export PATH=$PATH:$(go env GOPATH)/bin - go install golang.org/dl/go1.19@latest - go1.19 download - export GOROOT=$(go1.19 env GOROOT) - export PATH="$GOROOT/bin:$PATH" - - # You may need to install goimports to fix lint errors. - # Refer to https://pkg.go.dev/golang.org/x/tools/cmd/goimports - go install golang.org/x/tools/cmd/goimports@latest - export PATH=$(go env GOPATH)/bin:$PATH - - # Step2: Go to the {flyteadmin} repository, modify the source code accordingly. - cd flyte/flyteadmin - - # Step3: Now, you can build the single binary. Go back to Flyte directory. - go mod tidy - make compile - POD_NAMESPACE=flyte ./flyte start --config flyte-single-binary-local.yaml - -**5. Test by running a hello world workflow.** - - -.. 
code:: shell - - # Step1: Install flytekit - pip install flytekit && export PATH=$PATH:/home/ubuntu/.local/bin - - # Step2: Run a hello world example - pyflyte run --remote https://raw.githubusercontent.com/flyteorg/flytesnacks/master/examples/basics/basics/hello_world.py hello_world_wf - # Go to http://localhost:30080/console/projects/flytesnacks/domains/development/executions/fd63f88a55fed4bba846 to see execution in the console. - # You can go to the [flytesnacks repository](https://github.com/flyteorg/flytesnacks) to see more useful examples. - -**6. Tear down the k3s cluster after finishing developing.** - - -.. code:: shell - - flytectl demo teardown - # context removed for "flyte-sandbox". - # 🧹 🧹 Sandbox cluster is removed successfully. - # ❇️ Run the following command to unset sandbox environment variables for accessing flytectl - # unset FLYTECTL_CONFIG - -How to setup dev environment for flytekit? -******************************************* - -**1. Set up local Flyte Cluster.** - - -If you are also modifying the code for flyteidl, flyteadmin, flyteplugins, flytepropeller datacatalog, or flytestdlib, -refer to the instructions in the `previous section <#how-to-setup-dev-environment-for-flyteidl-flyteadmin-flyteplugins-flytepropeller-datacatalog-and-flytestdlib>`__ to set up a local Flyte cluster. - -If not, we can start backends with a single command. - -.. code:: shell - - # Step1: Install the latest version of flytectl, a portable and lightweight command-line interface to work with Flyte. - curl -sL https://ctl.flyte.org/install | bash - # flyteorg/flytectl info checking GitHub for latest tag - # flyteorg/flytectl info found version: 0.6.39 for v0.6.39/Linux/x86_64 - # flyteorg/flytectl info installed ./bin/flytectl - - # Step2: Export flytectl path based on the previous log "flyteorg/flytectl info installed ./bin/flytectl" - export PATH=$PATH:/home/ubuntu/bin # replace with your path - - # Step3: Starts the Flyte demo cluster. 
This will setup a k3s cluster running minio, postgres Pods, and all Flyte components: flyteadmin, flyteplugins, flytepropeller, etc. - # See https://docs.flyte.org/projects/flytectl/en/latest/gen/flytectl_demo_start.html for more details. - flytectl demo start - # 👨‍💻 Flyte is ready! Flyte UI is available at http://localhost:30080/console 🚀 🚀 🎉 - # ❇️ Run the following command to export demo environment variables for accessing flytectl - # export FLYTECTL_CONFIG=/home/ubuntu/.flyte/config-sandbox.yaml - # 🐋 Flyte sandbox ships with a Docker registry. Tag and push custom workflow images to localhost:30000 - # 📂 The Minio API is hosted on localhost:30002. Use http://localhost:30080/minio/login for Minio console - -**2. Run workflow locally.** - - -.. code:: shell - - # Step1: Build a virtual environment for developing Flytekit. This will allow your local changes to take effect when the same Python interpreter runs `import flytekit`. - git clone https://github.com/flyteorg/flytekit.git # replace with your own repo - cd flytekit - virtualenv ~/.virtualenvs/flytekit - source ~/.virtualenvs/flytekit/bin/activate - make setup - pip install -e . - - # If you are also developing the plugins, consider the following: - - # Installing Specific Plugins: - # If you wish to only use few plugins, you can install them individually. - # Take [Flytekit BigQuery Plugin](https://github.com/flyteorg/flytekit/tree/master/plugins/flytekit-bigquery#flytekit-bigquery-plugin) for example: - # You have to go to the bigquery plugin folder and install it. - cd plugins/flytekit-bigquery/ - pip install -e . - # Now you can use the bigquery plugin, and the performance is fast. - - # (Optional) Installing All Plugins: - # If you wish to install all available plugins, you can execute the command below. - # However, it's not typically recommended because the current version of plugins does not support - # lazy loading. This can lead to a slowdown in the performance of your Python engine. 
- cd plugins - pip install -e . - # Now you can use all plugins, but the performance is slow. - - # Step2: Modify the source code for flytekit, then run unit tests and lint. - make lint - make test - - # Step3: Run a hello world sample to test locally - pyflyte run https://raw.githubusercontent.com/flyteorg/flytesnacks/master/examples/basics/basics/hello_world.py hello_world_wf - # Running hello_world_wf() hello world - -**3. Run workflow in sandbox.** - - -Before running your workflow in the sandbox, make sure you're able to successfully run it locally. -To deploy the workflow in the sandbox, you'll need to build a Flytekit image. -Create a Dockerfile in your Flytekit directory with the minimum required configuration to run a task, as shown below. -If your task requires additional components, such as plugins, you may find it useful to refer to the construction of the `officail flitekit image `__ - -.. code:: Dockerfile - - FROM python:3.9-slim-buster - USER root - WORKDIR /root - ENV PYTHONPATH /root - RUN apt-get update && apt-get install build-essential -y - RUN apt-get install git -y - # The following line is an example of how to install your modified plugins. In this case, it demonstrates how to install the 'deck' plugin. - # RUN pip install -U git+https://github.com/Yicheng-Lu-llll/flytekit.git@"demo#egg=flytekitplugins-deck-standard&subdirectory=plugins/flytekit-deck-standard" # replace with your own repo and branch - RUN pip install -U git+https://github.com/Yicheng-Lu-llll/flytekit.git@demo # replace with your own repo and branch - ENV FLYTE_INTERNAL_IMAGE "localhost:30000/flytekit:demo" # replace with your own image name and tag - -The instructions below explain how to build the image, push the image to -the Flyte cluster, and finally submit the workflow. - -.. code:: shell - - # Step1: Ensure you have pushed your changes to the remote repo - # In the flytekit folder - git add . 
&& git commit -s -m "develop" && git push - - # Step2: Build the image - # In the flytekit folder - export FLYTE_INTERNAL_IMAGE="localhost:30000/flytekit:demo" # replace with your own image name and tag - docker build --no-cache -t "${FLYTE_INTERNAL_IMAGE}" -f ./Dockerfile . - - # Step3: Push the image to the Flyte cluster - docker push ${FLYTE_INTERNAL_IMAGE} - - # Step4: Submit a hello world workflow to the Flyte cluster - cd flytesnacks - pyflyte run --image ${FLYTE_INTERNAL_IMAGE} --remote https://raw.githubusercontent.com/flyteorg/flytesnacks/master/examples/basics/basics/hello_world.py hello_world_wf - # Go to http://localhost:30080/console/projects/flytesnacks/domains/development/executions/f5c17e1b5640c4336bf8 to see execution in the console. - -How to setup dev environment for flyteconsole? -********************************************** - -**1. Set up local Flyte cluster.** - -Depending on your needs, refer to one of the following guides to setup up the Flyte cluster: - -- If you do not need to change the backend code, refer to the section on `How to Set Up a Dev Environment for Flytekit? <#how-to-setup-dev-environment-for-flytekit>`__ -- If you need to change the backend code, refer to the section on `How to setup dev environment for flyteidl, flyteadmin, flyteplugins, flytepropeller, datacatalog and flytestdlib? <#how-to-setup-dev-environment-for-flyteidl-flyteadmin-flyteplugins-flytepropeller-datacatalog-and-flytestdlib>`__ - - -**2. Start flyteconsole.** - - -.. code:: shell - - # Step1: Clone the repo and navigate to the Flyteconsole folder - git clone https://github.com/flyteorg/flyteconsole.git - cd flyteconsole - - # Step2: Install Node.js 18. Refer to https://github.com/nodesource/distributions/blob/master/README.md#using-ubuntu-2. - curl -fsSL https://deb.nodesource.com/setup_18.x | sudo -E bash - &&\ - sudo apt-get install -y nodejs - - # Step3: Install yarn. Refer to https://classic.yarnpkg.com/lang/en/docs/install/#debian-stable. 
- curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add - - echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list - sudo apt update && sudo apt install yarn - - # Step4: Add environment variables - export BASE_URL=/console - export ADMIN_API_URL=http://localhost:30080 - export DISABLE_AUTH=1 - export ADMIN_API_USE_SSL="http" - - # Step5: Generate SSL certificate - # Note, since we will use HTTP, SSL is not required. However, missing an SSL certificate will cause an error when starting Flyteconsole. - make generate_ssl - - # Step6: Install node packages - yarn install - yarn build:types # It is fine if seeing error `Property 'at' does not exist on type 'string[]'` - yarn run build:prod - - # Step7: Start flyteconsole - yarn start - -**3. Install the Chrome plugin:** `Moesif Origin & CORS Changer `__. - - -We need to disable `CORS `__ to load resources. - -:: - - 1. Activate plugin (toggle to "on") - 2. Open 'Advanced Settings': - 3. set Access-Control-Allow-Credentials: true - -**4. Go to** http://localhost:3000/console/. - - -How to access Flyte UI, minio, postgres, k3s, and endpoints? -************************************************************************* - - -This section presumes a local Flyte cluster is already setup. If it isn't, refer to either: - -- `How to setup dev environment for flytekit? <#how-to-setup-dev-environment-for-flytekit>`__ -- `How to setup dev environment for flyteidl, flyteadmin, flyteplugins, flytepropeller, datacatalog and flytestdlib? <#how-to-setup-dev-environment-for-flyteidl-flyteadmin-flyteplugins-flytepropeller-datacatalog-and-flytestdlib>`__ - - -**1. Access the Flyte UI.** - - -`Flyte UI `__ is a web-based user interface for Flyte that lets you interact with Flyte objects and build directed acyclic graphs (DAGs) for your workflows. - -You can access it via http://localhost:30080/console. - -**2. 
Access the minio console.** - - -Core Flyte components, such as admin, propeller, and datacatalog, as well as user runtime containers rely on an object store (in this case, minio) to hold files. -During development, you might need to examine files such as `input.pb/output.pb `__, or `deck.html `__ stored in minio. - -Access the minio console at: http://localhost:30080/minio/login. The default credentials are: - -- Username: ``minio`` -- Password: ``miniostorage`` - - -**3. Access the postgres.** - - -FlyteAdmin and datacatalog use postgres to store persistent records, and you can interact with postgres on port ``30001``. Here is an example of using `psql` to connect: - -.. code:: shell - - # Step1: Install the PostgreSQL client. - sudo apt-get update - sudo apt-get install postgresql-client - - # Step2: Connect to the PostgreSQL server. The password is "postgres". - psql -h localhost -p 30001 -U postgres -d flyte - - -**4. Access the k3s dashboard.** - - -Access the k3s dashboard at: http://localhost:30080/kubernetes-dashboard. - -**5. Access the endpoints.** - - -Service endpoints are defined in the `flyteidl` repository under the `service` directory. You can browse them at `here `__. - -For example, the endpoint for the `ListTaskExecutions `__ API is: - -.. code:: shell - - /api/v1/task_executions/{node_execution_id.execution_id.project}/{node_execution_id.execution_id.domain}/{node_execution_id.execution_id.name}/{node_execution_id.node_id} - -You can access this endpoint at: - -.. code:: shell - - # replace with your specific task execution parameters - http://localhost:30080/api/v1/task_executions/flytesnacks/development/fe92c0a8cbf684ad19a8/n0?limit=10000 - - - - - - -🐞 File an issue -================ - -We use `GitHub Issues `__ for issue tracking. 
The following issue types are available for filing an issue: - -* `Plugin Request `__ -* `Bug Report `__ -* `Documentation Bug/Update Request `__ -* `Core Feature Request `__ -* `Flytectl Feature Request `__ -* `Housekeeping `__ -* `UI Feature Request `__ - -If none of the above fit your requirements, file a `blank `__ issue. -Also, add relevant labels to your issue. For example, if you are filing a Flytekit plugin request, add the ``flytekit`` label. - -For feedback at any point in the contribution process, feel free to reach out to us on `Slack `__. diff --git a/rsts/community/index.rst b/rsts/community/index.rst deleted file mode 100644 index c2ee55ae23..0000000000 --- a/rsts/community/index.rst +++ /dev/null @@ -1,127 +0,0 @@ -.. _community: - -########## -Community -########## - -Flyte is an ambitious open source project and would not be possible without an -amazing community. We are a completely open community and strive to treat -every member with respect. You will find the community welcoming and responsive! - -Please join us on: - -.. image:: https://img.shields.io/badge/Slack-Chat-pink?style=for-the-badge - :target: https://slack.flyte.org - :alt: Flyte Slack - -.. image:: https://img.shields.io/badge/Github-Discussion-green?style=for-the-badge - :target: https://github.com/flyteorg/flyte/discussions - :alt: Github Discussion - -.. image:: https://img.shields.io/badge/Twitter-Social-blue?style=for-the-badge - :target: https://twitter.com/flyteorg - :alt: Twitter - -.. image:: https://img.shields.io/badge/LinkedIn-Social-lightblue?style=for-the-badge - :target: https://www.linkedin.com/groups/13962256 - :alt: LinkedIn - - -Open Source Community Meeting ------------------------------ - -When: every other Tuesday, 9:00 AM Pacific Time. -You're welcome to join and learn from other community members sharing their experiences with Flyte or any other technology from the AI ecosystem. -Check out the event details and add it to your `calendar `_, or just pop in! 
- -.. image:: https://img.shields.io/badge/Join-Zoom-blue?style=for-the-badge - :target: https://www.addevent.com/event/EA7823958 - :alt: Zoom Link - -Office Hours ------------- - -`Book a 30 minutes session `_ with a Flyte maintainer and get your questions answered! - -Schedule your session depending on the topic to secure the availability of a maintainer with expertise in the area: - -- **7:00a.m. PT**: - - Anything flytekit-related - - Flyte releases - - flytepropeller features - - Plugin implementation - - Platform configuration -- **1:00p.m. PT**: - - Flyte deployment, auth -- **9:00p.m. PT**: - - Flytekit-related - - Use cases - - Getting started (workflow onboarding) - - Integrations - - -Newsletter ----------- - -`Join the Flyte mailing list `_ to receive the monthly newsletter - - -Slack guidelines ------------------ - -Flyte strives to build and maintain an open, inclusive, productive and self-governing open source community. In consequence, -we expect all community members to respect the following guidelines: - -Abide by the `LF's Code of Conduct `__ -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -As a Linux Foundation project, we must enforce the rules that govern professional and positive open source communities. - -Avoid using DMs and @mentions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Whenever possible, post your questions and responses in public channels so other Community Members can benefit from the conversation and outcomes. -Exceptions to this are when you need to share private or sensible information. In such a case, the outcome should still be shared publicly. -Limit the use of @mentions of other Community Members to be considerate of notification noise. - -Make use of threads -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Threads help us keep conversations contained and organized, reducing the time it takes to give you the support you need. 
- -Thread best practices: - -- Don't break your question into multiple messages. Put everything in one. -- For long questions, write a few sentences in the first message, and put the rest in a thread. -- If there's a code snippet (more than 5 lines of code), put it inside the thread. -- Avoid using the “Also send to channel” feature unless it's really necessary. -- If your question contains multiple questions, make sure to break them into multiple messages, so each could be answered in a separate thread. - - -Do not post the same question across multiple channels -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -If you consider that question needs to be shared on other channels, ask it once and then indicate explicitly that you're cross-posting. - -If you're having a tough time getting the support you need (or aren't sure where to go!), please DM @David Espejo(he/him) or @Samhita Alla for support. - -Do not solicit members of our Slack -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The Flyte Community exists to collaborate with, learn from, and support one another. It is not a space to pitch your products or services directly to our members via public channels, private channels, or direct messages. - -We are excited to have a growing presence from vendors to help answer questions from Community Members as they may arise, but we have a strict 3-strike policy against solicitation: - -- First occurrence: We'll give you a friendly but public reminder that the behavior is inappropriate according to our guidelines. -- Second occurrence: We'll send you a DM warning that any additional violations will result in removal from the community. -- Third occurrence: We'll delete or ban your account. - -We reserve the right to ban users without notice if they are clearly spamming our Community Members. 
- -If you want to promote a product or service, go to the #shameless-promotion channel and make sure to follow these rules: - -- Don't post more than two promotional posts per week -- Non-relevant topics aren't allowed - -Messages that don't follow these rules will be deleted. - - diff --git a/rsts/community/roadmap.rst b/rsts/community/roadmap.rst deleted file mode 100644 index 3e6bc3f5ae..0000000000 --- a/rsts/community/roadmap.rst +++ /dev/null @@ -1,145 +0,0 @@ -.. _community_roadmap: - -############### -Roadmap -############### - -How the Community Works -======================= -Flyte is actively used in production at multiple companies. We pride ourselves on being extremely customer-focused, and care about providing a high quality customer experience. We therefore always -prioritize stability, reliability, observability and maintainability over raw feature development. - -Features are usually developed in response to specific use cases and user scenarios. That being said, we are proactively thinking about the evolution of the system and how we want to keep adapting to changing requirements. Thus most of our changes reflect future development scenarios, and in -cases where we feel rapid prototyping would enable us to discover potential pitfalls or uncover hidden use cases, we would proactively develop features behind feature flags. - -It is extremely important to let the community know about your use cases, so that we adapt parts of Flyte to meet those requirements. We welcome collaboration and contributions, but please follow our `Contribution Guidelines `_. The quarterly planning meeting is also hosted publicly, please see more below. - - -Milestones and Release Processes -================================ -Flyte consists of many components and services. Each service is independently iterated and coordinated by maintaining backwards compatible contracts using Protobuf messages defined in `FlyteIDL `__. 
- -Release Cadence ---------------- -We aim to release Flyte quarterly, with the understanding that rather than being tied strictly to the calendar, we aim to have substantial features, improvements, and bug fixes at each quarter. If features slated for a given release are delayed, then the release will be delayed as well. The increased time will also give the Flyte development team more time to beta test each feature and release. - -Versioning Scheme ------------------ -*Please keep in mind the CI work to implement this scheme is still in progress* - -At each quarterly release, major components of Flyte and the Flyte repository itself will be released with an incremented minor version number and the version number will be aligned across those components. The major version number will remain ``1`` for the foreseeable future. That is, if the current version of Flyte is ``1.2.x``, the next release will be ``1.3.0`` for Flyte and the major components. - -After each version is released, merges to master will be assigned beta releases of the next release version. That is, if ``flytepropeller`` version ``v1.2.0`` was just released, the next merge to master will be tagged ``v1.3.0b0``. - -Not strictly forcing a time-constraint on the Flyte release cycle means that if a substantial number of changes is merged, perhaps due to a security issue or just a rapid pace of feature development, we can always bring up the timeline of the release. - -Components with versions aligned -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -* Propeller -* Admin -* Console -* datacatalog -* flytectl -* flytesnacks -* Flytekit -* flytekit-java - -The last two we are going to tie together for now, but realize that we may want to unpin in the future. - -Components versioned independently -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -* flyteidl -* flytestdlib -* flyteplugins -* flytecopilot - -Helm Charts -^^^^^^^^^^^ -Helm charts deserve a special mention here. 
Unlike the other components which will have patch versions that differ, the Flyte release version and the Helm chart version will always be identical down to the patch. That is, a Flyte release is a Helm release and vice-versa. - -Release Branches and Patching ------------------------------ -After each minor release, a release branch will be created. There will be no alignment of patch versions across the components. That is, by the end of the ``1.3.x`` release cycle, ``flyteadmin`` may be on ``1.3.8`` and ``flytepropeller`` may be on ``1.3.2``. - -When developing bug fixes, by default we will continue to develop off of master, which will not be the stable branch. After such bug fixes are merged, it will be the responsibility of the developer to ensure that the patches are also applied to prior releases. At the current time, we propose only supporting one release back (two for security patches). That is, if ``flytepropeller`` has a bug fix that results in ``v1.3.0b0`` that patch will be applied to the ``v1.2.x`` release, but not the ``v1.1.x`` release. - -Beta Patch Releases -^^^^^^^^^^^^^^^^^^^ -We also propose that beta patch versions be merged into the release branch when patching prior releases. For example, assuming no patches have yet to be made to the ``v1.2.0`` release, when porting a bug fix that resulted in ``v1.3.0b0`` onto the ``release-v1.2`` branch, the developer can first release ``v1.2.1b0`` for testing into ``release-v1.2`` before releasing the ``v1.2.1`` release. Such beta releases should be made at the discretion of the developer. - -Whether or not a patch version of any of the Flyte components also creates a Flyte patch release shall also be left to the discretion of the developer. - -Documentation Versioning ------------------------- -We also currently have an issue with our documentation versioning. 
While our readthedocs page does have versioning enabled and we publish the [docs version](https://github.com/flyteorg/flyte/blob/80c098f10334b1c916d1e4274ab9f204152d9d80/rsts/conf.py#L33), all the [intersphinx mappings](https://github.com/flyteorg/flyte/blob/80c098f10334b1c916d1e4274ab9f204152d9d80/rsts/conf.py#L219) just point to `latest`. Keep in mind that this mapping not only exists in this `flyte` repo, but also in all the other repos that that mapping points to. That is, to maintain an accurate mapping of different versions of documentation, we'll need to update the mapping in all the repos. - -To remediate this, we propose the following: - -* Documentation should be pinned only to Major.Minor on all the repos that have their versions "aligned". - - * This means that as we release patch versions of Admin, Propeller, etc., if we're on v1.1 for instance, as Admin code/auto-generated documentation changes, the v1.1 listing of readthedocs will automatically pick it up. -* Repos that are not aligned will just default to the "latest" documentation version. - -Planning Process -================ - -Quarterly Planning ------------------- -Members of the community should feel free to join these! Core members of the Flyte team will come prepared with general initiatives in mind. We will use these meetings to prioritize these ideas, assess community interest and impact, and decide what goes into the GitHub milestone for the next release. Members of the community looking to contribute should also join. Please look for this meeting invite on the calendar - it may not be set up as a recurring meeting simply because it will likely change by a few days each quarter. 
- -Change Management ------------------- -To ensure that changes are trackable and the history is explainable, we use a slightly cumbersome but helpful process, with the following immediate goals: -- Every PR is associated with an issue (automatic searchable documentation) -- Large PRs are associated with Proposals -- Every major change is associated with documentation -- Owner files exist for all repositories - -Issue Lifecycle ---------------- -- Incoming issues are tagged automatically as untriaged. -- Periodically, members of the Flyte community will meet to triage incoming issues. We aim to do this on a weekly basis. -- During this meeting we'll attempt to assign each issue to a milestone. Some issues however will need to be investigated before we can fully assess. -- Once an issue is assigned to a milestone, this means we are committed to delivering it that release. This means the burden for adding something to the milestone is relatively high. Issues that slip should only slip for good reason. - -Browse Features and Issues -============================ - -Issues by Theme ----------------- - -+-------------+----------------------------------------------------------------+---------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------+ -| Theme | Description | Open Issues | Comment | -+-------------+----------------------------------------------------------------+---------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------+ -| Bugs | Currently known and open bugs. | `Bugs `_ | We are always working on bugs. Open a new one `here `_. 
| -+-------------+----------------------------------------------------------------+---------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------+ -| Security | Issues related to security enhancements. | `Security issues `_ | | -+-------------+----------------------------------------------------------------+---------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------+ -| Docs | All issues open with our documentation | `Docs issues `_ | Starting Feb 2021, we will be completely overhauling our docs. Feedback appreciated! | -+-------------+----------------------------------------------------------------+---------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------+ -| Features | All new features in development | `Features issues `_ | | -+-------------+----------------------------------------------------------------+---------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------+ -| Plugins | New capabilities and plugins that are built into Flyte. | `Plugins issues `_ | This is one of the best places to get started contributing to Flyte. Issues with both | -| | These could be hosted services, K8s native execution, etc. | | `plugins` and `flytekit` labels refer to purely client-side plugins and are the fastest to contribute to. 
| -+-------------+----------------------------------------------------------------+---------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------+ -| Scale | These issues deal with performance, reliability and | `Scale issues `_ | We are always working on these issues and we would love to hear feedback about what you | -| | scalability of Flyte | | would want to change or what we should prioritize. | -+-------------+----------------------------------------------------------------+---------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------+ -| Contribute | If you are looking to contribute and want a great first issue, | `Contribute issues `_ | These are the best issues to get started with. | -| | check out these issues | | | -+-------------+----------------------------------------------------------------+---------------------------------------------------------------------------------------+--------------------------------------------------------------------------------------------------------------+ - - -Issues by Components ---------------------- - -+---------------+---------------------------------------+------------------------------------------------------------------------+ -| Theme | Description | Open Issues | -+===============+=======================================+========================================================================+ -| Flyte Console | Issues concerning our web UI. | `Flyte Console issues `_ | -+---------------+---------------------------------------+------------------------------------------------------------------------+ -| Flytectl | Issues concerning our standalone CLI. 
| `Flytectl issues `_ | -+---------------+---------------------------------------+------------------------------------------------------------------------+ - -For an overview of what we're currently working on, check out our `live roadmap `__. - diff --git a/rsts/community/troubleshoot.rst b/rsts/community/troubleshoot.rst deleted file mode 100644 index b4f6c271d4..0000000000 --- a/rsts/community/troubleshoot.rst +++ /dev/null @@ -1,135 +0,0 @@ -.. _troubleshoot: - -===================== -Troubleshooting Guide -===================== - -.. tags:: Troubleshoot, Basic - -The content in this section will help Flyte users isolate the most probable causes for some of the common issues that could arise while getting started with the project. - -Before getting started, collect the following information from the underlying infrastructure: - -- Capture the ``Status`` column from the output of: - -.. prompt:: bash $ - - $ kubectl describe pod -n - -Where will typically correspond to the node execution string that you can find in the UI. - -- Pay close attention to the `Events` section in the output. -- Also, collect the logs from the Pod: - -.. prompt:: bash $ - - $ kubectl logs pods -n - -Where will typically correspond to the Flyte -, e.g. flytesnacks-development. - -Depending on the contents of the logs or the `Events`, you can try different things: - -Debugging common execution errors ----------------------------------- - -``message: '0/1 nodes are available: 1 Insufficient cpu. preemption: 0/1 nodes are available: 1 No preemption victims found for incoming pod.'`` -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This issue is more common on MacOS devices. Make sure that your Docker daemon has allocated a minimum of 4 CPU cores and 3GB of RAM - -``terminated with exit code (137). 
Reason [OOMKilled]`` -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -- For single binary environment deployed with Helm chart, make sure you are using `the most recent charts `_ - -- For EKS deployments, you cand adjust resource limits and requests in the `inline `_ section of the ``eks-production.yaml`` file. Example: - -.. code-block:: yaml - - inline: - task_resources: - defaults: - cpu: 100m - memory: 100Mi - storage: 100Mi - limits: - memory: 1Gi - -- Also, the default container resource limits are can be overridden from the task itself: - -.. code-block:: python - - from flytekit import Resources, task - @task(limits=Resources(mem="256Mi") - def your_task(... - -``Error: ImagePullBackOff`` -^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -- If your environment requires the use of a network proxy use the ``--env`` option when starting the sandbox and pass the proxy configuration: - -.. prompt:: bash $ - - $ flytectl demo start --env HTTP_PROXY= - -- If you're building a custom Docker image, make sure to use a tag other than ``latest``. Otherwise, the Kubernetes default pull policy will be changed from ``IfNotPresent`` to ``Always``, forcing an image pull with every Pod deployment. - -Issues running workloads -------------------------- - -``OPENSSL_internal:WRONG_VERSION_NUMBER`` -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -- For ``flyte-binary``: make sure that the endpoint name you have set in your ``config.yaml`` file, is included in the DNS names of the SSL certificate installed (be it self signed or issued by a Certificate Authority) -- For ``sandbox``: verify the ``FLYTECTL_CONFIG`` environment variable has the correct value by running: - -.. 
prompt:: bash $ - - $ export FLYTECTL_CONFIG=~/.flyte/config-sandbox.yaml - -``ModuleNotFoundError`` -^^^^^^^^^^^^^^^^^^^^^^^ - -- If you're using a custom container image and using Docker, make sure your ``Dockerfile`` is located at the same level of the ``flyte`` directory and that there is an empty ``__init__.py`` file in your project's folder : - -.. prompt:: bash $ - - myflyteapp - ├── Dockerfile - ├── docker_build_and_tag.sh - ├── flyte - │ ├── __init__.py - │ └── workflows - │ ├── __init__.py - │ └── example.py - └── requirements.txt - -``An error occurred (AccessDenied) when calling the PutObject operation`` in an EKS deployment -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -- Make sure that the Kubernetes service account Flyte is using has the annotation that refers to the IAM Role is connected to: - -.. prompt:: bash $ - - $ kubectl describe sa -n - -Example output: - -.. prompt:: bash $ - - Name: - Namespace: flyte - Labels: app.kubernetes.io/managed-by=eksctl - Annotations: eks.amazonaws.com/role-arn: arn:aws:iam:::role/flyte-system-role - Image pull secrets: - Mountable secrets: - Tokens: - Events: - -- Otherwise, obtain your IAM role's ARN and manually annotate the service account: - -.. prompt:: bash $ - - $ kubectl annotate serviceaccount -n eks.amazonaws.com/role-arn=arn:aws:iam::xxxx:role/ - -- Refer to this community-maintained `guides `_ for further information about Flyte deployment on EKS diff --git a/rsts/concepts/admin.rst b/rsts/concepts/admin.rst deleted file mode 100644 index 3a64c7e2a0..0000000000 --- a/rsts/concepts/admin.rst +++ /dev/null @@ -1,500 +0,0 @@ -.. _divedeep-admin: - -########## -FlyteAdmin -########## - -.. tags:: Advanced, Design - -Admin Structure -=============== - -FlyteAdmin serves as the main Flyte API to process all client requests to the system. Clients include the FlyteConsole, which calls: - -1. FlyteAdmin to list the workflows, get execution details, etc. -2. 
Flytekit, which in turn calls FlyteAdmin to register, launch workflows, etc. - -Below, we'll dive into each component defined in admin in more detail. - -RPC ---- - -FlyteAdmin uses the `grpc-gateway `__ library to serve incoming gRPC and HTTP requests with identical handlers. -Refer to the admin service :std:ref:`definition ` for a detailed API overview, including request and response entities. -The RPC handlers are thin shims that enforce request structure validation and call out to the appropriate :ref:`manager ` methods to process requests. - -You can find a detailed explanation of the service in the :ref:`admin service ` page. - -.. _divedeep-admin-manager: - -Managers --------- - -The Admin API is broken up into entities: - -- Executions -- Launch plans -- Node Executions -- Projects (and their respective domains) -- Task Executions -- Tasks -- Workflows - -Each API entity has an entity manager in FlyteAdmin responsible for implementing business logic for the entity. -Entity managers handle full validation of creating, updating and getting requests and -data persistence in the backing store (see the :ref:`divedeep-admin-repository` section). - - -Additional Components -+++++++++++++++++++++ - -The managers utilize additional components to process requests. These additional components include: - -- :ref:`workflow engine `: compiles workflows and launches workflow executions from launch plans. -- :ref:`data ` (remote cloud storage): offloads data blobs to the configured cloud provider. -- :ref:`runtime `: loads values from a config file to assign task resources, initialization values, execution queues, and more. -- :ref:`async processes `: provides functions to schedule and execute the workflows as well as enqueue and trigger notifications. - -.. _divedeep-admin-repository: - -Repository ----------- -Serialized entities (tasks, workflows, launch plans) and executions (workflow-, node- and task-) are stored as protos defined -`here `__. 
-We use the excellent `gorm `__ library to interface with our database, which currently supports a Postgres -implementation. You can find the actual code for issuing queries with gorm in the -`gormimpl `__ directory. - -Models -++++++ -Database models are defined in the `models `__ directory and correspond 1:1 with the database tables [0]_. - -The full set of database tables includes: - -- executions -- execution_events -- launch_plans -- node_executions -- node_execution_events -- tasks -- task_executions -- workflows - -These database models inherit primary keys and indexes as defined in the corresponding `models `__ file. - -The repositories code also includes `transformers `__. -These convert entities from the database format to a response format for the external API. -If you change either of these structures, you must change the corresponding transformers too. - - -.. _divedeep-admin-async: - -Component Details -================= - -This section dives into the details of each top-level directory defined in ``pkg/``. - -Asynchronous Components ------------------------ - -Notifications and schedules are handled by async routines that are responsible for enqueuing and subsequently processing dequeued messages. - -FlyteAdmin uses the `gizmo toolkit `__ to abstract queueing implementation. Gizmo's -`pubsub `__ library offers implementations for Amazon SNS/SQS, Google Pubsub, Kafka topics, and publishing over HTTP. - -For the sandbox development, no-op implementations of the notifications and schedule handlers are used to remove external cloud dependencies. - - -Common ------- - -As the name implies, ``common`` houses shared components used across different FlyteAdmin components in a single, top-level directory to avoid cyclic dependencies. These components include execution naming and phase utils, query filter definitions, query sorting definitions, and named constants. - -.. 
_divedeep-admin-data: - -Data ------ - -Data interfaces are primarily handled by the `storage `__ library implemented in ``flytestdlib``. However, neither this nor the underlying `stow `__ library expose `HEAD `__ support. Hence, the data package in admin exists as the layer responsible for additional, remote data operations. - -Errors ------- - -The errors directory contains centrally defined errors that are designed for compatibility with gRPC statuses. - -.. _divedeep-admin-config: - -Runtime -------- -Values specific to the FlyteAdmin application, including task, workflow registration, and execution are configured in the `runtime `__ directory. These interfaces expose values configured in the ``flyteadmin`` top-level key in the application config. - -.. _divedeep-admin-workflowengine: - -Workflow engine ----------------- - -This directory contains the interfaces to build and execute workflows leveraging FlytePropeller compiler and client components. - -.. [0] Given the unique naming constraints, some models are redefined in `migration_models `__ to guarantee unique index values. - -.. _divedeep-admin-service: - - -FlyteAdmin Service Background -============================= - -Entities ---------- - -The :std:ref:`admin service definition ` defines REST operations for the entities that -FlyteAdmin administers. - -As a refresher, the primary :ref:`entities ` across Flyte maps to FlyteAdmin entities. - -Static entities -+++++++++++++++ - -These include: - -- Workflows -- Tasks -- Launch Plans - -Permitted operations include: - -- Create -- Get -- List - -The above entities are designated by an :std:ref:`identifier ` -that consists of a project, domain, name, and version specification. These entities are, for the most part, immutable. To update one of these entities, the updated -version must be re-registered with a unique and new version identifier attribute. - -One caveat is that the launch plan can toggle between :std:ref:`ACTIVE and INACTIVE ` states. 
-At a given point in time, only one launch plan version across a shared {Project, Domain, Name} specification can be active. The state affects the scheduled launch plans only. -An inactive launch plan can be used to launch individual executions. However, only an active launch plan runs on a schedule (given it has a schedule defined). - - -Static entities metadata (Named Entities) -+++++++++++++++++++++++++++++++++++++++++ - -A :std:ref:`named entity ` includes metadata for one of the above entities -(workflow, task or launch plan) across versions. It also includes a resource type (workflow, task or launch plan) and an -:std:ref:`id ` which is composed of project, domain and name. -The named entity also includes metadata, which are mutable attributes about the referenced entity. - -This metadata includes: - -- Description: a human-readable description for the Named Entity collection. -- State (workflows only): this determines whether the workflow is shown on the overview list of workflows scoped by project and domain. - -Permitted operations include: - -- Create -- Update -- Get -- List - - -Execution entities -++++++++++++++++++ - -These include: - -- (Workflow) executions -- Node executions -- Task executions - -Permitted operations include: - -- Create -- Get -- List - -After an execution begins, FlytePropeller monitors the execution and sends the events which the admin uses to update the above executions. - -These :std:ref:`events ` include - -- WorkflowExecutionEvent -- NodeExecutionEvent -- TaskExecutionEvent - -and contain information about respective phase transitions, phase transition time and optional output data if the event concerns a terminal phase change. - -These events provide the **only** way to update an execution. No raw update endpoint exists. - -To track the lifecycle of an execution, admin and store attributes such as `duration` and `timestamp` at which an execution transitioned to running and end time are used. 
- -For debugging purposes, admin also stores Workflow and Node execution events in its database, but does not currently expose them through an API. Because array tasks can yield many executions, admin does **not** store TaskExecutionEvents. - - -Platform entities -+++++++++++++++++ -Projects: Like named entities, projects have mutable metadata such as human-readable names and descriptions, in addition to their unique string ids. - -Permitted project operations include: - -- Register -- List - -.. _divedeep-admin-matchable-resources: - -Matchable resources -+++++++++++++++++++ - -A thorough background on :ref:`matchable resources ` explains -their purpose and application logic. As a summary, these are used to override system level defaults for Kubernetes cluster -resource management, default execution values, and more across different levels of specificity. - -These entities consist of: - -- ProjectDomainAttributes -- WorkflowAttributes - -``ProjectDomainAttributes`` configure customizable overrides at the project and domain level, and ``WorkflowAttributes`` configure customizable overrides at the project, domain and workflow level. - -Permitted attribute operations include: - -- Update (implicitly creates if there is no existing override) -- Get -- Delete - - -Defaults --------- - -Task resource defaults -++++++++++++++++++++++ - -User-facing documentation on configuring task resource requests and limits can be found in :std:ref:`cookbook:customizing task resources`. - -As a system administrator you may want to define default task resource requests and limits across your Flyte deployment. -This can be done through the flyteadmin config. - -**Default** values get injected as the task requests and limits when a task definition omits a specific resource. -**Limit** values are only used as validation. Neither a task request nor limit can exceed the limit for a resource type. 
- - -Using the Admin Service ------------------------ - -Adding request filters -++++++++++++++++++++++ - -We use `gRPC Gateway `_ to reverse proxy HTTP requests into gRPC. -While this allows for a single implementation for both HTTP and gRPC, an important limitation is that fields mapped to the path pattern cannot be -repeated and must have a primitive (non-message) type. Unfortunately this means that repeated string filters cannot use a proper protobuf message. Instead, they use -the internal syntax shown below:: - - func(field,value) or func(field, value) - -For example, multiple filters would be appended to an http request like:: - - ?filters=ne(version, TheWorst)+eq(workflow.name, workflow) - -Timestamp fields use the ``RFC3339Nano`` spec (For example: "2006-01-02T15:04:05.999999999Z07:00") - -The fully supported set of filter functions are - -- contains -- gt (greater than) -- gte (greter than or equal to) -- lt (less than) -- lte (less than or equal to) -- eq (equal) -- ne (not equal) -- value_in (for repeated sets of values) - -"value_in" is a special case where multiple values are passed to the filter expression. For example:: - - value_in(phase, RUNNING;SUCCEEDED;FAILED) - -.. note:: - If you're issuing your requests over http(s), be sure to URL encode the ";" semicolon using ``%3B`` like so: ``value_in(phase, RUNNING%3BSUCCEEDED%3BFAILED)`` - -Filterable fields vary based on entity types: - -- Task - - - project - - domain - - name - - version - - created_at - -- Workflow - - - project - - domain - - name - - version - - created_at - -- Launch plans - - - project - - domain - - name - - version - - created_at - - updated_at - - workflows.{any workflow field above} (for example: workflow.domain) - - state (you must use the integer enum, for example: 1) - - States are defined in :std:ref:`launchplanstate `. - -- Named Entity Metadata - - - state (you must use the integer enum, for example: 1) - - States are defined in :std:ref:`namedentitystate `. 
- -- Executions (Workflow executions) - - - project - - domain - - name - - workflow.{any workflow field above} (for example: workflow.domain) - - launch_plan.{any launch plan field above} (for example: launch_plan.name) - - phase (you must use the upper-cased string name, for example: ``RUNNING``) - - Phases are defined in :std:ref:`workflowexecution.phase `. - - execution_created_at - - execution_updated_at - - duration (in seconds) - - mode (you must use the integer enum, for example: 1) - - Modes are defined in :std:ref:`executionmode `. - - user (authenticated user or role from flytekit config) - -- Node Executions - - - node_id - - execution.{any execution field above} (for example: execution.domain) - - phase (you must use the upper-cased string name, for example: ``QUEUED``) - - Phases are defined in :std:ref:`nodeexecution.phase `. - - started_at - - node_execution_created_at - - node_execution_updated_at - - duration (in seconds) - -- Task Executions - - - retry_attempt - - task.{any task field above} (for example: task.version) - - execution.{any execution field above} (for example: execution.domain) - - node_execution.{any node execution field above} (for example: node_execution.phase) - - phase (you must use the upper-cased string name, for example: ``SUCCEEDED``) - - Phases are defined in :std:ref:`taskexecution.phase `. 
- - started_at - - task_execution_created_at - - task_execution_updated_at - - duration (in seconds) - -Putting It All Together ------------------------ - -If you wish to query specific executions that were launched using a specific launch plan for a workflow with specific attributes, use: - -:: - - gte(duration, 100)+value_in(phase,RUNNING;SUCCEEDED;FAILED)+eq(lauch_plan.project, foo) - +eq(launch_plan.domain, bar)+eq(launch_plan.name, baz) - +eq(launch_plan.version, 1234) - +lte(workflow.created_at,2018-11-29T17:34:05.000000000Z07:00) - - - -Adding sorting to requests -++++++++++++++++++++++++++ - -Only a subset of fields are supported for sorting list queries. The explicit list is shown below: - -- ListTasks - - - project - - domain - - name - - version - - created_at - -- ListTaskIds - - - project - - domain - -- ListWorkflows - - - project - - domain - - name - - version - - created_at - -- ListWorkflowIds - - - project - - domain - -- ListLaunchPlans - - - project - - domain - - name - - version - - created_at - - updated_at - - state (you must use the integer enum, for example: 1) - - States are defined in :std:ref:`launchplanstate `. - -- ListWorkflowIds - - - project - - domain - -- ListExecutions - - - project - - domain - - name - - phase (you must use the upper-cased string name, for example: ``RUNNING``) - - Phases are defined in :std:ref:`workflowexecution.phase `. - - execution_created_at - - execution_updated_at - - duration (in seconds) - - mode (you must use the integer enum, for example: 1) - - Modes are defined :std:ref:`execution.proto `. - -- ListNodeExecutions - - - node_id - - retry_attempt - - phase (you must use the upper-cased string name, for example: ``QUEUED``) - - Phases are defined in :std:ref:`nodeexecution.phase `. 
- - started_at - - node_execution_created_at - - node_execution_updated_at - - duration (in seconds) - -- ListTaskExecutions - - - retry_attempt - - phase (you must use the upper-cased string name, for example: ``SUCCEEDED``) - - Phases are defined in :std:ref:`taskexecution.phase `. - - started_at - - task_execution_created_at - - task_execution_updated_at - - duration (in seconds) - -Sorting syntax --------------- - -Adding sorting to a request requires specifying the ``key``. For example: The attribute you wish to sort on. Sorting can also optionally specify the direction (one of ``ASCENDING`` or ``DESCENDING``) where ``DESCENDING`` is the default. - -Example sorting HTTP parameter: - -:: - - sort_by.key=created_at&sort_by.direction=DESCENDING - -Alternatively, since ``DESCENDING`` is the default sorting direction, the above could be written as - -:: - - sort_by.key=created_at diff --git a/rsts/concepts/architecture.rst b/rsts/concepts/architecture.rst deleted file mode 100644 index 062523146c..0000000000 --- a/rsts/concepts/architecture.rst +++ /dev/null @@ -1,167 +0,0 @@ -.. _divedeep-architecture-overview: - -###################### -Component Architecture -###################### - -.. tags:: Advanced, Glossary, Design - -This document aims to demystify how Flyte's major components ``Flyteidl``, ``Flytekit``, ``Flytectl``, ``FlyteConsole``, ``FlyteAdmin``, ``FlytePropeller``, and ``FlytePlugins`` fit together at a high level. - -FlyteIDL -======== - -In Flyte, entities like "Workflows", "Tasks", "Launch Plans", and "Schedules" are recognized by multiple system components. For components to communicate effectively, they need a shared understanding about the structure of these entities. - -Flyteidl (Interface Definition Language) is where shared Flyte entities are defined. It also defines the RPC service definition for the :std:ref:`core Flyte API `. - -Flyteidl uses the `protobuf `_ schema to describe entities. 
Clients are generated for Python, Golang, and JavaScript and imported by Flyte components. - - -Planes -====== - -Flyte components are separated into 3 logical planes. The planes are summarized and explained in detail below. The goal is that these planes can be replaced by alternate implementations. - -+-------------------+---------------------------------------------------------------------------------------------------------------+ -| **User Plane** | The User Plane consists of all user tools that assist in interacting with the core Flyte API. | -| | These tools include the FlyteConsole, Flytekit, and Flytectl. | -+-------------------+---------------------------------------------------------------------------------------------------------------+ -| **Control Plane** | The Control Plane implements the core Flyte API. | -| | It serves all client requests coming from the User Plane. | -| | It stores information such as current and past running workflows, and provides that information upon request. | -| | It also accepts requests to execute workflows, but offloads the work to the Data Plane. | -+-------------------+---------------------------------------------------------------------------------------------------------------+ -| **Data Plane** | The sole responsibility of the the Data Plane is to fulfill workflows. | -| | It accepts workflow requests from the Control Plane and guides the workflow to completion, | -| | launching tasks on a cluster of machines as necessary based on the workflow graph. | -| | It sends status events back to the control plane so the information can be stored and surfaced to end-users. | -+-------------------+---------------------------------------------------------------------------------------------------------------+ - -.. 
image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/concepts/architecture/flyte-logical-architecture.png - -User Plane ----------- - -In Flyte, workflows are represented as a Directed Acyclic Graph (DAG) of tasks. While this representation is logical for services, managing workflow DAGs in this format is a tedious exercise for humans. The Flyte User Plane provides tools to create, manage, and visualize workflows in a format that is easily digestible to the users. - -These tools include: - -Flytekit - Flytekit is an SDK that helps users design new workflows using the Python programming language. It can parse the Python code, compile it into a valid Workflow DAG, and submit it to Flyte for execution. - -FlyteConsole - FlyteConsole provides the Web interface for Flyte. Users and administrators can use the console to view workflows, launch plans, schedules, tasks, and individual task executions. The console provides tools to visualize workflows, and surfaces relevant logs for debugging failed tasks. - -Flytectl - Flytectl provides interactive access to Flyte to launch and access workflows via terminal. - - -Control Plane -------------- - -The Control Plane supports the core REST/gRPC API defined in Flyteidl. User Plane tools like FlyteConsole and Flytekit contact the control plane on behalf of users to store and retrieve information. - -Currently, the entire control plane is handled by a single service called **FlyteAdmin**. - -FlyteAdmin is stateless. It processes requests to create entities like tasks, workflows, and schedules by persisting data in a relational database. - -While FlyteAdmin serves the Workflow Execution API, it does not itself execute workflows. To launch workflow executions, FlyteAdmin sends the workflow DAG to the DataPlane. For added scalability and fault-tolerance, FlyteAdmin can be configured to load-balance workflows across multiple isolated data-plane clusters. 
- - -Data Plane ----------- - -The Data Plane is the engine that accepts DAGs, and fulfills workflow executions by launching tasks in the order defined by the graph. Requests to the Data Plane generally come via the control plane, and not from end-users. - -In order to support compute-intensive workflows at massive scale, the Data Plane needs to launch containers on a cluster of machines. The current implementation leverages `Kubernetes `_ for cluster management. - -Unlike the user-facing Control Plane, the Data Plane does not expose a traditional REST/gRPC API. To launch an execution in the Data Plane, you create a “flyteworkflow” resource in Kubernetes. -A “flyteworkflow” is a Kubernetes `Custom Resource `_ (CRD) created by our team. This custom resource represents the Flyte workflow DAG. - -The core state machine that processes flyteworkflows is the worker known as **FlytePropeller**. - -FlytePropeller leverages the Kubernetes `operator pattern `_. It polls the Kubernetes API, looking for newly created flyteworkflow resources. FlytePropeller understands the workflow DAG, and launches the appropriate Kubernetes pods as needed to complete tasks. It periodically checks for completed tasks, launching downstream tasks until the workflow is complete. - -**Plugins** - -Each task in a flyteworkflow DAG has a specified **type**. The logic for fulfilling a task is determined by its task type. -In the basic case, FlytePropeller launches a single Kubernetes pod to fulfill a task. -Complex task types require workloads to be distributed across hundreds of pods. - -The type-specific task logic is separated into isolated code modules known as **plugins**. -Each task type has an associated plugin that is responsible for handling tasks of its type. -For each task in a workflow, FlytePropeller activates the appropriate plugin based on the task type in order to fulfill the task. - -The Flyte team has pre-built plugins for Hive, Spark, AWS Batch, and :ref:`more `. 
-To support new use-cases, developers can create their own plugins and bundle them in their FlytePropeller deployment. - -Component Code Architecture -=========================== - -.. panels:: - :container: container-lg pb-4 - :column: col-lg-12 p-2 - :body: text-center - - .. link-button:: flytepropeller-architecture - :type: ref - :text: FlytePropeller - :classes: btn-block stretched-link - - --- - - .. link-button:: native-scheduler-architecture - :type: ref - :text: Flyte Native Scheduler - :classes: btn-block stretched-link - -Component Code References -========================= - -.. panels:: - :container: container-lg pb-4 - :column: col-lg-12 p-2 - :body: text-center - - .. link-button:: https://pkg.go.dev/mod/github.com/flyteorg/flyteadmin - :type: url - :text: FlyteAdmin - :classes: btn-block stretched-link - - --- - - .. link-button:: https://pkg.go.dev/mod/github.com/flyteorg/flytepropeller - :type: url - :text: FlytePropeller - :classes: btn-block stretched-link - - --- - - .. link-button:: https://pkg.go.dev/mod/github.com/flyteorg/datacatalog - :type: url - :text: DataCatalog - :classes: btn-block stretched-link - - --- - - .. link-button:: https://pkg.go.dev/mod/github.com/flyteorg/flyteplugins - :type: url - :text: FlytePlugins - :classes: btn-block stretched-link - - --- - - .. link-button:: https://pkg.go.dev/github.com/flyteorg/flyteadmin/scheduler - :type: url - :text: Flyte Native Scheduler - :classes: btn-block stretched-link - - -.. toctree:: - :maxdepth: 1 - :name: component code architecture - :hidden: - - component_architecture/flytepropeller_architecture - component_architecture/native_scheduler_architecture diff --git a/rsts/concepts/basics.rst b/rsts/concepts/basics.rst deleted file mode 100644 index 9ebf154761..0000000000 --- a/rsts/concepts/basics.rst +++ /dev/null @@ -1,155 +0,0 @@ -.. _divedeep: - -############# -Concepts -############# - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. 
link-button:: divedeep-tasks - :type: ref - :text: Tasks - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - A **Task** is any independent unit of processing. Tasks can be pure functions or functions with side-effects. - Each definition of a task also has associated configurations and requirements specifications. - - --- - - .. link-button:: divedeep-workflows - :type: ref - :text: Workflows - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - **Workflows** are programs that are guaranteed to eventually reach a terminal state and are represented as - Directed Acyclic Graphs (DAGs) expressed in protobuf. - - --- - - .. link-button:: divedeep-nodes - :type: ref - :text: Nodes - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - A **Node** is an encapsulation of an instance of a Task. Nodes represent the unit of work, where multiple Nodes are - interconnected via workflows. - - --- - - .. link-button:: divedeep-launchplans - :type: ref - :text: Launch Plans - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - **Launch Plans** provide a mechanism to specialize input parameters for workflows associated with different schedules. - - --- - - .. link-button:: concepts-schedules - :type: ref - :text: Scheduling Launch Plans - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - **Scheduling** is critical to data and ML jobs. Flyte provides a native Cron-style scheduler. - - --- - - .. link-button:: divedeep-registration - :type: ref - :text: Registration - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - **Registration** is the process of uploading a workflow and its task definitions to the FlyteAdmin service. - Registration creates an inventory of available tasks, workflows and launch plans, declared per project and domain. - - --- - - .. 
link-button:: divedeep-executions - :type: ref - :text: Executions - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - **Executions** are instances of workflows, nodes or tasks created in the system as a result of a user-requested - execution or a scheduled execution. - - --- - - .. link-button:: divedeep-state-machine - :type: ref - :text: State Machine for an Execution - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - The various states an **Execution** passes through. - - --- - - .. link-button:: divedeep-execution-timeline - :type: ref - :text: Life of an Execution - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - How an **Execution** progresses through the FlytePropeller execution engine and the timeline. - - --- - - .. link-button:: divedeep-data-management - :type: ref - :text: How Flyte Manages Data - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - What is **metadata**? How are large amounts of **raw data** handled? How does data flow between tasks? - - --- - - .. link-button:: ui - :type: ref - :text: Flyte UI Walkthrough - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - A quick overview of the **FlyteConsole**. - - --- - - .. link-button:: divedeep-catalog - :type: ref - :text: Platform-wide Memoization/Caching - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - A deeper dive into **memoization** and the mechanics of memoization in Flyte. - - --- - - .. link-button:: divedeep-versioning - :type: ref - :text: Workflow & Task Versioning - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - A deeper dive into one of Flyte's most important features: versioning of workflows and tasks. - - -The diagram below shows how inputs flow through tasks and workflows to produce outputs. - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/concepts/basics/flyte_wf_tasks_high_level.png - - -.. 
toctree:: - :maxdepth: 1 - :name: Core Concepts - :hidden: - - tasks - workflows - nodes - launchplans - schedules - registration - executions - state_machine - execution_timeline - data_management - flyte_console - catalog - versioning - workflow_lifecycle diff --git a/rsts/concepts/catalog.rst b/rsts/concepts/catalog.rst deleted file mode 100644 index 8b092e73c0..0000000000 --- a/rsts/concepts/catalog.rst +++ /dev/null @@ -1,63 +0,0 @@ -.. _divedeep-catalog: - -What is Data Catalog? -===================== - -.. tags:: Advanced, Design - -`DataCatalog `__ is a service to index parameterized, strongly-typed data artifacts across revisions. It allows clients to query artifacts based on meta information and tags. - - -How Flyte Memoizes Task Executions on Data Catalog --------------------------------------------------- - -Flyte `memoizes task executions` by creating artifacts in DataCatalog and associating meta information regarding the execution with the artifact. Let's walk through what happens when a task execution is cached on DataCatalog. - -Every task instance is represented as a DataSet: - -.. code-block:: javascript - - Dataset { - project: Flyte project the task was registered in - domain: Flyte domain for the task execution - name: flyte_task- - version: -- - } - -Every task execution is represented as an Artifact in the Dataset above: - -.. code-block:: javascript - - Artifact { - id: uuid - Metadata: [executionName, executionVersion] - ArtifactData: [List of ArtifactData] - } - - - ArtifactData { - Name: - value: - } - -To retrieve the Artifact, tag the Artifact with a hash of the input values for the memoized task execution: - -.. code-block:: javascript - - ArtifactTag { - Name: flyte_cached- - } - -When caching an execution, FlytePropeller will: - -1. Create a dataset for the task. -2. Create an artifact that represents the execution, along with the artifact data that represents the execution output. -3. 
Tag the artifact with a unique hash of the input values. - -To ensure that the task execution is memoized, Flyte Propeller will: - -1. Compute the tag by computing the hash of the input. -2. Check if a tagged artifact exists with that hash. - - - If it exists, we have a cache hit and the Propeller can skip the task execution. - - If an artifact is not associated with the tag, Propeller needs to run the task. diff --git a/rsts/concepts/component_architecture/flytepropeller_architecture.rst b/rsts/concepts/component_architecture/flytepropeller_architecture.rst deleted file mode 100644 index a04f6dbe4d..0000000000 --- a/rsts/concepts/component_architecture/flytepropeller_architecture.rst +++ /dev/null @@ -1,81 +0,0 @@ -.. _flytepropeller-architecture: - -########################### -FlytePropeller Architecture -########################### - -.. tags:: Advanced, Design - -.. note:: - In the frame of this document, we use the term “workflow” to describe the single execution of a workflow definition. - -Introduction -============ - -A Flyte :ref:`workflow ` is represented as a Directed Acyclic Graph (DAG) of interconnected Nodes. Flyte supports a robust collection of Node types to ensure diverse functionality. - -- ``TaskNodes`` support a plugin system to externally add system integrations. -- Control flow can be altered during runtime using ``BranchNodes``, which prune downstream evaluation paths based on input. -- ``DynamicNodes`` add nodes to the DAG. -- ``WorkflowNodes`` allow embedding workflows within each other. - -FlytePropeller is responsible for scheduling and tracking execution of Flyte workflows. It is implemented using a K8s controller and adheres to the established K8s design principles. In this scheme, resources are periodically evaluated and the goal is to transition from the observed state to a requested state. - -In our case, workflows are the resources and they are iteratively evaluated to transition from the current state to success. 
During each loop, the current workflow state is established as the phase of workflow nodes and subsequent tasks, and FlytePropeller performs operations to transition this state to success. The operations may include scheduling (or rescheduling) node executions, evaluating dynamic or branch nodes, etc. These design decisions ensure that FlytePropeller can scale to manage a large number of concurrent workflows without performance degradation. - -This document attempts to break down the FlytePropeller architecture by tracking workflow life cycle through each internal component. Below is a high-level illustration of the FlytePropeller architecture and a flow chart of each component's responsibilities during FlyteWorkflow execution. - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/concepts/architecture/flytepropeller_architecture.png - -Components -========== - -FlyteWorkflow CRD / K8s Integration ------------------------------------ - -Workflows in Flyte are maintained as Custom Resource Definitions (CRDs) in Kubernetes, which are stored in the backing etcd cluster. Each execution of a workflow definition results in the creation of a new FlyteWorkflow CR (Custom Resource) which maintains a state for the entirety of processing. CRDs provide variable definitions to describe both resource specifications (spec) and status' (status). The FlyteWorkflow CRD uses the spec subsection to detail the workflow DAG, embodying node dependencies, etc. The status subsection tracks workflow metadata including overall workflow status, node/task phases, status/phase transition timestamps, etc. - -K8s exposes a powerful controller/operator API that enables entities to track creation/updates over a specific resource type. FlytePropeller uses this API to track FlyteWorkflows, meaning every time an instance of the FlyteWorkflow CR is created/updated, the FlytePropeller instance is notified. 
FlyteAdmin is the common entry point, where initialization of FlyteWorkflow CRs may be triggered by user workflow definition executions, automatic relaunches, or periodically scheduled workflow definition executions. However, it is conceivable to manually create FlyteWorkflow CRs, but this will have limited visibility and usability. - -WorkQueue/WorkerPool ----------------------- - -FlytePropeller supports concurrent execution of multiple, unique workflows using a WorkQueue and WorkerPool. - -The WorkQueue is a FIFO queue storing workflow ID strings that require a lookup to retrieve the FlyteWorkflow CR to ensure up-to-date status. A workflow may be added to the queue in a variety of circumstances: - -#. A new FlyteWorkflow CR is created or an existing instance is updated -#. The K8s Informer resyncs the FlyteWorkflow periodically (necessary to detect workflow timeouts and ensure liveness) -#. A FlytePropeller worker experiences an error during a processing loop -#. The WorkflowExecutor observes a completed downstream node -#. A NodeHandler observes state change and explicitly enqueues its owner (For example, K8s pod informer observes completion of a task) - -The WorkerPool is implemented as a collection of goroutines, one for each worker. Using this lightweight construct, FlytePropeller can scale to 1000s of workers on a single CPU. Workers continually poll the WorkQueue for workflows. On success, the workflow is executed (passed to WorkflowExecutor). - -WorkflowExecutor ----------------- - -The WorkflowExecutor is responsible for handling high-level workflow operations. This includes maintaining the workflow phase (for example: running, failing, succeeded, etc.) according to the underlying node phases and administering pending cleanup operations. For example, aborting existing node evaluations during workflow failures or removing FlyteWorkflow CRD finalizers on completion to ensure the CR is deleted. 
Additionally, at the conclusion of each evaluation round, the WorkflowExecutor updates the FlyteWorkflow CR with updated metadata fields to track the status between evaluation iterations. - -NodeExecutor ------------- - -The NodeExecutor is executed on a single node, beginning with the workflow's start node. It traverses the workflow using a visitor pattern with a modified depth-first search (DFS), evaluating each node along the path. A few examples of node evaluation based on phase: successful nodes are skipped, unevaluated nodes are queued for processing, and failed nodes may be reattempted up to a configurable threshold. There are many configurable parameters to tune evaluation criteria including max parallelism which restricts the number of nodes which may be scheduled concurrently. Additionally, nodes may be retried to ensure recoverability on failure. - -The NodeExecutor is also responsible for linking data readers/writers to facilitate data transfer between node executions. The data transfer process occurs automatically within Flyte, using efficient K8s events rather than a polling listener pattern which incurs more overhead. Relatively small amounts of data may be passed between nodes inline, but it is more common to pass data URLs to backing storage. A component of this is writing to and checking the data cache, which facilitates the reuse of previously completed evaluations. - -NodeHandlers ------------- - -FlytePropeller includes a robust collection of NodeHandlers to support diverse evaluation of the workflow DAG: - -* **TaskHandler (Plugins)**: These are responsible for executing plugin specific tasks. This may include contacting FlyteAdmin to schedule K8s pod to perform work, calling a web API to begin/track evaluation, and much more. The plugin paradigm exposes an extensible interface for adding functionality to Flyte workflows. -* **DynamicHandler**: Flyte workflow CRs are initialized using a DAG compiled during the registration process. 
The numerous benefits of this approach are beyond the scope of this document. However, there are situations where the complete DAG is unknown at compile time. For example, when executing a task on each value of an input list. Using Dynamic nodes, a new DAG subgraph may be dynamically compiled during runtime and linked to the existing FlyteWorkflow CR. -* **WorkflowHandler**: This handler allows embedding workflows within another workflow definition. The API exposes this functionality using either (1) an inline execution, where the workflow function is invoked directly resulting in a single FlyteWorkflow CR with an appended sub-workflow, or (2) a launch plan, which uses a TODO to create a separate sub-FlyteWorkflow CR whose execution state is linked to the parent FlyteWorkflow CR. -* **BranchHandler**: The branch handler allows the DAG to follow a specific control path based on input (or computed) values. -* **Start / End Handlers**: These are dummy handlers which process input and output data and in turn transition start and end nodes to success. - -FlyteAdmin Events ------------------ - -It should be noted that the WorkflowExecutor, NodeExecutor, and TaskHandlers send events to FlyteAdmin, enabling it to track workflows in near real-time. diff --git a/rsts/concepts/component_architecture/native_scheduler_architecture.rst b/rsts/concepts/component_architecture/native_scheduler_architecture.rst deleted file mode 100644 index 19f13ef6c7..0000000000 --- a/rsts/concepts/component_architecture/native_scheduler_architecture.rst +++ /dev/null @@ -1,77 +0,0 @@ -.. _native-scheduler-architecture: - -################################### -Flyte Native Scheduler Architecture -################################### - -.. tags:: Advanced, Design - -Introduction -============ -Any workflow engine needs functionality to support scheduled executions. Flyte -fulfills this using an in-built native scheduler, which schedules fixed rate and -cron-based schedules. 
The workflow author specifies the schedule during the -:ref:`launchplan creation ` -and :ref:`activates or deactivates ` -the schedule using the -:ref:`admin APIs ` -exposed for the launch plan. - -Characteristics -=============== - -#. Cloud provider independent -#. Standard `cron `__ support -#. Independently scalable -#. Small memory footprint -#. Schedules run as lightweight goroutines -#. Fault tolerant and available -#. Support in sandbox environment - - -Components -========== - -Schedule Management -------------------- - -This component supports creation/activation and deactivation of schedules. Each schedule is tied to a launch plan and is versioned in a similar manner. The schedule is created or its state is changed to activated/deactivated whenever the `admin API `__ is invoked for it with `ACTIVE/INACTIVE state `__. This is done either through `flytectl `__ or through any other client that calls the GRPC API. -The API is similar to a launchplan, ensuring that only one schedule is active for a given launchplan. - - -Scheduler ---------- - -This component is a singleton and is responsible for reading the schedules from the DB and running them at the cadence defined by the schedule. The lowest granularity supported is `minutes` for scheduling through both cron and fixed rate schedulers. The scheduler can run in one replica, two at the most during redeployment. Multiple replicas will only duplicate the work, since each execution for a scheduleTime will have a unique identifier derived from the schedule name and the time of the schedule. The idempotency aspect of the admin for the same identifier prevents duplication on the admin side. The scheduler runs continuously in a loop reading the updated schedule entries in the data store and adding or removing the schedules. Removing a schedule will not alter the in-flight goroutines launched by the scheduler. Thus, the behavior of these executions is undefined. 
- - -Snapshoter -********** - -This component is responsible for writing the snapshot state of all schedules at a regular cadence to a persistent store. It uses a DB to store the GOB format of the snapshot, which is versioned. The snapshot is a map[string]time.Time, which stores a map of schedule names to their last execution times. During bootup, the snapshot is bootstrapped from the data store and loaded into memory. The Scheduler uses this snapshot to schedule any missed schedules. - -CatchupAll-System -***************** -This component runs at bootup and catches up all the schedules to current time, i.e., time.Now(). New runs for the schedules are sent to the admin in parallel. -Any failure in catching up is considered a hard failure and stops the scheduler. The rerun tries to catchup from the last snapshot of data. - -GOCronWrapper -************* - -This component is responsible for locking in the time for the scheduled job to be invoked and adding those to the cron scheduler. It is a wrapper around `this framework `__ for fixed rate and cron schedules that creates in-memory representation of the scheduled job functions. The scheduler schedules a function with scheduleTime parameters. When this scheduled function is invoked, the scheduleTime parameters provide the current schedule time used by the scheduler. This scheduler supports standard cron scheduling which has 5 `fields `__. It requires 5 entries representing ``minute``, ``hour``, ``day of month``, ``month`` and ``day of week``, in that order. - -Job Executor -************ - -The job executor component is responsible for sending the scheduled executions to FlyteAdmin. The job function accepts ``scheduleTime`` and the schedule which is used to create an execution request to the admin. Each job function is tied to the schedule which is executed in a separate goroutine in accordance with the schedule cadence. 
- -Monitoring ----------- - -To monitor the system health, the following metrics are published by the native scheduler: - -#. JobFuncPanicCounter : count of crashes of the job functions executed by the scheduler. -#. JobScheduledFailedCounter : count of scheduling failures by the scheduler. -#. CatchupErrCounter : count of unsuccessful attempts to catchup on the schedules. -#. FailedExecutionCounter : count of unsuccessful attempts to fire executions of a schedule. -#. SuccessfulExecutionCounter : count of successful attempts to fire executions of a schedule. diff --git a/rsts/concepts/console.rst b/rsts/concepts/console.rst deleted file mode 100644 index d872f8990c..0000000000 --- a/rsts/concepts/console.rst +++ /dev/null @@ -1,128 +0,0 @@ -.. _divedeep-console: - -############ -FlyteConsole -############ - -.. tags:: Intermediate, Contribute - -FlyteConsole is the web UI for the Flyte platform. Here's a video that dives into the graph UX: - -.. youtube:: 7YSc-QHk_Ec - -********************* -Running FlyteConsole -********************* - -===================== -Install Dependencies -===================== -Running FlyteConsole locally requires `NodeJS `_ and -`yarn `_. Once these are installed, all of the dependencies -can be installed by running ``yarn`` in the project directory. - -====================== -Environment Variables -====================== -Before we can run the server, we need to set up an environment variable or two. - -``ADMIN_API_URL`` (default: `window.location.origin `_) - -FlyteConsole displays information fetched from the FlyteAdmin API. This -environment variable specifies the host prefix used in constructing API requests. - -.. NOTE:: - This is only the host portion of the API endpoint, consisting of the - protocol, domain, and port (if not using the standard 80/443). - -This value will be combined with a suffix (such as ``/api/v1``) to construct the -final URL used in an API request. 
- -**Default Behavior** - -In most cases, ``FlyteConsole`` is hosted in the same cluster as the Admin -API, meaning that the domain used to access the console is the same as that used to -access the API. For this reason, if no value is set for ``ADMIN_API_URL``, the -default behavior is to use the value of `window.location.origin`. - - -**``BASE_URL`` (default: ``undefined``)** - -This allows running the console at a prefix on the target host. This is -necessary when hosting the API and console on the same domain (with prefixes of -``/api/v1`` and ``/console`` for example). For local development, this is -usually not needed, so the default behavior is to run without a prefix. - - -**``CORS_PROXY_PREFIX`` (default: ``/cors_proxy``)** - -Sets the local endpoint for `CORS request proxying `_. - -=============== -Run the Server -=============== - -To start the local development server, run ``yarn start``. This will spin up a -Webpack development server, compile all of the code into bundles, and start the -NodeJS server on the default port (3000). All requests to the NodeJS server will -be stalled until the bundles have finished. The application will be accessible -at http://localhost:3000 (if using the default port). - -************ -Development -************ - -========== -Storybook -========== - -FlyteConsole uses `Storybook `__. -Component stories live next to the components they test in the ``__stories__`` -directory with the filename pattern ``{Component}.stories.tsx``. - -You can run storybook with ``npm run storybook``, and view the stories at http://localhost:9001. - -============================= -Protobuf and the Network tab -============================= - -Communication with the FlyteAdmin API is done using Protobuf as the -request/response format. Protobuf is a binary format, which means looking at -responses in the Network tab won't be helpful. 
To make debugging easier, -each network request is logged to the console with its URL, followed by the -decoded Protobuf payload. You must have debug output enabled (on by default in -development) to see these messages. - -============ -Debug Output -============ - -This application makes use of the `debug `_ -library to provide namespaced debug output in the browser console. In -development, all debug output is enabled. For other environments, the debug -output must be enabled manually. You can do this by setting a flag in -localStorage using the console: ``localStorage.debug = 'flyte:*'``. Each module in -the application sets its own namespace. So if you'd like to only view output for -a single module, you can specify that one specifically -(ex. ``localStorage.debug = 'flyte:adminEntity'`` to only see decoded Flyte -Admin API requests). - -.. _cors-proxy: - -============== -CORS Proxying -============== - -In the common hosting arrangement, all API requests are made to the same origin -serving the client application, making CORS unnecessary. For any requests which -do not share the same ``origin`` value, the client application will route -requests through a special endpoint on the NodeJS server. One example would be -hosting the Admin API on a different domain than the console. Another example is fetching execution data from external storage such as S3. This is done to -minimize the extra configuration required for ingress to the Admin API -and data storage, as well as to simplify local development of the console without -the need to grant CORS access to ``localhost``. - -The requests and responses are piped through the NodeJS server with minimal -overhead. However, it is still recommended to host the Admin API and console on -the same domain to prevent unnecessary load on the NodeJS server and extra -latency on API requests due to the additional hop. 
diff --git a/rsts/concepts/control_plane.rst b/rsts/concepts/control_plane.rst deleted file mode 100644 index 16347e6389..0000000000 --- a/rsts/concepts/control_plane.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. _control-plane: - -################ -Control Plane -################ - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. link-button:: divedeep-projects - :type: ref - :text: Projects - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - **Projects** are a multi-tenancy primitive in Flyte that allow logical grouping of Flyte workflows and tasks, which - often correspond to source code repositories. - - --- - - .. link-button:: divedeep-domains - :type: ref - :text: Domains - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - **Domains** enable workflows to be executed in different environments, with separate resource isolation and feature - configuration. - - --- - - .. link-button:: divedeep-admin - :type: ref - :text: FlyteAdmin - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - **FlyteAdmin** is the backend that serves the main Flyte API, processing all client requests to the system. - - --- - - .. link-button:: divedeep-console - :type: ref - :text: Flyte Console - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - **Flyte Console** is the web UI for the Flyte platform. - - -.. toctree:: - :maxdepth: 1 - :hidden: - - projects - domains - admin - console - dynamic_spec diff --git a/rsts/concepts/data_management.rst b/rsts/concepts/data_management.rst deleted file mode 100644 index 0d4edbd0a8..0000000000 --- a/rsts/concepts/data_management.rst +++ /dev/null @@ -1,176 +0,0 @@ -.. _divedeep-data-management: - -################################# -Understand How Flyte Handles Data -################################# - -.. tags:: Basic, Glossary, Design - -Types of Data -============= - -There are two parts to the data in Flyte: - -1. Metadata - -* It consists of data about inputs to a task, and other artifacts. 
-* It is configured globally for FlytePropeller, FlyteAdmin etc., and the running pods/jobs need access to this bucket to get the data. - -2. Raw data - -* It is the actual data (such as the Pandas DataFrame, Spark DataFrame, etc.). -* Raw data paths are unique for every execution, and the prefixes can be modified per execution. -* None of the Flyte control plane components would access the raw data. This provides great separation of data between the control plane and the data plane. - -.. note: - Metadata and raw data can be present in entirely separate buckets. - - -Let us consider a simple Python task: - -.. code-block:: python - - @task - def my_task(m: int, n: str, o: FlyteFile) -> pd.DataFrame: - ... - -In the above code sample, ``m``, ``n``, ``o`` are inputs to the task. -``m`` of type ``int`` and ``n`` of type ``str`` are simple primitive types, while ``o`` is an arbitrarily sized file. -All of them from Flyte's point of view are ``data``. -The difference lies in how Flyte stores and passes each of these data items. - -For every task that receives input, Flyte sends an **Inputs Metadata** object, which contains all the primitive or simple scalar values inlined, but in the case of -complex, large objects, they are offloaded and the `Metadata` simply stores a reference to the object. In our example, ``m`` and ``n`` are inlined while -``o`` and the output ``pd.DataFrame`` are offloaded to an object store, and their reference is captured in the metadata. - -`Flytekit TypeTransformers` make it possible to use complex objects as if they are available locally - just like persistent filehandles. But Flyte backend only deals with -the references. - -Thus, primitive data types and references to large objects fall under Metadata - `Meta input` or `Meta output`, and the actual large object is known as **Raw data**. 
-A unique property of this separation is that all `meta values` are read by FlytePropeller engine and available on the FlyteConsole or CLI from the control plane. -`Raw` data is not read by any of the Flyte components and hence it is possible to store it in a completely separate blob storage or alternate stores, which can't be accessed by Flyte control plane components -but can be accessed by users's container/tasks. - -Raw Data Prefix -~~~~~~~~~~~~~~~ - -Every task can read/write its own data files. If ``FlyteFile`` or any natively supported type like ``pandas.DataFrame`` is used, Flyte will automatically offload and download -data from the configured object-store paths. These paths are completely customizable per `LaunchPlan` or `Execution`. - -- The default Rawoutput path (prefix in an object store like S3/GCS) can be configured during registration as shown in :std:ref:`flytectl_register_files`. - The argument ``--outputLocationPrefix`` allows us to set the destination directory for all the raw data produced. Flyte will create randomized folders in this path to store the data. -- To override the ``RawOutput`` path (prefix in an object store like S3/GCS), you can specify an alternate location when invoking a Flyte execution, as shown in the following screenshot of the LaunchForm in FlyteConsole: - - .. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/concepts/data_movement/launch_raw_output.png - -- In the sandbox, the default Rawoutput-prefix is configured to be the root of the local bucket. Hence Flyte will write all the raw data (reference types like blob, file, df/schema/parquet, etc.) under a path defined by the execution. - - -Metadata -~~~~~~~~ - -Metadata in Flyte is critical to enable the passing of data between tasks. It allows to perform in-memory computations for branches or send partial outputs from one task to another or compose outputs from multiple tasks into one input to be sent to a task. 
- -Thus, metadata is restricted due to its omnipresence. Each `meta output`/`input` cannot be larger than 1MB. If you have `List[int]`, it cannot be larger than 1MB, considering other input entities. In scenarios where large lists or strings need to be sent between tasks, file abstraction is preferred. - -``LiteralType`` & Literals -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -SERIALIZATION TIME -^^^^^^^^^^^^^^^^^^ - -When a task is declared with inputs and outputs, Flyte extracts the interface of the task and converts it to an internal representation called a :std:ref:`ref_flyteidl.core.typedinterface`. -For each variable, a corresponding :std:ref:`ref_flyteidl.core.literaltype` is created. - -For example, the following Python function's interface is transformed as follows: - -.. code-block:: python - - @task - def my_task(a: int, b: str) -> FlyteFile: - """ - Description of my function - - :param a: My input integer - :param b: My input string - :return: My output file - """ - ... - -.. code-block:: - - interface { - inputs { - variables { - key: "a" - value { - type { - simple: INTEGER - } - description: "My input Integer" - } - } - variables { - key: "b" - value { - type { - simple: STRING - } - description: "My input string" - } - } - } - outputs { - variables { - key: "o0" - value { - type { - blob { - } - } - description: "My output File" - } - } - } - } - - -RUNTIME -^^^^^^^ - -At runtime, data passes through Flyte using :std:ref:`ref_flyteidl.core.literal` where the values are set. -For files, the corresponding ``Literal`` is called ``LiteralBlob`` (:std:ref:`ref_flyteidl.core.blob`) which is a binary large object. -Many different objects can be mapped to the underlying `Blob` or `Struct` types. For example, an image is a Blob, a ``pandas.DataFrame`` is a Blob of type parquet, etc. - -Data Movement -============= - -Flyte is primarily a **DataFlow Engine**. It enables movement of data and provides an abstraction to enable movement of data between different languages. 
- -One implementation of Flyte is the current workflow engine. - -The workflow engine is responsible for moving data from a previous task to the next task. As explained previously, Flyte only deals with Metadata and not the actual Raw data. -The illustration below explains how data flows from engine to the task and how that is transferred between tasks. The medium to transfer the data can change, and will change in the future. -We could use fast metadata stores to speed up data movement or exploit locality. - -Between Flytepropeller and Tasks -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/concepts/data_movement/flyte_data_movement.png - - -Between Tasks -~~~~~~~~~~~~~~ - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/concepts/data_movement/flyte_data_transfer.png - - -Bringing in Your Own Datastores for Raw Data -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Flytekit has a pluggable data persistence layer. -This is driven by PROTOCOL. -For example, it is theoretically possible to use S3 ``s3://`` for metadata and GCS ``gcs://`` for raw data. It is also possible to create your own protocol ``my_fs://``, to change how data is stored and accessed. -But for Metadata, the data should be accessible to Flyte control plane. - -Data persistence is also pluggable. By default, it supports all major blob stores and uses an interface defined in Flytestdlib. diff --git a/rsts/concepts/domains.rst b/rsts/concepts/domains.rst deleted file mode 100644 index bb306924dd..0000000000 --- a/rsts/concepts/domains.rst +++ /dev/null @@ -1,13 +0,0 @@ -.. _divedeep-domains: - -Domains -======= - -.. tags:: Basic, Glossary - -Domains provide an abstraction to isolate resources and feature configuration for different -deployment environments. - -For example: We develop and deploy Flyte workflows in development, staging, and production. 
We configure Flyte domains with those names, and specify lower resource limits on the development and staging domains than production domains. - -We also use domains to disable launch plans and schedules from development and staging domains, since those features are typically meant for production deployments. \ No newline at end of file diff --git a/rsts/concepts/dynamic_spec.rst b/rsts/concepts/dynamic_spec.rst deleted file mode 100644 index 4e9e11ad3c..0000000000 --- a/rsts/concepts/dynamic_spec.rst +++ /dev/null @@ -1,50 +0,0 @@ -.. _divedeep-dynamic-spec: - -Dynamic Job Spec -================ - -.. tags:: Basic, Design - -A dynamic job spec is a subset of the entire workflow spec that defines a set of tasks, workflows, nodes, and output bindings that control how the job should assemble its outputs. - -This spec is currently only supported as an intermediate step in running Dynamic Tasks. - -.. code-block:: protobuf - :caption: Dynamic job spec in Protobuf - - message DynamicJobSpec { - repeated Node nodes = 1; - int64 min_successes = 2; - repeated Binding outputs = 3; - - repeated TaskTemplate tasks = 4; - repeated WorkflowTemplate subworkflows = 5; - } - -.. _divedeep-dynamic-tasks: - -Tasks ------ - -Defines one or more :ref:`Tasks ` that can then be referenced in the spec. - -.. _divedeep-dynamic-subworkflows: - -Subworkflows ------------- - -Defines zero or more :ref:`Workflows ` that can then be referenced in the spec. - -.. _divedeep-dynamic-nodes: - -Nodes ------ - -Defines one or more :ref:`Nodes ` that can run in parallel to produce the final outputs of the spec. - -.. _divedeep-dynamic-outputs: - -Outputs -------- - -Defines one or more binding that instructs engine on how to assemble the final outputs. \ No newline at end of file diff --git a/rsts/concepts/execution_timeline.rst b/rsts/concepts/execution_timeline.rst deleted file mode 100644 index 276930c94e..0000000000 --- a/rsts/concepts/execution_timeline.rst +++ /dev/null @@ -1,72 +0,0 @@ -.. 
_divedeep-execution-timeline: - -######################################## -Timeline of a workflow execution -######################################## - -.. tags:: Intermediate, Glossary - -The illustration below shows the timeline view of a workflow execution. - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/deployment/monitoring/flyte_wf_timeline.svg?sanitize=true - - -The illustration above refers to a simple workflow, with 2 nodes N1 & N2. This can be represented as follows, - -.. mermaid:: - - graph LR; - Start --> N1; - N1 --> N2; - N2 --> End; - - -Acceptance Latency -==================== -Every workflow starts in the ``Acceptance`` phase. Acceptance refers to the time between FlyteAdmin receiving an execution request and FlytePropeller evaluating the first round of workflow. -Usually, within this phase, the K8s queuing latency is the largest contributor to latency where the overall acceptance latency of <5s is desirable. - -Transition Latency -=================== -Transition latency refers to the time between successive node executions, that is, between ``N1`` and ``N2``. For the first node ``N1``, this latency also encapsulates executing the start node. - -Similarly, the last node also encapsulates executing end node. ``Start Node`` and ``End Node`` are capstones inserted to mark the beginning and end of the DAG. - -The latency involves time consumed to: - -#. Gather outputs for a node after the node completes execution. -#. Send an observation event to FlyteAdmin. Failing to do so will be regarded as an error and will be tried until it succeeds or system max retries are exhausted (the number of max system retries is configured to be 30 by default and can be altered per deployment). -#. Persist data to Kubernetes. -#. Receive the persisted object back from Kubernetes (as this process is eventually consistent using informer caches). -#. Gather inputs for a node before the node starts. -#. 
Send a queued event for the next node to FlyteAdmin (this is what is persisted and drives the UI/CLI and historical information). - -Queuing Latency -================ -Queuing latency is the time taken by Kubernetes to start the pod, other services to start the job, HTTP throttle to be met, or any rate-limiting that needs to be overcome. This -is usually tied to the available resources and quota, and is out of control for Flyte. - -Completion Latency -=================== -Completion latency is the time taken to mark the workflow as complete and accumulate outputs of a workflow after the last node completes its execution. - - -Overview of Various Latencies in FlytePropeller -================================================= - -=================================== ================================================================================================================================== - Description of main events for workflow execution ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ - Events Description -=================================== ================================================================================================================================== -Acceptance Measures the time between when we receive service call to create an Execution (Unknown) and when it has moved to Queued. -Transition Latency Measures the latency between two consecutive node executions, the time spent in Flyte engine. -Queuing Latency Measures the latency between the time a node's been queued to the time the handler reported the executable moved to running state. -Task Execution Actual time spent executing user code -Repeat steps 2-4 for every task -Transition Latency See #2 -Completion Latency Measures the time between when the WF moved to succeeding/failing state and when it finally moved to a terminal state. 
-=================================== ================================================================================================================================== - -.. note:: - **The core team is working on optimizing Completion Latency, Transition Latency, and Acceptance Latency.** \ No newline at end of file diff --git a/rsts/concepts/executions.rst b/rsts/concepts/executions.rst deleted file mode 100644 index b6ee602520..0000000000 --- a/rsts/concepts/executions.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. _divedeep-executions: - -########## -Executions -########## - -.. tags:: Basic, Glossary - -**Executions** are instances of workflows, nodes or tasks created in the system as a result of a user-requested execution or a scheduled execution. - -Typical Flow Using Flytectl ---------------------------- - -* When an execution of a workflow is triggered using UI/Flytecli/other stateless systems, the system first calls the ``getLaunchPlan`` endpoint and retrieves a launch plan matching the given version. The launch plan definition includes definitions of all input variables declared for the workflow. -* The user-side component then ensures that all the required inputs are supplied and requests the FlyteAdmin service for an execution. -* The FlyteAdmin service validates the inputs, ensuring that they are all specified and, if required, within the declared bounds. -* FlyteAdmin then fetches the previously validated and compiled workflow closure and translates it to an executable format with all the inputs. -* This executable workflow is launched on Kubernetes with an execution record in the database. - -.. 
image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/concepts/executions/flyte_wf_execution_overview.svg?sanitize=true \ No newline at end of file diff --git a/rsts/concepts/flyte_console.rst b/rsts/concepts/flyte_console.rst deleted file mode 100644 index 8e9484789b..0000000000 --- a/rsts/concepts/flyte_console.rst +++ /dev/null @@ -1,232 +0,0 @@ -.. _ui: - -How to Use Flyte UI -=================== - -.. tags:: Basic, UI - -Flyte UI is a web-based user interface for Flyte. It helps interact with Flyte objects and builds DAGs out of your workflows. - -With Flyte UI, you can: - -* Launch tasks -* Launch workflows -* View Versioned Tasks and Workflows -* Trigger Versioned Tasks and Workflows -* Inspect Executions through Inputs, Outputs, Logs, and Graphs -* Clone Executions -* Relaunch Executions -* Recover Executions - -.. note:: - `FlyteConsole `__ hosts the Flyte user interface code. - -Launching Workflows -------------------- - -You can launch a workflow by clicking on the **Launch Workflow** button. Workflows are viewable after they are registered. -The UI should be accessible at http://localhost:30081/console. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/launch_execution_001.png - :alt: "Launch Workflow" button - - Launch a workflow using the "Launch Workflow" button. - -| - -The end-to-end process from writing code to registering workflows is present in the :std:ref:`getting-started`. - -A pop-up window appears with input fields that the execution requires upon clicking the **Launch Workflow** button. -If the default inputs are given, they will be auto-populated. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/launch_execution_002.png - :alt: Launch form - - A pop-up window appears after clicking the "Launch Workflow" button. - -| - -An execution can be terminated/aborted by clicking on the **Terminate** button. 
- -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/launch_execution_003.png - :alt: "Terminate" button - - Terminate an execution by clicking the "Terminate" button. - -| - -Launching Tasks ---------------- - -You can launch a task by clicking on the **Launch Task** button. Tasks are viewable after they are registered. -The UI should be accessible at http://localhost:30081/console. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/launch_task_001.png - :alt: "Launch Task" button - - Launch a task by clicking the "Launch Task" button. - -| - -A pop-up window appears with input fields that the task requires and the role with which the task has to run on clicking the **Launch Task** button. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/launch_task_002.png - :alt: Launch form - - A pop-up window appears on clicking the "Launch Task" button. - -| - -Viewing Versioned Tasks and Workflows -------------------------------------- - -Every registered Flyte entity is tagged with a version. All the registered versions of workflows and tasks are viewable in the UI. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/versioned_executions.png - :alt: Versioned workflows - - View versioned workflows. - -| - -Triggering Versioned Tasks and Workflows ----------------------------------------- - -Every registered Flyte entity is versioned and can be triggered anytime. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/trigger_versioned_executions.png - :alt: Trigger versioned workflows - - Trigger versioned workflows. - -| - -Inspecting Executions ---------------------- - -Executions can be inspected through the UI. Inputs and Outputs for every node and execution can be viewed. - -| - -.. 
figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/inspect_execution_001.png - :alt: Node's inputs and outputs - - View every execution node's inputs and outputs. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/inspect_execution_002.png - :alt: Execution's inputs and outputs - - View every execution's inputs and outputs. - -| - -Logs are accessible as well. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/inspect_execution_003.png - :alt: Logs - - View Kubernetes logs. - -| - -Every execution has two views: Nodes and Graph. - -A node in the nodes view encapsulates an instance of a task, but it can also contain an entire subworkflow or trigger an external workflow. -More about nodes can be found in :std:ref:`divedeep-nodes`. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/inspect_execution_004.png - :alt: Nodes - - Inspect execution's nodes in the UI. - -| - -Graph view showcases a static DAG. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/inspect_execution_005.png - :alt: DAG - - Inspect execution's DAG in the UI. - -| - -Cloning Executions ------------------- - -An execution in the ``RUNNING`` state can be cloned. - -Click on the ellipsis on the top right corner of the UI. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/clone_execution_001.png - :alt: Clone execution - - Step 1: Click on the ellipsis. - -| - -Click on the **Clone Execution** button. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/clone_execution_002.png - :alt: Clone execution - - Step 2: "Clone execution" button. 
- -| - -Relaunching Executions ----------------------- - -The **Relaunch** button allows you to relaunch a terminated execution with pre-populated inputs. -This option can be helpful to try out a new version of a Flyte entity. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/relaunch_execution.png - :alt: Relaunch an execution - - Relaunch an execution. - -| - -A pop-up window appears on clicking the relaunch button, allowing you to modify the version and inputs. - -Recovering Executions ---------------------- - -Recovery mode allows you to recover an individual execution by copying all successful node executions and running from the failed nodes. -The **Recover** button helps recover a failed execution. - -| - -.. figure:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/flyteconsole/recover_execution.png - :alt: Recover an execution - - Recover an execution. - -| diff --git a/rsts/concepts/flyte_wf_tasks_high_level.png b/rsts/concepts/flyte_wf_tasks_high_level.png deleted file mode 100644 index 83e987ee18..0000000000 Binary files a/rsts/concepts/flyte_wf_tasks_high_level.png and /dev/null differ diff --git a/rsts/concepts/launchplans.rst b/rsts/concepts/launchplans.rst deleted file mode 100644 index 2efb6af998..0000000000 --- a/rsts/concepts/launchplans.rst +++ /dev/null @@ -1,57 +0,0 @@ -.. _divedeep-launchplans: - -Launch plans -============ - -.. tags:: Basic, Glossary, Design - -Launch plans help execute workflows. A workflow can be associated with multiple launch plans and launch plan versions, but an individual launch plan is always associated with a single, specific workflow. After creating a launch plan, it is easy to share and execute them. - -Launch plans provide a way to templatize Flyte workflow invocations. Launch plans contain a set of bound workflow inputs that are passed as arguments to create an execution. 
Launch plans do not necessarily contain the entire set of required workflow inputs, but a launch plan is always necessary to trigger an execution. Additional input arguments can be provided at execution time to supplement launch plan static input values. - -In addition to templatized inputs, launch plans allow you to run your workflow on one or multiple schedules. Each launch -plan can optionally define a single schedule (which can be easily disabled by disabling the launch plan) as well as -optional notifications. Refer to the :ref:`deployment-configuration-notifications` for a deep dive into available notifications. - -The Association between Workflows and LaunchPlans -------------------------------------------------- - -Every workflow comes with a `default` launch plan that has the same name as that of a workflow. The default launch plan is authored (in code) as part of creating a new workflow. -A launch plan version can only ever be mapped to one workflow version; meaning a launch plan version cannot be used twice. This is because part of what makes a new launch plan version is the mapping to the specific workflow version. - -.. note:: - Users rarely interact with the default launch plan. - -Suppose we have ``Workflow A`` in ``version 1``, ``LaunchPlans`` ``A`` and ``B`` in ``version 1``, and ``LaunchPlan`` ``B`` in ``version 2``, then: - -1. ``Workflow A`` can be associated with ``LaunchPlan A`` (version 1); -2. ``Workflow A`` can be associated with ``LaunchPlan B`` (different launch plan name; version 1); -3. ``Workflow A`` can be associated with ``LaunchPlan B`` (version 2). - - -What do Launch Plans Provide? ------------------------------- - -- One click invocation of workflows with predefined inputs and friendly launch plan names. -- Multiple schedules with different default values for inputs per workflow. -- Ability to easily enable and disable schedules. -- Can be created dynamically with flyteclient or statically using the Flyte SDK. 
-- Associate different notifications with your workflows. -- Restrict inputs to be passed to the workflows at launch time using the :ref:`fixed_inputs ` parameter. -- Multiple versions of the launch plan (with same name) with only one active version. Schedule will reflect only on the active launch plan version. - -.. _concepts-launchplans-inputs: - -Launch plan inputs ------------------- -Generally launch plan inputs correspond to their related workflow definition's inputs, in that the variable type and names are expected to match. Launch plans cannot introduce any inputs not defined in the core workflow definition. However, launch plan inputs differ slightly from workflow inputs in that the former are categorized into **default inputs** and **fixed inputs**. - -Default Inputs -^^^^^^^^^^^^^^ -Default inputs behave much like default workflow inputs. As their name implies, default inputs provide default workflow input values at execution time in the absence of any dynamically provided values. - -.. _fixed_inputs: - -Fixed Inputs -^^^^^^^^^^^^ -Fixed inputs cannot be overridden. If a workflow is executed with a launch plan and dynamic inputs that attempt to redefine the launch plan's fixed inputs, the execution creation request *will fail*. diff --git a/rsts/concepts/nodes.rst b/rsts/concepts/nodes.rst deleted file mode 100644 index d67c15457c..0000000000 --- a/rsts/concepts/nodes.rst +++ /dev/null @@ -1,34 +0,0 @@ -.. _divedeep-nodes: - -Nodes -===== - -.. tags:: Basic, Glossary - -A node represents a unit of execution or work within a workflow. Ordinarily, a node encapsulates an instance of -a :ref:`task `, but it can also contain an entire subworkflow or trigger an external workflow. -Nodes can have inputs and outputs, which are used to coordinate task inputs and outputs. -Moreover, node outputs can be used as inputs to other nodes within a workflow. - -Tasks are always encapsulated within a node. 
Like tasks, nodes can come in a variety of flavors determined by their *target*. -These targets include :ref:`task nodes `, :ref:`workflow nodes `, and :ref:`branch nodes `. - -.. _divedeep-task-nodes: - -Task Nodes ----------- - -Tasks referenced in a workflow are always enclosed in nodes. This extends to all task types. -For example, an array task will be enclosed by a single node. - -.. _divedeep-workflow-nodes: - -Workflow Nodes --------------- -A node can contain an entire sub-workflow. Since workflow executions always require a launch plan, workflow nodes have a reference to a launch plan to trigger their enclosed workflows. - -.. _divedeep-branch-nodes: - -Branch Nodes ------------- -Branch nodes alter the flow of the workflow graph. Conditions at runtime are evaluated to determine the control flow. diff --git a/rsts/concepts/projects.rst b/rsts/concepts/projects.rst deleted file mode 100644 index 99ed0daf3f..0000000000 --- a/rsts/concepts/projects.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. _divedeep-projects: - -Projects -======== - -.. tags:: Basic, Glossary - -A project in Flyte is a group of :ref:`workflows ` and :ref:`tasks ` tied together to achieve a goal. - -A Flyte project can map to an engineering project or everything that's owned by a team or an individual. There cannot be multiple projects with the same name in Flyte. - -Since the fully-qualified name for tasks and workflows include the project and domain name, the task/workflow names are only required to be unique within a project. The workflows in a project ``A`` can refer to tasks and workflows in other projects using the fully-qualified name. - -Flyte allows users to set resource limits and provides basic reports and dashboards automatically for each project. The information captured in these reports includes workflow/task level insights, resource usage, and billing information. 
\ No newline at end of file diff --git a/rsts/concepts/registration.rst b/rsts/concepts/registration.rst deleted file mode 100644 index bc745f7a0f..0000000000 --- a/rsts/concepts/registration.rst +++ /dev/null @@ -1,33 +0,0 @@ -.. _divedeep-registration: - -############ -Registration -############ - -.. tags:: Basic, Glossary, Design - -During registration, Flyte validates the workflow structure and saves the workflow. The registration process also updates the workflow graph. - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/concepts/executions/flyte_wf_registration_overview.svg?sanitize=true - -Typical Flow -------------- -The following steps elaborate on the specifics of the registration process: - -* Define the tasks using the :py:mod:`Flytekit ` Task Definition language. -* Define a workflow using the :py:mod:`Flytekit ` Workflow definition language. -* Use `flytectl register CLI `__ to compile the tasks into their serialized representation as described in :std:ref:`Flyte Specification language `. During this, the task representation is bound to a container that constitutes the code for the task. This associated entity is registered with FlyteAdmin using the registerTask API. -* Use flytectl register CLI to compile the workflow into their serialized representation as described in :std:ref:`Flyte Specification language `. The referenced tasks are replaced by their FlyteAdmin registered Identifiers, obtained in the previous step. The associated entity is registered with FlyteAdmin using the registerWorkflow API. -* Launch an execution using the FlyteAdmin launch execution API, which requires the necessary inputs provided. This is automatically done if the user uses flytectl to launch the execution. -* Use the FlyteAdmin read APIs to get details of the execution, monitor it to completion, or retrieve a historical execution. 
-* **OR** use the FlyteConsole to visualize the execution in real time as it progresses or visualize any historical execution. The console makes it easy to view debugging information for the execution. -* Set specific rules such as *notification* on **failure** or **success** or publish all events in the execution to a pub-sub system. -* Query the datastore to get a summary of all the executions and the compute resources consumed. - -.. note:: - Workflows and tasks are purely specifications and can be provided using tools like ``YAML``, ``JSON``, ``protobuf binary`` or any other programming language, and hence registration is possible using other tools. Contributions welcome! - -Registration in the Backend ---------------------------- - -When FlyteAdmin receives a workflow registration request, it uses the workflow compiler to compile and validate the workflow. It also fetches all the referenced tasks and creates a complete workflow closure, which is stored in the metastore. If the workflow compilation fails, the compiler returns an error to the client. diff --git a/rsts/concepts/schedules.rst b/rsts/concepts/schedules.rst deleted file mode 100644 index 34644b217b..0000000000 --- a/rsts/concepts/schedules.rst +++ /dev/null @@ -1,102 +0,0 @@ -.. _concepts-schedules: - -Schedules -========= - -.. tags:: Basic, Glossary - -Workflows can be run automatically using :ref:`schedules ` associated with launch plans. - -Only one launch plan version for a given {Project, Domain, Name} combination can be active, which means only one schedule can be active for a launch plan. This is because a single active schedule can exist across all versions of the launch plan. - -A :ref:`workflow ` version can have multiple schedules associated with it, given that these schedules exist as versions of different launch plans. - -Creating a new schedule creates a new version of the launch plan. 
-If you wish to change a schedule, you will have to create a new version of that launch plan since a **schedule cannot be edited**. - -FlyteAdmin keeps track of the newly-added schedules, and searches through all the versions of launch plans to set them to 'deactivated'. - -The launch plan versions with schedules that were previously deactivated can be manually used, by clicking on the launch button and selecting the specific launch plan version. - -Let's now look at how schedules can be defined through cron_expression_ or rate_unit_. - -.. _cron_expression: - -Cron Expression ---------------- -Cron expression strings use :ref:`this ` syntax. They are validated at launch plan registration time. - -.. _rate_unit: - -Format ------- - -A cron expression represents a set of times, with the help of 5 space-separated fields. - -.. _cron_expression_table: - -+--------------+------------+-----------------+----------------------------+ -| Field name | Mandatory? | Allowed values | Allowed special characters | -+==============+============+=================+============================+ -| Minutes | Yes | 0-59 | * / , - | -+--------------+------------+-----------------+----------------------------+ -| Hours | Yes | 0-23 | * / , - | -+--------------+------------+-----------------+----------------------------+ -| Day of month | Yes | 1-31 | * / , - ? | -+--------------+------------+-----------------+----------------------------+ -| Month | Yes | 1-12 or JAN-DEC | * / , - | -+--------------+------------+-----------------+----------------------------+ -| Day of week | Yes | 0-6 or SUN-SAT | * / , - ? | -+--------------+------------+-----------------+----------------------------+ - -**Note**: The 'Month' and 'Day of week' fields are case insensitive. - - -Cron schedules --------------- -An incorrect cron schedule expression leads to a failure in triggering the schedule. :ref:`Here ` is a table that shows the format of a cron expression. - -Below is another example: - -.. 
code-block:: default - - cron_lp_every_min_of_hour = LaunchPlan.get_or_create( - name="my_cron_scheduled_lp", - workflow=date_formatter_wf, - schedule=CronSchedule( - # Note that kickoff_time_input_arg matches the workflow input we defined above: kickoff_time - # But in case you are using the AWS scheme of schedules and not using the native scheduler then switch over the schedule parameter with cron_expression - schedule="@hourly", # Following schedule runs every hour at beginning of the hour - kickoff_time_input_arg="kickoff_time", - ), - - ) - - -Fixed rate schedules ----------------------- -Instead of cron schedules, fixed rate schedules can be used. - -You can specify the duration in the schedule using `timedelta`, that supports `minutes`, `hours`, `days` and `weeks`. - -:ref:`Here ` is an example with duration in `minutes`. - -Below is an example with duration in `days`. - -.. code-block:: default - - fixed_rate_lp_days = LaunchPlan.get_or_create( - name="my_fixed_rate_lp_days", - workflow=positive_wf, - # Note that the above workflow doesn't accept any kickoff time arguments. - # We omit the ``kickoff_time_input_arg`` from the FixedRate schedule invocation - schedule=FixedRate(duration=timedelta(days=1)), - fixed_inputs={"name": "you"}, - -) - - -Rate Unit ---------- - -Schedules can also be defined using fixed rates in units of **days**, **hours** and **minutes**. diff --git a/rsts/concepts/state_machine.rst b/rsts/concepts/state_machine.rst deleted file mode 100644 index ce570e2f3e..0000000000 --- a/rsts/concepts/state_machine.rst +++ /dev/null @@ -1,154 +0,0 @@ -.. _divedeep-state-machine: - -################################################ -Understanding the State Transition in a Workflow -################################################ - -.. tags:: Basic, Design - -High Level Overview of How a Workflow Progresses to Success -=========================================================== - -.. 
mermaid:: - - flowchart TD - id1(( )) - id1 --> Ready - Ready --> Running - subgraph Running - id2(( )) - id2 --> NodeQueued - NodeQueued --> NodeRunning - subgraph NodeRunning - id3(( )) - id3 --> TaskQueued - TaskQueued --> TaskRunning - TaskRunning --> TaskSuccess - end - TaskSuccess --> NodeSuccess - end - NodeSuccess --> Success - - -This state diagram illustrates a high-level, simplistic view of the state transitions that a workflow with a single task and node would go through as the user observes success. - -The following sections explain the various observable (and some hidden) states for workflow, node, and task state transitions. - -Workflow States -=============== - -.. mermaid:: - - flowchart TD - Queued -->|On system errors more than threshold| Aborted - Queued --> Ready - Ready--> |Write inputs to workflow| Running - Running--> |On system error| Running - Running--> |On all Nodes Success| Succeeding - Succeeding--> |On successful event send to Admin| Succeeded - Succeeding--> |On system error| Succeeding - Ready--> |On precondition failure| Failing - Running--> |On any Node Failure| Failing - Ready--> |On user initiated abort| Aborting - Running--> |On user initiated abort| Aborting - Succeeding--> |On user initiated abort| Aborting - Failing--> |If Failure node exists| HandleFailureNode - Failing--> |On user initiated abort| Aborting - HandleFailureNode--> |On completing failure node| Failed - HandleFailureNode--> |On user initiated abort| Aborting - Failing--> |On successful send of Failure node| Failed - Aborting--> |On successful event send to Admin| Aborted - -A workflow always starts in the ``Ready`` state and ends either in ``Failed``, ``Succeeded``, or ``Aborted`` state. -Any system error within a state causes a retry on that state. These retries are capped by :ref:`system retries ` which eventually lead to an ``Aborted`` state if the failure persists. 
- -Every transition between states is recorded in FlyteAdmin using :std:ref:`workflowexecutionevent `. - -The phases in the above state diagram are captured in the admin database as specified here :std:ref:`workflowexecution.phase ` and are sent as a part of the Execution event. - -The state machine specification for the illustration can be found `here `__. - - -Node States -=========== - -.. mermaid:: - - flowchart TD - id1(( )) - id1-->NotYetStarted - id1-->|Will stop the node execution |Aborted - NotYetStarted-->|If all upstream nodes are ready, i.e, inputs are ready | Queued - NotYetStarted--> |If the branch was not taken |Skipped - Queued-->|Start task execution- attempt 0 | Running - Running-->|If task timeout has elapsed and retry_attempts >= max_retries|TimingOut - Running-->|Internal state|Succeeding - Running-->|For dynamic nodes generating workflows| DynamicRunning - DynamicRunning-->TimingOut - DynamicRunning-->RetryableFailure - TimingOut-->|If total node timeout has elapsed|TimedOut - DynamicRunning-->Succeeding - Succeeding-->|User observes the task as succeeded| Succeeded - Running-->|on retryable failure| RetryableFailure - RetryableFailure-->|if retry_attempts < max_retries|Running - RetryableFailure-->|retry_attempts >= max_retries|Failing - Failing-->Failed - Succeeded-->id2(( )) - Failed-->id2(( )) - - -This state diagram illustrates the node transition through various states. This is the core finite state machine for a node. -From the user's perspective, a workflow simply consists of a sequence of tasks. But to Flyte, a workflow internally creates a meta entity known as **node**. - -Once a Workflow enters the ``Running`` state, it triggers the phantom ``start node`` of the workflow. The ``start node`` is considered to be the entry node of any workflow. -The ``start node`` begins by executing all its child-nodes using a modified Depth First Search algorithm recursively. 
- -Nodes can be of different types as listed below, but all the nodes traverse through the same transitions: - -#. Start Node - Only exists during the execution and is not modeled in the core spec. -#. :std:ref:`Task Node ` -#. :std:ref:`Branch Node ` -#. :std:ref:`Workflow Node ` -#. Dynamic Node - Just a task node that does not return output but constitutes a dynamic workflow. - When the task runs, it remains in the ``RUNNING`` state. Once the task completes and Flyte starts executing the dynamic workflow, - the overarching node that contains both the original task and the dynamic workflow enters `DYNAMIC_RUNNING` state. -#. End Node - Only exists during the execution and is not modeled in the core spec - -Every transition between states is recorded in FlyteAdmin using :std:ref:`nodeexecutionevent `. - -Every ``NodeExecutionEvent`` can have any :std:ref:`nodeexecution.phase `. - -.. note:: TODO: Add explanation for each phase. - -The state machine specification for the illustration can be found `here `__. - -Task States -=========== - -.. mermaid:: - - flowchart TD - id1(( )) - id1-->|Aborted by NodeHandler- timeouts, external abort, etc,.| NotReady - id1-->Aborted - NotReady-->|Optional-Blocked on resource quota or resource pool | WaitingForResources - WaitingForResources--> |Optional- Has been submitted, but hasn't started |Queued - Queued-->|Optional- Prestart initialization | Initializing - Initializing-->|Actual execution of user code has started|Running - Running-->|Successful execution|Success - Running-->|Failed with a retryable error|RetryableFailure - Running-->|Unrecoverable failure, will stop all execution|PermanentFailure - Success-->id2(( )) - RetryableFailure-->id2(( )) - PermanentFailure-->id2(( )) - - -The state diagram above illustrates the various states through which a task transitions. This is the core finite state machine for a task. - -Every transition between states is recorded in FlyteAdmin using :std:ref:`taskexecutionevent `. 
- -Every ``TaskExecutionEvent`` can have any :std:ref:`taskexecution.phase `. - -.. note:: TODO: Add explanation for each phase. - -The state machine specification for the illustration can be found `here `__. diff --git a/rsts/concepts/tasks.rst b/rsts/concepts/tasks.rst deleted file mode 100644 index cd1059d2f4..0000000000 --- a/rsts/concepts/tasks.rst +++ /dev/null @@ -1,122 +0,0 @@ -.. _divedeep-tasks: - -Tasks -===== - -.. tags:: Basic, Glossary - -Tasks are fully independent units of execution and first-class entities of Flyte. -They are the fundamental building blocks and extension points that encapsulate the users' code. - -Characteristics ---------------- - -A Flyte task is characterized by: - -1. A combination of :ref:`projects ` and :ref:`domains `, -2. A unique unicode name (we recommend it not to exceed 32 characters), -3. A version string, and/or -4. *Optional* Task interface definition. - - For tasks to exchange data with each other, a task can define a signature (much like a function/method - signature in programming languages). A task interface defines the input and output variables — - :std:ref:`variablesentry ` - and their types, :std:ref:`literaltype `. - -Can "X" Be a Flyte Task? -------------------------- - -When deciding if a unit of execution constitutes a Flyte task, consider these questions: - -- Is there a well-defined graceful/successful exit criteria for the task? A task is expected to exit after completion of input processing. -- Is it repeatable? Under certain circumstances, a task might be retried, rerun, etc. with the same inputs. It is expected - to produce the same output every single time. For example, avoid using random number generators with current clock as seed. Use a system-provided clock as the seed instead. -- Is it a pure function, i.e., does it have side effects that are unknown to the system (calls a web-service)? It is recommended to avoid side-effects in tasks. 
When side-effects are evident, ensure that the operations are idempotent. - -Dynamic Tasks --------------- - -"Dynamic tasks" is a misnomer. -Flyte is one-of-a-kind workflow engine that ships with the concept of truly `Dynamic Workflows `__! -Users can generate workflows in reaction to user inputs or computed values at runtime. -These executions are evaluated to generate a static graph before execution. - -Extending Task ---------------- - -Plugins -^^^^^^^ - -Flyte exposes an extensible model to express tasks in an execution-independent language. -It contains first-class task plugins (for example: `Papermill `__, -`Great Expectations `__, and :ref:`more `.) -that execute the Flyte tasks. -Almost any action can be implemented and introduced into Flyte as a "Plugin", which includes: - -- Tasks that run queries on distributed data warehouses like Redshift, Hive, Snowflake, etc. -- Tasks that run executions on compute engines like Spark, Flink, AWS Sagemaker, AWS Batch, Kubernetes pods, jobs, etc. -- Tasks that call web services. - -Flyte ships with certain defaults, for example, running a simple Python function does not need any hosted service. Flyte knows how to -execute these kinds of tasks on Kubernetes. It turns out these are the vast majority of tasks in machine learning, and Flyte is adept at -handling an enormous scale on Kubernetes. This is achieved by implementing a unique scheduler on Kubernetes. - -Types -^^^^^ - -It is impossible to define the unit of execution of a task in the same way for all tasks. Hence, Flyte allows for different task -types in the system. Flyte has a set of defined, battle-tested task types. It allows for a flexible model to -:std:ref:`define new types `. - -Inherent Features ------------------ - -Fault tolerance -^^^^^^^^^^^^^^^ - -In any distributed system, failure is inevitable. Allowing users to design a fault-tolerant system (e.g. workflow) is an inherent goal of Flyte. 
-At a high level, tasks offer two parameters to achieve fault tolerance: - -**Retries** - -Tasks can define a retry strategy to let the system know how to handle failures (For example: retry 3 times on any kind of error). - -There are two kinds of retries: - -1. System retry: It is a system-defined, recoverable failure that is used when system failures occur. The number of retries is validated against the number of system retries. - -.. _system-retry: - -System retry can be of two types: - -- **Downstream System Retry**: When a downstream system (or service) fails, or remote service is not contactable, the failure is retried against the number of retries set `here `__. This performs end-to-end system retry against the node whenever the task fails with a system error. This is useful when the downstream service throws a 500 error, abrupt network failure, etc. - -- **Transient Failure Retry**: This retry mechanism offers resiliency against transient failures, which are opaque to the user. It is tracked across the entire duration of execution. It helps Flyte entities and the additional services connected to Flyte like S3, to continue operating despite a system failure. Indeed, all transient failures are handled gracefully by Flyte! Moreover, in case of a transient failure retry, Flyte does not necessarily retry the entire task. “Retrying an entire task” means that the entire pod associated with the Flyte task would be rerun with a clean slate; instead, it just retries the atomic operation. For example, Flyte tries to persist the state until it can, exhausts the max retries, and backs off. - - To set a transient failure retry: - - - Update `MaxWorkflowRetries `__ in the propeller configuration. - - - Or update `max-workflow-retries `__ in helm. - -2. User retry: If a task fails to execute, it is retried for a specific number of times, and this number is set by the user in `TaskMetadata `__. The number of retries must be less than or equal to 10. - -.. 
note:: - - Recoverable vs. Non-Recoverable failures: Recoverable failures will be retried and counted against the task's retry count. Non-recoverable failures will just fail, i.e., the task isn’t retried irrespective of user/system retry configurations. All user exceptions are considered non-recoverable unless the exception is a subclass of FlyteRecoverableException. - - -.. note:: - - `RFC 3902 `_ implements an alternative, simplified retry behaviour with which both system and user retries are counted towards a single retry budget defined in the task decorator (thus, without a second retry budget defined in the platform configuration). The last retries are always performed on non-spot instances to guarantee completion. To activate this behaviour, set ``configmap.core.propeller.node-config.ignore-retry-cause`` to ``true`` in the helm values. - -**Timeouts** - -To ensure that the system is always making progress, tasks must be guaranteed to end gracefully/successfully. The system defines a default timeout period for the tasks. It is possible for task authors to define a timeout period, after which the task is marked as ``failure``. Note that a timed-out task will be retried if it has a retry strategy defined. The timeout can be handled in the `TaskMetadata `__. - - -Caching/Memoization -^^^^^^^^^^^^^^^^^^^ - -Flyte supports memoization of task outputs to ensure that identical invocations of a task are not executed repeatedly, thereby saving compute resources and execution time. For example, if you wish to run the same piece of code multiple times, you can reuse the output instead of re-computing it. -For more information on memoization, refer to the :std:doc:`Caching Example `. diff --git a/rsts/concepts/versioning.rst b/rsts/concepts/versioning.rst deleted file mode 100644 index 42df830e6c..0000000000 --- a/rsts/concepts/versioning.rst +++ /dev/null @@ -1,104 +0,0 @@ -.. _divedeep-versioning: - -Versions -======== - -.. 
tags:: Basic, Glossary - -One of the most important features and reasons for certain design decisions in Flyte is the need for machine learning and data practitioners to experiment. -When users experiment, they do so in isolation and try multiple iterations. -Unlike traditional software, the users must conduct multiple experiments concurrently with different environments, algorithms, etc. -This may happen when multiple data scientists simultaneously iterate on the same workflow/pipeline. - -The cost of creating an independent infrastructure for each version is enormous and undesirable. -It is beneficial to share the same centralized infrastructure, where the burden of maintaining the infrastructure is with a central infrastructure team, -while the users can use it independently. This improves the cost of operation since the same infrastructure can be reused by multiple teams. - -Versioned workflows help users quickly reproduce prior results or identify the source of previous successful experiments. - -Why Do You Need Versioning? ---------------------------- - -Versioning is required to: - -- Work on the same project concurrently and identify the version/experiment that was successful. -- Capture the environment for a version and independently launch it. -- Visualize prior runs and tie them to experiment results. -- Rollback to production deployments in case of failures with ease. -- Execute multiple experiments in production, which may use different training or data processing algorithms. -- Understand how a specific system evolved and answer questions related to the effectiveness of a specific strategy. 
- -Operational Benefits of Completely Versioned Workflows/Pipelines -------------------------------------------------------------------- - -The entire workflow in Flyte is versioned and all tasks and entities are immutable which makes it possible to completely change the structure of a workflow between versions, without worrying about the consequences for the pipelines in production. -This hermetic property makes it effortless to manage and deploy new workflow versions and is important for workflows that are long-running. -If a workflow execution is in progress and another new workflow version has been activated, Flyte guarantees that the execution of the old version continues unhindered. - -Consider a scenario where you need to run all the previous executions if there's a bug to be fixed. -Simply fixing the bug in the task may not solve the problem. -Moreover, fixing bugs involves code changes, which may affect the workflow structure. -Flyte addresses this using two properties: - -1. Since the entire workflow is versioned, changing the structure has no impact on the existing execution, and the workflow state won't be corrupted. -2. Flyte provides caching/memoization of outputs. As long as the tasks and their behavior have not changed, it is possible to move them around and still recover their previous outputs, without having to rerun the tasks. This strategy will work even if the workflow changes are in a task. - -Let us take a sample workflow: - -.. mermaid:: - - graph TD; - A-->B; - B-->C; - C-->D; - -In the above graph, let us assume that task `C` fails. It is then possible to simply fix `C` and ``relaunch`` the previous execution (maintaining the inputs etc). This will not re-run tasks ``A``, and ``B`` as long as they are marked as `cache=True`. - -Now, let us consider that the only solution to fix the bug is to change the graph structure and introduce a new step ``B1`` that short circuits the execution to ``D``: - -.. 
mermaid:: - - graph TD; - A-->B; - B-->B1; - B1-->D; - B1-->C; - C-->D; - -The same ``cache=True`` will handle this complicated situation as well. - -Why Is Versioning Hard? ------------------------ - -Git has become the defacto-standard in version control for code, making it easy to work on branches, merge them, and revert unwanted changes. -But achieving this for a live (running) algorithm usually requires the entire infrastructure to be associated and potentially re-created for every execution. - -How Is Versioning Tied to Reproducibility? ------------------------------------------- - -Workflows can be reproduced without explicit versioning within the system. -To reproduce a past experiment, users need to identify the source code and resurrect any dependencies that the code may have used (for example, TensorFlow 1.x instead of TensorFlow 2.x, or specific Python libraries). -It is also required to instantiate the infrastructure that the previous version may have used. If not recorded, you'll have to ensure that the previously used dataset (say) can be reconstructed. - -This is exactly how Flyte was conceived! - -In Flyte, every task is versioned, and it precisely captures the dependency set. For external tasks, memoization is recommended so that the constructed dataset can be cached on the Flyte side. This way, one can guarantee reproducible behavior from the external systems. - -Moreover, every piece of code is registered with the version of the code that was used to create the instance. -Therefore, users can easily construct the data lineage for all the parts of the workflow. - -What Is the Cost of Versioning & Reproducibility? -------------------------------------------------- - -One of the costs of versioning and allowing on-demand reproducibility is the need to re-instantiate the infrastructure from scratch. -This may sometimes result in additional overhead. 
However, the advent of Docker containers and Kubernetes has made it possible to build a platform to achieve these goals. - -.. admonition:: Coming soon! - - We are working on reducing the penalty of on-demand infrastructure creation while still maintaining the guarantees. Stay tuned! - -What Is the Best Way to Version Your Tasks and Workflows? ---------------------------------------------------------- - -The best way to version tasks and workflows is to independently version every task with the GIT-SHA or hash of the entire code artifact. -The workflows are also versioned using the GIT-SHA of the containing repository. diff --git a/rsts/concepts/workflow_lifecycle.rst b/rsts/concepts/workflow_lifecycle.rst deleted file mode 100644 index efb11e52d8..0000000000 --- a/rsts/concepts/workflow_lifecycle.rst +++ /dev/null @@ -1,246 +0,0 @@ -.. _workflow-lifecycle: - -################################################################# -Understand the Lifecycle of a Flyte Workflow -################################################################# - -.. tags:: Basic, Design - -Let's understand how Flyte's plugin machinery works and how information flows from one component to another in Flyte. - -Under the hood, Flyte relies on a primitive called “Plugins”. Every task that you run on Flyte is powered by a plugin. Some of these plugins are native and guaranteed by Flyte system. These native plugins, for example, run your Flyte tasks inside a k8s pod. There are three native plugins, namely, ``Container``, ``K8sPod``, and ``Sql``. - -Moreover, there are plugins that are actual extensions; they create additional infrastructure and communicate with SaaS on your behalf. Examples include :ref:`Spark `, :ref:`AWS Athena `, etc. - -A plugin requires code to live in multiple locations. - -1. Some parts of plugins logic resides in Flytekit's SDK. This let users define tasks. You can find this logic in Flytekit’s Python (https://github.com/flyteorg/flytekit/tree/master/plugins). 
Think of this as a client for an RPC service or a web service - -2. Another big chunk of plugins logic lives in - `Flyteplugins `__. This is a library that gets loaded into `FlytePropeller `__. - FlytePropeller (a Kubernetes operator) loads Flyteplugins upon starting. - FlytePropeller is aware of the plugins and their dependency on task execution. - However, FlytePropeller is unaware of how these plugins are executed. - ------------- - -To better Illustrate how things work, lets take for example the “Spark” -plugin and understand what is the sequence of steps that take place for -it to work. - -The Spark plugin lets a user define a task that has access to a Spark Session. -In the background Flyte will provide all the needed infrastructure such that by the time the declared task needs to run, all needed Spark infrastructure is ready and running. - -1. User codes in python a task that uses Spark (See code below) - -.. code:: python - - @task( - task_config=Spark( - spark_conf={ - "spark.driver.memory": "1000M", - "spark.executor.instances": "2", - "spark.driver.cores": "1", - } - ) - ) - def hello_spark(i: int) -> float: - ... - ... - -As mentioned earlier some part of plugin logic lives on the SDK. In this -case think of ``Spark`` data class here as a placeholder for all the -Spark settings that we need our plugin to know. We need to pass this -data across multiple places. This is the config that Flyte operator (Flytepropeller) -will need in order to build the needed spark cluster. ``Spark`` class also tells -Flytekit’s SDK that this task will run as a ``PysparkFunctionTask`` -because ``task_config`` points to a ``Spark`` object instance, this is -clearly illustrated `in spark plugin registration step run in the -background `__ - -2. Once the user has finished writing needed Workflows. A packaging step - is needed before user can run the workflows. This packaging step - transforms workflows and tasks we described in python into a Protobuf - representation. 
This protobuf representation is used by Flyte across its multiple codebases. For - further details on the protobuf representation check `FlyteIdl - repository `__ . Package step is carried out by the sdk tooling you are using. - -This serialization step will transform our ``hello_spark`` task into a -protobuf representation. It will also transform other tasks, workflows -and launch plans to a protobuf representation. - -Our ``hello_spark`` protobuf representation will look as below. A Task -is serialized as a -`TaskTemplate `__ -as defined in ``FlyteIDL``. - -:: - - Id: Task, "example.example.hello_spark" - Type: "Spark" - Metadata: - runtime: - type: FLYTE_SDK - version: 1.0.3 - flavor: python - - interface: - inputs: - i : - type : simple:Integer - description: "i" - outputs: - o0: - type: FLOAT - description: o0 - custom: - executorpath: "/opt/venv/bin/python3" - mainApplicationFile: /opt/venv/bin/entrypoint.py - sparkConf: - spark.driver.cores: 1 - spark.executor.instances: 2 - spark.driver.memory: 1000M - - - Container: - image: "hello_world:1" - args: - [ - "pyflyte-execute" - "--inputs" - "{{.input}}" - "--output-prefix" - "{{.outputPrefix}}" - "--raw-output-data-prefix" - "{{.rawOutputDataPrefix}}" - "--checkpoint-path" - "{{.checkpointOutputPrefix}}" - "--prev-checkpoint" - "{{.prevCheckpointPrefix}}" - "--resolver" - "flytekit.core.python_auto_container.default_task_resolver" - "--" - "task-module" - "example.example" - "task-name" - "hello_spark" - ] - -This representation is generated within Flytekit. Essentially the SDK is -generating the instructions that Flyte’s kubernetes operator needs to -know in order to run this task at a later stage. - -The ``Type`` field is really important as we will see later this will be -used by Flytepropeller (Kubernetes Operator) to know “how” to execute -this task. - -``Interface`` contains information about what are the inputs and outputs -of our task. Flyte uses this interface to check if tasks are composible. 
- -``Custom`` is a collection of arbitrary Key/Values, think of it as a -Json dict that any plugin can define as it wishes. In this case the -Spark plugin expects all its particular settings in this field i.e: -Spark workers, driver memory etc. - -`Container `__ -is part of Flyte’s IDL primitives. Essentially any Flyte task is ran as -either three primitives a ``Container`` a ``K8sPod`` or ``Sql``. Every -task contains a ``Target`` which has to be either of these. In this -particular case, our Spark cluster is a ``Container`` target. A -``Container`` specifies all the needed parameters you would in a K8s -ContainerSpec i.e: What docker image to run, what is the command that -will be ran, args etc. - -It is important for the reader to note that Flyte expects to run in a -container that has an entrypoint called ``pyflyte-execute``. This -entrypoint is provided when you ``pip install flytekit``. This -entrypoint and flytekit is what provides a lot of the plumbing logic -inside Flyte. For example It is this entrypoint what automagically -deserializes parquet dataframes an injects them to our task’s functions -if need be. - -It should be clear to the reader that a lot of parameters are surrounded -by ``{}`` these are template variables that are to be rendered at -execution time. - -What is important from this representation is that it contains all the -information that Flyte’s operator needs to know to execute this task: It -is a ``"Spark"`` task, it has a function signature (inputs and outputs), -it tells what docker image to run, and finally, it tells what spark -settings are needed for the cluster. - -For more information on why this task contains these fields check -``TaskTemplate`` in `FlyteIDL -repository `__. -I strongly advice you to take a look at the data structures in this file -as they provide good insight in the interfaces used all across Flyte’s -codebases. - -3. Once user has packaged workflows and tasks then a registration step - is needed. 
During registration Flyte adds these protocolbuffer files to its - database, essentially making these tasks and workflows runnable for - the user. Registration is done via `Flytectl ` __ - -4. At somepoint a Flyte user will trigger a Workflow run. The workflow - run will start running the defined DAG. Eventually our Spark task - will need to run,. This is where the second step of a plugin kicks - in. Flytepropeller (Kubernetes Operator) will realize that this is a - Task of type ``Spark`` and it will handle it differently. - - - FlytePropeller knows a task is of type Spark, because our ``TaskTemplate`` defined it so ``Type: Spark`` - - - Flyte has a ``PluginRegistry`` which has a dictionary from ``Task Type`` to ``Plugin Handlers``. - - - At run time Flytepropeller will run our task, Flytepropeller will figure out it is a Spark task, and then call the method ``BuildResource`` in Spark's plugin implementation. ``BuildResource`` is a method that each plugin has to implement. - - - `Plugin `__ is a Golang interface providing an important method ``BuildResource`` - - - Spark has its own Plugin defined `here in Flyteplugins repo `__ - -Inside Spark’s -`BuildResource `__ -method is where magic happens. At task runtime: - - - Flytepropeller will call ``BuildResource`` method. This method will ask for the ``Custom`` field, tasks flagged as ``type=Spark`` will have a dictionary containing all sort of Spark settings. - - - Using these settings Flytepropeller will use Spark’s K8s Operator to spawn a spark cluster on the go and run a Spark app (Our python task). - - - The spark app will run a pod with ``pyflyte-execute`` as entrypoint. All the inputs and outputs rendered to what they need to be i.e: paths to the actual data inputs instead of ``{{input}}`` - - - For more information on Spark’s K8s operator see : `SparkApplicationSpec `__ - -5. A pod with entrypoint to ``pyflyte-execute`` execute starts running (Spark App). 
- - - - ``pyflyte-execute`` provides all the plumbing magic that is needed. In this particular case, It will create a SparkSession and injects it somewhere so that it is ready for when the user defined python’s code starts running. Be aware that this is part of the SDK code (Flytekit). - - - ``pyflyte-execute`` points to `execute_task_cmd `__. - - This entrypoint does a lot of things: - - - Resolves the function that the user wants to run. i.e: where is the needed package where this function lives? . this is what ``"flytekit.core.python_auto_container.default_task_resolver"`` does - - - Downloads needed inputs and do a transformation if need be. I.e: is this a Dataframe? if so we need to transform it into a Pandas DF from parquet. - - - Calls `dispatch_execute `__ . This trigger the execution of our spark task. - - - `PysparkFunctionTask `__. defines what gets run just before the user's task code gets executed. It essentially creatse a spark session and then run the user function (The actual code we want to run!). - ------------- - -Recap ------ - -- Flyte requires coordination between multiple pieces of code. In this - case the SDK and FlytePropeller (K8s operator) -- `Flyte IDL (Interface Language Definition) `__ provides some primitives - for services to talk with each other. Flyte uses Procolbuffer - representations of these primitives -- Three important primitives are : ``Container``, ``K8sPod``, ``Sql``. - At the end of the day all tasks boil down to one of those three. -- github.com/flyteorg/FlytePlugins repository contains all code for plugins: - Spark, AWS Athena, BigQuery… -- Flyte entrypoints are the ones carrying out the heavy lifting: making - sure that inputs are downloaded and/or transformed as needed. -- When running workflows on Flyte, if we want to use Flyte underlying plumbing then - we should include Flyte entrypoints: either Jflyte or Flytekit. 
diff --git a/rsts/concepts/workflows.rst b/rsts/concepts/workflows.rst deleted file mode 100644 index 78a3dce3bd..0000000000 --- a/rsts/concepts/workflows.rst +++ /dev/null @@ -1,51 +0,0 @@ -.. _divedeep-workflows: - -Workflows -========= - -.. tags:: Basic, Glossary - -A workflow is a directed acyclic graph (DAG) of units of work encapsulated by :ref:`nodes `. -Specific instantiations of a workflow (commonly bound with input arguments) are referred to as **workflow executions**, -or just executions. In other words, a workflow is a template for an ordered task execution. - -Flyte workflows are defined in ``protobuf`` and the flytekit SDK facilitates writing workflows. Users can define workflows as a collection of nodes. -Nodes within a workflow can produce outputs that subsequent nodes could consume as inputs. These dependencies dictate the structure of the workflow. - -Workflows written using the SDK don't need to explicitly define nodes to enclose execution units (tasks, sub-workflows, launch plans); -they will be injected by the SDK and captured at registration time. - -Structure ---------- - -Workflows accept inputs and produce outputs and reuse task definitions across :ref:`projects ` and :ref:`domains `. Every workflow has a default :ref:`launchplan ` with the same name as that of the workflow. - -Workflow structure is flexible because: - -- Nodes can be executed in parallel. -- The same task definition can be re-used within a different workflow. -- A single workflow can contain any combination of task types. -- A workflow can contain a single functional node. -- A workflow can contain multiple nodes in all sorts of arrangements. -- A workflow can launch other workflows. - -At execution time, node executions are triggered as soon as their inputs are available. - -**Workflow nodes naturally run in parallel when possible**. 
-For example, when a workflow has five independent nodes, i.e., when these five nodes don't consume outputs produced by other nodes, -Flyte runs these nodes in parallel in accordance with the data and resource constraints. - -Flyte-Specific Structure -^^^^^^^^^^^^^^^^^^^^^^^^ - -During :ref:`registration `, Flyte validates the workflow structure and saves the workflow. -The registration process updates the workflow graph. -A compiled workflow will always have a start and end node injected into the workflow graph. -In addition, a failure handler will catch and process execution failures. - -Versioning ----------- - -Like :ref:`tasks `, workflows are versioned too. Registered workflows are immutable, i.e., an instance of a -workflow defined by a specific {Project, Domain, Name, Version} combination can't be updated. -Tasks referenced in a workflow version are immutable and are tied to specific tasks' versions. \ No newline at end of file diff --git a/rsts/conf.py b/rsts/conf.py deleted file mode 100644 index 8461fdcdfb..0000000000 --- a/rsts/conf.py +++ /dev/null @@ -1,273 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Configuration file for the Sphinx documentation builder. -# -# This file does only contain a selection of the most common options. For a -# full list see the documentation: -# http://www.sphinx-doc.org/en/stable/config - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-# -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - -import sphinx.application -import sphinx.errors - -sphinx.application.ExtensionError = sphinx.errors.ExtensionError - -# -- Project information ----------------------------------------------------- - -project = "Flyte" -copyright = "2022, Flyte Authors" -author = "Flyte" - -# The short X.Y version -version = "" -# The full version, including alpha/beta/rc tags -release = "1.10.6" - -# -- General configuration --------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosectionlabel", - "sphinx.ext.doctest", - "sphinx.ext.intersphinx", - "sphinx.ext.todo", - "sphinx.ext.coverage", - "sphinx.ext.ifconfig", - "sphinx.ext.viewcode", - "sphinx.ext.extlinks", - "sphinx.ext.napoleon", - "sphinx.ext.doctest", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx-prompt", - "sphinx_copybutton", - "sphinxext.remoteliteralinclude", - "sphinx_issues", - "sphinx_panels", - "sphinxcontrib.mermaid", - "sphinxcontrib.video", - "sphinxcontrib.youtube", - "sphinx_tabs.tabs", - "sphinx_tags", -] - -extlinks = { - "propeller": ("https://github.com/flyteorg/flytepropeller/tree/master/%s", ""), - "stdlib": ("https://github.com/flyteorg/flytestdlib/tree/master/%s", ""), - "kit": ("https://github.com/flyteorg/flytekit/tree/master/%s", ""), - "plugins": ("https://github.com/flyteorg/flyteplugins/tree/v0.1.4/%s", ""), - "idl": ("https://github.com/flyteorg/flyteidl/tree/v0.14.1/%s", ""), - "admin": ("https://github.com/flyteorg/flyteadmin/tree/master/%s", ""), - "cookbook": ("https://flytecookbook.readthedocs.io/en/latest/", None), -} - -# Add any paths that contain templates here, relative to this directory. 
-# templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = ".rst" - -# The master toctree document. -master_doc = "index" - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path . -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_favicon = "images/flyte_circle_gradient_1_4x4.png" -html_logo = "images/flyte_circle_gradient_1_4x4.png" -html_theme = "furo" -html_title = "Flyte" - -templates_path = ["_templates"] - -pygments_style = "tango" -pygments_dark_style = "native" - -html_theme_options = { - "light_css_variables": { - "color-brand-primary": "#4300c9", - "color-brand-content": "#4300c9", - }, - "dark_css_variables": { - "color-brand-primary": "#9D68E4", - "color-brand-content": "#9D68E4", - }, - # custom flyteorg furo theme options - "github_repo": "flyte", - "github_username": "flyteorg", - "github_commit": "master", - "docs_path": "rsts", # path to documentation source - # path to directory of sphinx gallery source files relative to repo root - "sphinx_gallery_src_dir": "cookbook", - # path to root directory containing auto-generated sphinx gallery examples - "sphinx_gallery_dest_dir": "auto", -} - -# Theme options are theme-specific and customize the look and feel of a theme -# further. 
For a list of options available for each theme, see the -# documentation. -# -# html_theme_options = {} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] -html_css_files = ["custom.css"] - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# The default sidebars (for documents that don't match any pattern) are -# defined by theme itself. Builtin themes are using these templates by -# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', -# 'searchbox.html']``. -# -# html_sidebars = {"**": ["logo-text.html", "globaltoc.html", "localtoc.html", "searchbox.html"]} - -# -- Options for HTMLHelp output --------------------------------------------- - -# Output file base name for HTML help builder. -htmlhelp_basename = "Flytedoc" - -# -- Options for LaTeX output ------------------------------------------------ - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, "Flyte.tex", "Flyte Documentation", "Flyte Authors", "manual"), -] - -# -- Options for manual page output ------------------------------------------ - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). 
-man_pages = [(master_doc, "flyte", "Flyte Documentation", [author], 1)] - -# -- Options for Texinfo output ---------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "Flyte", - "Flyte Documentation", - author, - "Flyte", - "Accelerate your ML and data workflows to production.", - "Miscellaneous", - ), -] - -# -- Extension configuration ------------------------------------------------- -autosectionlabel_prefix_document = True -autosectionlabel_maxdepth = 2 - -# Tags config -tags_create_tags = True -tags_page_title = "Tag" -tags_overview_title = "All Tags" - -# -- Options for intersphinx extension --------------------------------------- - -# Example configuration for intersphinx: refer to the Python standard library. -# intersphinx configuration. Uncomment the repeats with the local paths and update your username -# to help with local development. 
-intersphinx_mapping = { - "python": ("https://docs.python.org/3", None), - "numpy": ("https://numpy.org/doc/stable", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), - "torch": ("https://pytorch.org/docs/master/", None), - "scipy": ("https://docs.scipy.org/doc/scipy/reference", None), - "matplotlib": ("https://matplotlib.org", None), - "flytekit": ("https://flyte.readthedocs.io/projects/flytekit/en/latest/", None), - # "flytekit": ("/Users/yourusername/go/src/github.com/flyteorg/flytekit/docs.rst/build/html", None), - "flytectl": ("https://flytectl.readthedocs.io/en/latest/", None), - # "flytectl": ("/Users/yourusername/go/src/github.com/flyteorg/flytectl/docs/build/html", None), - "cookbook": ("https://flytecookbook.readthedocs.io/en/latest/", None), - "flyteidl": ("https://docs.flyte.org/projects/flyteidl/en/latest", None), -} - - -# Sphinx-mermaid config -mermaid_output_format = "raw" -mermaid_version = "latest" -mermaid_init_js = "mermaid.initialize({startOnLoad:false});" - -# Makes it so that only the command is copied, not the output -copybutton_prompt_text = "$ " - -# prevent css style tags from being copied by the copy button -copybutton_exclude = 'style[type="text/css"]' - -# -- Options for todo extension ---------------------------------------------- - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - -# -- Options for Sphix issues extension -------------------------------------- - -# GitHub repo -issues_github_path = "flyteorg/flyte" - -# equivalent to -issues_uri = "https://github.com/flyteorg/flyte/issues/{issue}" -issues_pr_uri = "https://github.com/flyteorg/flyte/pull/{pr}" -issues_commit_uri = "https://github.com/flyteorg/flyte/commit/{commit}" - -# Usage: -# See issue :issue:`42` -# See issues :issue:`12,13` -# See :issue:`sloria/konch#45`. 
-# See PR :pr:`58` diff --git a/rsts/deployment/agents/bigquery.rst b/rsts/deployment/agents/bigquery.rst deleted file mode 100644 index 31d4ac6a67..0000000000 --- a/rsts/deployment/agents/bigquery.rst +++ /dev/null @@ -1,106 +0,0 @@ -.. _deployment-agent-setup-bigquery: - -Google BigQuery Agent -====================== - -This guide provides an overview of setting up BigQuery agent in your Flyte deployment. -Please note that the BigQuery agent requires Flyte deployment in the GCP cloud; -it is not compatible with demo/AWS/Azure. - -Set up the GCP Flyte cluster ----------------------------- - -* Ensure you have a functional Flyte cluster running in `GCP `__. -* Create a service account for BigQuery. For more details, refer to: https://cloud.google.com/bigquery/docs/quickstarts/quickstart-client-libraries. -* Verify that you have the correct kubeconfig and have selected the appropriate Kubernetes context. -* Confirm that you have the correct Flytectl configuration at ``~/.flyte/config.yaml``. - -Specify agent configuration ----------------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - Edit the relevant YAML file to specify the agent. - - .. code-block:: bash - - kubectl edit configmap flyte-sandbox-config -n flyte - - .. code-block:: yaml - :emphasize-lines: 7,11,16 - - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - agent-service - default-for-task-types: - - container: container - - container_array: k8s-array - - bigquery_query_job_task: agent-service - - plugins: - agent-service: - supportedTaskTypes: - - bigquery_query_job_task - - .. group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and add the following configuration to it. - - .. 
code-block:: yaml - - configmap: - enabled_plugins: - # -- Tasks specific configuration [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/task/config#GetConfig) - tasks: - # -- Plugins configuration, [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/task/config#TaskPluginConfig) - task-plugins: - # -- [Enabled Plugins](https://pkg.go.dev/github.com/flyteorg/flyteplugins/go/tasks/config#Config). Enable sagemaker*, athena if you install the backend - enabled-plugins: - - container - - sidecar - - k8s-array - - agent-service - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - bigquery_query_job_task: agent-service - plugins: - agent-service: - supportedTaskTypes: - - bigquery_query_job_task - -Ensure that the propeller has the correct service account for BigQuery. - -Upgrade the Flyte Helm release ------------------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - .. code-block:: bash - - helm upgrade flyteorg/flyte-binary -n --values - - Replace ```` with the name of your release (e.g., ``flyte-backend``), - ```` with the name of your namespace (e.g., ``flyte``), - and ```` with the name of your YAML file. - - .. group-tab:: Flyte core - - .. code-block:: bash - - helm upgrade flyte/flyte-core -n --values values-override.yaml - - Replace ```` with the name of your release (e.g., ``flyte``) - - and ```` with the name of your namespace (e.g., ``flyte``). - -For bigquery plugin on the Flyte cluster, please refer to `Bigquery Plugin Example `_ diff --git a/rsts/deployment/agents/index.rst b/rsts/deployment/agents/index.rst deleted file mode 100644 index 4bc56f55c5..0000000000 --- a/rsts/deployment/agents/index.rst +++ /dev/null @@ -1,57 +0,0 @@ -.. _deployment-agent-setup: - -Agent Setup -=========== - -.. tags:: Agent, Integration, Data, Advanced - -Discover the process of setting up Agents for Flyte. - -.. 
panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. link-button:: deployment-agent-setup-bigquery - :type: ref - :text: Bigquery Agent - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up the Bigquery agent. - - --- - - .. link-button:: deployment-agent-setup-mmcloud - :type: ref - :text: MMCloud Agent - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up the MMCloud agent. - - --- - - .. link-button:: deployment-agent-setup-sensor - :type: ref - :text: Sensor Agent - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up the Sensor agent. - - - --- - - .. link-button:: deployment-agent-setup-databricks - :type: ref - :text: Databricks Agent - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up the Databricks agent. - -.. toctree:: - :maxdepth: 1 - :name: Agent setup - :hidden: - - bigquery - mmcloud - databricks - sensor diff --git a/rsts/deployment/agents/mmcloud.rst b/rsts/deployment/agents/mmcloud.rst deleted file mode 100644 index e2c2af9813..0000000000 --- a/rsts/deployment/agents/mmcloud.rst +++ /dev/null @@ -1,121 +0,0 @@ -.. _deployment-agent-setup-mmcloud: - -MMCloud Agent -================= - -MemVerge Memory Machine Cloud (MMCloud) empowers users to continuously optimize cloud resources during runtime, -safely execute stateful tasks on spot instances, -and monitor resource usage in real time. -These capabilities make it an excellent fit for long-running batch workloads. - -This guide provides an overview of how to set up MMCloud in your Flyte deployment. - -Set up MMCloud --------------- - -To run a Flyte workflow with Memory Machine Cloud, you will need to deploy Memory Machine Cloud. -Check out the `MMCloud User Guide `_ to get started! - -By the end of this step, you should have deployed an MMCloud OpCenter. - -Spin up a cluster ------------------ - -.. tabs:: - - .. group-tab:: Flyte binary - - You can spin up a demo cluster using the following command: - - .. 
code-block:: bash - - flytectl demo start - - Or install Flyte using the :ref:`flyte-binary helm chart `. - - .. group-tab:: Flyte core - - If you've installed Flyte using the - `flyte-core helm chart `__, please ensure: - - * You have the correct kubeconfig and have selected the correct Kubernetes context. - * You have configured the correct flytectl settings in ``~/.flyte/config.yaml``. - -.. note:: - - Add the Flyte chart repo to Helm if you're installing via the Helm charts. - - .. code-block:: bash - - helm repo add flyteorg https://flyteorg.github.io/flyte - -Specify agent configuration ----------------------------- - -Enable the MMCloud agent by adding the following config to the relevant YAML file(s): - -.. code-block:: yaml - - tasks: - task-plugins: - enabled-plugins: - - agent-service - default-for-task-types: - - mmcloud_task: agent-service - -.. code-block:: yaml - - plugins: - agent-service: - agents: - mmcloud-agent: - endpoint: - insecure: true - supportedTaskTypes: - - mmcloud_task - agentForTaskTypes: - - mmcloud_task: mmcloud-agent - -Substitute ```` with the endpoint of your MMCloud agent. - -Upgrade the deployment ----------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - .. tabs:: - - .. group-tab:: Demo cluster - - .. code-block:: bash - - kubectl rollout restart deployment flyte-sandbox -n flyte - - .. group-tab:: Helm chart - - .. code-block:: bash - - helm upgrade flyteorg/flyte-binary -n --values - - Replace ```` with the name of your release (e.g., ``flyte-backend``), - ```` with the name of your namespace (e.g., ``flyte``), - and ```` with the name of your YAML file. - - .. group-tab:: Flyte core - - .. code-block:: - - helm upgrade flyte/flyte-core -n --values values-override.yaml - - Replace ```` with the name of your release (e.g., ``flyte``) - and ```` with the name of your namespace (e.g., ``flyte``). - -Wait for the upgrade to complete. 
You can check the status of the deployment pods by running the following command: - -.. code-block:: - - kubectl get pods -n flyte - -For mmcloud plugin on the Flyte cluster, please refer to `Memory Machine Cloud Plugin Example `_ diff --git a/rsts/deployment/agents/sensor.rst b/rsts/deployment/agents/sensor.rst deleted file mode 100644 index fa886ca608..0000000000 --- a/rsts/deployment/agents/sensor.rst +++ /dev/null @@ -1,149 +0,0 @@ -.. _deployment-agent-setup-sensor: - -Sensor Agent -================= - -Sensor enables users to continuously check for a file or a condition to be met periodically. - -When the condition is met, the sensor will complete. - -This guide provides an overview of how to set up Sensor in your Flyte deployment. - -Spin up a cluster ------------------ - -.. tabs:: - - .. group-tab:: Flyte binary - - You can spin up a demo cluster using the following command: - - .. code-block:: bash - - flytectl demo start - - Or install Flyte using the :ref:`flyte-binary helm chart `. - - .. group-tab:: Flyte core - - If you've installed Flyte using the - `flyte-core helm chart `__, please ensure: - - * You have the correct kubeconfig and have selected the correct Kubernetes context. - * Confirm that you have the correct Flytectl configuration at ``~/.flyte/config.yaml``. - -.. note:: - - Add the Flyte chart repo to Helm if you're installing via the Helm charts. - - .. code-block:: bash - - helm repo add flyteorg https://flyteorg.github.io/flyte - -Specify agent configuration ----------------------------- - -Enable the Sensor agent by adding the following config to the relevant YAML file(s): - -.. tabs:: - - .. group-tab:: Flyte binary - - Edit the relevant YAML file to specify the agent. - - .. code-block:: bash - - kubectl edit configmap flyte-sandbox-config -n flyte - - .. 
code-block:: yaml - :emphasize-lines: 7,11,16 - - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - agent-service - default-for-task-types: - - container: container - - container_array: k8s-array - - sensor: agent-service - - plugins: - agent-service: - supportedTaskTypes: - - sensor - - .. group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and add the following configuration to it. - - .. code-block:: yaml - - configmap: - enabled_plugins: - # -- Tasks specific configuration [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/task/config#GetConfig) - tasks: - # -- Plugins configuration, [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/task/config#TaskPluginConfig) - task-plugins: - # -- [Enabled Plugins](https://pkg.go.dev/github.com/flyteorg/flyteplugins/go/tasks/config#Config). Enable sagemaker*, athena if you install the backend - enabled-plugins: - - container - - sidecar - - k8s-array - - agent-service - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - sensor: agent-service - plugins: - agent-service: - supportedTaskTypes: - - sensor - - -Upgrade the deployment ----------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - .. tabs:: - - .. group-tab:: Demo cluster - - .. code-block:: bash - - kubectl rollout restart deployment flyte-sandbox -n flyte - - .. group-tab:: Helm chart - - .. code-block:: bash - - helm upgrade flyteorg/flyte-binary -n --values - - Replace ```` with the name of your release (e.g., ``flyte-backend``), - ```` with the name of your namespace (e.g., ``flyte``), - and ```` with the name of your YAML file. - - .. group-tab:: Flyte core - - .. code-block:: - - helm upgrade flyte/flyte-core -n --values values-override.yaml - - Replace ```` with the name of your release (e.g., ``flyte``) - and ```` with the name of your namespace (e.g., ``flyte``). 
- -Wait for the upgrade to complete. - -You can check the status of the deployment pods by running the following command: - -.. code-block:: - - kubectl get pods -n flyte - -For sensor plugin on the Flyte cluster, please refer to `Sensor Plugin Example `_ diff --git a/rsts/deployment/configuration/auth_appendix.rst b/rsts/deployment/configuration/auth_appendix.rst deleted file mode 100644 index d8c96df1ee..0000000000 --- a/rsts/deployment/configuration/auth_appendix.rst +++ /dev/null @@ -1,139 +0,0 @@ -.. _deployment-configuration-auth-appendix: - -###################################### -Understanding Authentication -###################################### - -.. tags:: Authentication, Design, Advanced - -.. _deployment-auth-openid-appendix: - -************** -OpenID Connect -************** - -Flyte supports OpenID Connect. A defacto standard for user authentication. After configuring OpenID Connect, users -accessing flyte console or flytectl (or other 3rd party apps) will be prompted to authenticate using the configured -provider. - -.. mermaid:: - :alt: Flyte UI Swimlane - - sequenceDiagram - %%{config: { 'fontFamily': 'Menlo', 'fontSize': 10, 'fontWeight': 100} }%% - autonumber - User->>+Browser: /home - Browser->>+Console: /home - Console->>-Browser: 302 /login - Browser->>+Admin: /login - Admin->>-Browser: Idp.com/oidc - Browser->>+Idp: Idp.com/oidc - Idp->>-Browser: 302 /login - Browser->>-User: Enter user/pass - User->>+Browser: login - Browser->>+Idp: Submit username/pass - Idp->>-Browser: admin/?authCode= - Browser->>+Admin: admin/authCode= - Admin->>+Idp: Exchange Tokens - Idp->>-Admin: idt, at, rt - Admin->>+Browser: Write Cookies & Redirect to /console - Browser->>+Console: /home - Browser->>-User: Render /home - -.. _deployment-auth-oauth2-appendix: - -****** -OAuth2 -****** - -Flyte supports OAuth2 to control access to 3rd party and native apps. FlyteAdmin comes with a built in Authorization -Server that can perform 3-legged and 2-legged OAuth2 flows. 
It also supports delegating these responsibilities to an -external Authorization Server. - -Service Authentication using OAuth2 -=================================== - -Propeller (and potentially other non-user facing services) can also authenticate using ``client_credentials`` to the IdP and -be granted an ``access_token`` to be used with admin and other backend services. - -FlyteAdmin's builtin Authorization Server ------------------------------------------ - -.. mermaid:: - :alt: Service Authentication Swimlane - - sequenceDiagram - %%{config: { 'fontFamily': 'Menlo', 'fontSize': 10, 'fontWeight': 100} }%% - autonumber - Propeller->>+Admin: /token?client_creds&scope=https://admin/ - Admin->>-Propeller: access_token - Propeller->>+Admin: /list_projects?token=access_token - -External Authorization Server ------------------------------ - -.. mermaid:: - :alt: Service Authentication Swimlane - - sequenceDiagram - %%{config: { 'fontFamily': 'Menlo', 'fontSize': 10, 'fontWeight': 100} }%% - autonumber - Propeller->>+External Authorization Server: /token?client_creds&scope=https://admin/ - External Authorization Server->>-Propeller: access_token - Propeller->>+Admin: /list_projects?token=access_token - -User Authentication in other clients (e.g. Cli) using OAuth2-Pkce -================================================================== - -Users accessing backend services through Cli should be able to use OAuth2-Pkce flow to authenticate (in a browser) to the Idp and be issued -an access_token valid to communicate with the intended backend service on behalf of the user. - -FlyteAdmin's builtin Authorization Server ------------------------------------------ - -.. mermaid:: - :alt: CLI Authentication with Admin's own Authorization Server - - sequenceDiagram - %%{config: { 'fontFamily': 'Menlo', 'fontSize': 10, 'fontWeight': 100} }%% - autonumber - User->>+Cli: flytectl list-projects - Cli->>+Admin: admin/client-config - Admin->>-Cli: Client_id=, ... 
- Cli->>+Browser: /oauth2/authorize?pkce&code_challenge,client_id,scope - Browser->>+Admin: /oauth2/authorize?pkce... - Admin->>-Browser: 302 idp.com/login - Note over Browser,Admin: The prior OpenID Connect flow - Browser->>+Admin: admin/logged_in - Note over Browser,Admin: Potentially show custom consent screen - Admin->>-Browser: localhost/?authCode= - Browser->>+Cli: localhost/authCode= - Cli->>+Admin: /token?code,code_verifier - Admin->>-Cli: access_token - Cli->>+Admin: /projects/ + access_token - Admin->>-Cli: project1, project2 - -External Authorization Server ------------------------------ - -.. mermaid:: - :alt: CLI Authentication with an external Authorization Server - - sequenceDiagram - %%{config: { 'fontFamily': 'Menlo', 'fontSize': 10, 'fontWeight': 100} }%% - autonumber - User->>+Cli: flytectl list-projects - Cli->>+Admin: admin/client-config - Admin->>-Cli: Client_id=, ... - Cli->>+Browser: /oauth2/authorize?pkce&code_challenge,client_id,scope - Browser->>+ExternalIdp: /oauth2/authorize?pkce... - ExternalIdp->>-Browser: 302 idp.com/login - Note over Browser,ExternalIdp: The prior OpenID Connect flow - Browser->>+ExternalIdp: /logged_in - Note over Browser,ExternalIdp: Potentially show custom consent screen - ExternalIdp->>-Browser: localhost/?authCode= - Browser->>+Cli: localhost/authCode= - Cli->>+ExternalIdp: /token?code,code_verifier - ExternalIdp->>-Cli: access_token - Cli->>+Admin: /projects/ + access_token - Admin->>-Cli: project1, project2 diff --git a/rsts/deployment/configuration/auth_migration.rst b/rsts/deployment/configuration/auth_migration.rst deleted file mode 100644 index c982dca335..0000000000 --- a/rsts/deployment/configuration/auth_migration.rst +++ /dev/null @@ -1,162 +0,0 @@ -.. _deployment-configuration-auth-migration: - -#################################### -Migrating Your Authentication Config -#################################### - -.. 
tags:: Authentication, Infrastructure, Advanced - -Flyte previously shipped with only a barebones OIDC setup, and relied on an external authorization server. This -migration guide helps you move to Admin's own authorization server. - -Okta Config changes -=================== - -Using Okta as an example, you would have previously seen something like the following: - -* An Application (OpenID Connect Web) for FlyteAdmin itself (e.g. **0oal5rch46pVhCGF45d6**). -* An Application (OpenID Native app) for Flyte-cli/FlyteCTL (e.g. **0oal62nxuD6OSFSRq5d6**). - These two applications would be assigned to the relevant users. -* An Application (Web) for FlytePropeller (e.g. **0abc5rch46pVhCGF9876**). - This application would either use the default Authorization server, or you would create a new one. - -Admin Config Changes -==================== - -After Flyte version `0.13.0 `__, -you can still use the IdP as the Authorization Server if you wish. - -.. tabs:: - - .. group-tab:: < v0.13.0 - - .. code-block:: yaml - - server: - # ... other settings - security: - secure: false - useAuth: true - allowCors: true - allowedOrigins: - - "*" - allowedHeaders: - - "Content-Type" - oauth: - baseUrl: https://dev-62129345.okta.com/oauth2/default/ - scopes: - - profile - - openid - - email - claims: - iss: https://dev-62129345.okta.com/oauth2/default - aud: 0oal5rch46pVhCGF45d6 - clientId: 0oal5rch46pVhCGF45d6 - clientSecretFile: "/Users/ytong/etc/secrets/oauth/secret" - authorizeUrl: "https://dev-62129345.okta.com/oauth2/default/v1/authorize" - tokenUrl: "https://dev-62129345.okta.com/oauth2/default/v1/token" - callbackUrl: "http://localhost:8088/callback" - cookieHashKeyFile: "/Users/ytong/etc/secrets/hashkey/hashkey" - cookieBlockKeyFile: "/Users/ytong/etc/secrets/blockkey/blockkey" - redirectUrl: "/api/v1/projects" - thirdPartyConfig: - flyteClient: - clientId: 0oal62nxuD6OSFSRq5d6 - redirectUri: http://localhost:12345/callback - - .. group-tab:: >= v0.13.0 - - .. 
code-block:: yaml - - server: - # ... other settings - security: - secure: false - useAuth: true - allowCors: true - allowedOrigins: - - "*" - allowedHeaders: - - "Content-Type" - auth: - authorizedUris: - # This should point at your public http Uri. - - https://flyte.mycompany.com - # This will be used by internal services in the same namespace as flyteadmin - - http://flyteadmin:80 - # This will be used by internal services in the same cluster but different namespaces - - http://flyteadmin.flyte.svc.cluster.local:80 - userAuth: - openId: - # Put the URL of the OpenID Connect provider. - baseUrl: https://dev-62129345.okta.com/oauth2/default # Okta with a custom Authorization Server - scopes: - - profile - - openid - - offline_access # Uncomment if OIdC supports issuing refresh tokens. - # Replace with the client id created for Flyte. - clientId: 0oal5rch46pVhCGF45d6 - appAuth: - # External delegates app auth responsibilities to an external authorization server, Internal means FlyteAdmin does it itself - authServerType: External - thirdPartyConfig: - flyteClient: - clientId: 0oal62nxuD6OSFSRq5d6 - redirectUri: http://localhost:12345/callback - scopes: - - all - - offline - -To summarize the changes between pre-``v0.13.0`` and post-``v0.13.0``: - -* The original **oauth** section has been moved two levels higher into its own section and renamed **auth** but enabling/disabling of authentication remains in the old location. -* Secrets by default will now be looked up in **/etc/secrets**. Use the following command to generate them: - - .. prompt:: bash $ - - flyteadmin secrets init -p /etc/secrets - - This will generate the new cookie hash/block keys, as well as other secrets Admin needs to run the Authorization server. - -* The **clientSecretFile** has been moved to **/etc/secrets/oidc_client_secret** so move that there. -* **claims** has been removed, just delete that. -* **authorizeUrl** and **tokenUrl** are no longer necessary. 
-* The **baseUrl** for the external Authorization Server is now in the **appAuth** section. -* The **thirdPartyConfig** has been moved to **appAuth** as well. -* **redirectUrl** has been defaulted to **/console**. If that's the value you want, then you no longer need this setting. - -Propeller Config Changes -======================== - -Similarly, there are FlytePropeller config changes to be aware of. - -.. tabs:: - - .. group-tab:: < v0.13.0 - - .. code-block:: yaml - - admin: - endpoint: dns:///mycompany.domain.com - useAuth: true - clientId: flytepropeller - clientSecretLocation: /etc/secrets/client_secret - tokenUrl: https://demo.nuclyde.io/oauth2/token - scopes: - - all - - .. group-tab:: >= v0.13.0 - - .. code-block:: yaml - - admin: - endpoint: dns:///mycompany.domain.com - # If you are using the built-in authorization server, you can delete the following two lines: - clientId: flytepropeller - clientSecretLocation: /etc/secrets/client_secret - -To summarize the changes between pre-``v0.13.0`` and post-``v0.13.0``: - -* **useAuth** is deprecated and will be removed in a future version. Auth requirement will be discovered through an anonymous admin discovery call. -* **tokenUrl** and **scopes** will also be discovered through a metadata call. -* **clientId** and **clientSecretLocation** have defaults that work out of the box with the built-in authorization server (e.g. if you setup Google OpenID Connect). diff --git a/rsts/deployment/configuration/auth_setup.rst b/rsts/deployment/configuration/auth_setup.rst deleted file mode 100644 index 61a6b4c0ae..0000000000 --- a/rsts/deployment/configuration/auth_setup.rst +++ /dev/null @@ -1,790 +0,0 @@ -.. _deployment-configuration-auth-setup: - -####################### -Authenticating in Flyte -####################### - -.. tags:: Authentication, Infrastructure, Advanced - -The Flyte platform consists of multiple components. 
Securing communication between each component is crucial to ensure -the integrity of the overall system. - -The following diagram summarizes the components and their interactions as part of Flyte's auth implementation: - - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/deployment/auth/flyte-auth-arch-v2.png - - -In summary, there are two main resources required for a complete auth flow in Flyte: - -**An identity layer** - -Using an implementation of the `Open ID Connect (OIDC) specification `__, it enables clients to verify the identity of the end user based on the authentication performed by an Authorization Server. For this flow to work, you must first register Flyte as a new client (app) to the Identity Provider (IdP). - -**An authorization server** - -As defined by IETF's `RFC #6749 `__, the authorization server's role is to issue *access tokens to the client after successfully authenticating the resource owner and obtaining authorization*. In this context, the *resource owner* is the end user of Flyte; and the *client* is the tool or component that intends to interact with ``flyteadmin`` : ``flytepropeller``, ``flyteconsole`` or any of the CLI tools. - -There are two supported options to use an authorization server in Flyte: - - * **Internal authorization server**: It comes pre-installed with Flyte and it is a suitable choice for quick start and testing purposes. - * **External (custom) authorization server**: This a service provided by one of the supported IdPs and is the recommended option if your organization needs to retain control over scope definitions and grants, token expiration policies and other advanced security controls. - - -.. note:: - - Regardless of the type of authorization server to use, you will still need an IdP to provide identity through OIDC. - - -.. 
_auth-setup: - -******************** -Authentication Setup -******************** - -Prerequisites -============= - - -The following is required for non-sandbox (non ``flytectl demo``) deployments: - - -* A public domain name (e.g. example.foobar.com) -* Routing of traffic from that domain name to the Kubernetes Flyte Ingress IP address - -.. note:: - - Checkout this `community-maintained guide `__ for more information about setting up Flyte in production, including Ingress. - - -Identity Providers Support -========================== - -Flyte supports OAuth2 and OpenId Connect to secure the various connections: - -* :ref:`OpenID Connect `: used to secure user's authentication to ``flyteadmin`` through the UI. -* :ref:`OAuth2 `: used to secure connections from clients (i.e. ``pyflyte``, ``flytectl`` and - ``flytepropeller``) to the ``flyteadmin`` service. - -Support for these protocols varies per IdP. Checkout the following table to understand the available support level for -your IdP: - -+----------------------+--------+-------------+---------------------+----------+-------+----------+--------+ -| Feature | Okta | Google free | GCP Identity Service| Azure AD | Auth0 | KeyCloak | Github | -+======================+========+=============+=====================+==========+=======+==========+========+ -| OpenID Connect (OIDC)| Yes | Yes | Yes | Yes | Yes | Yes | No | -+----------------------+--------+-------------+---------------------+----------+-------+----------+--------+ -| Custom Auth Server | Yes | No | Yes | Yes | ? | Yes | No | -+----------------------+--------+-------------+---------------------+----------+-------+----------+--------+ - - -Identity Management layer : OIDC -=================================== - -In this section, you can find canonical examples of how to set up OIDC on some of the supported IdPs; enabling users to authenticate in the -browser. - -.. tabs:: - - .. 
group-tab:: Google - - - - - Create an OAuth2 Client Credential following the `official documentation `__ and take note of the ``client_id`` and ``client_secret`` - - - In the **Authorized redirect URIs** field, add ``http://localhost:30081/callback`` for **sandbox** deployments, or ``https:///callback`` for other methods of deployment. - - - .. group-tab:: Okta - - - 1. If you don't already have an Okta account, sign up for one `here `__. - 2. Create an app integration, with `OIDC - OpenID Connect` as the sign-on method and `Web Application` as the app type. - 3. Add sign-in redirect URIs: - - - ``http://localhost:30081/callback`` for sandbox or ``https:///callback`` for other Flyte deployment types. - - 4. *Optional* - Add logout redirect URIs: - - - ``http://localhost:30081/logout`` for sandbox, ``https:///callback`` for other Flyte deployment types). - - 5. Take note of the Client ID and Client Secret - - .. group-tab:: Keycloak - - - 1. If you don't have a Keycloak installation, you can use `this `__ which provides a quick way to deploy Keycloak cluster on AWS. - 2. Create a realm using the `admin console `__ - 3. Create an OIDC client with client secret and note them down. Use the following `instructions `__ - 4. Add Login redirect URIs: - - - ``http://localhost:30081/callback`` for sandbox or ``https:///callback`` for other Flyte deployment types. - - .. group-tab:: Microsoft Azure AD - - 1. From the Azure homepage go to **Azure Active Directory** - 2. From the **Ovierview** page, take note of the **Tenant ID** - 3. Go to **App registrations** - 4. Create a **New registration** - 5. Give it a descriptive name - 6. For the **Supported account types** select the option that matches your organization's security policy - 7. In the **Redirect URI** section select: - - - **Web** platform - - Add ``http://localhost:30081/callback`` for sandbox or ``https:///callback`` for other Flyte deployment types - - 9. Click on **Register** - 10. 
Once created, click on the registered app and go to the **Certificates and secrets** section - 11. Go to **Client secrets** and create a **New client secret** - 12. Enter a description and an expiration policy - 13. Take note of the secret **Value** as it will be used in the Helm chart - - For further reference, check out the official `Azure AD Docs `__ on how to configure the IdP for OpenIDConnect. - - .. note:: - - Make sure the app is registered without `additional claims `__. - The OpenIDConnect authentication will not work otherwise, please refer to this `GitHub Issue `__ and `Azure AD Docs `__ for more information. - - -Apply OIDC Configuration -=========================== - -.. tabs:: - - .. group-tab:: flyte-binary - - - 1. Generate a random password to be used internally by ``flytepropeller`` - - 2. Use the following command to generate a bcrypt hash for that password: - - .. prompt:: bash $ - - pip install bcrypt && python -c 'import bcrypt; import base64; print(base64.b64encode(bcrypt.hashpw("".encode("utf-8"), bcrypt.gensalt(6))))' - - 3. Go to your values file and locate the ``auth`` section and replace values accordingly: - - .. code-block:: yaml - - auth: - enabled: true - oidc: - # baseUrl: https://accounts.google.com # Uncomment for Google - # baseUrl: https:///auth/realms/ # Uncomment for Keycloak and update with your installation host and realm name - # baseUrl: https://login.microsoftonline.com//oauth2/v2.0/authorize # Uncomment for Azure AD - # For Okta use the Issuer URI from Okta's default auth server - baseUrl: https://dev-.okta.com/oauth2/default - # Replace with the client ID and secret created for Flyte in your IdP - clientId: - clientSecret: - internal: - clientSecret: '' - # Use the output of step #2 (only the content inside of '') - - clientSecretHash: - - authorizedUris: - - https:// - - 4. Save your changes - 5. Upgrade your Helm release with the new values: - - .. 
prompt:: bash $ - - helm upgrade flyteorg/flyte-binary -n --values .yaml - - Where: - - * ```` is the name of your Helm release, typically ``flyte-backend``. You can find it using ``helm ls -n `` - - - 6. Verify that your Flyte deployment now requires successful login to your IdP to access the UI (``https:///console``) - - 7. For ``flytectl`` / ``pyflyte``, make sure that your local config file (``$HOME/.flyte/config.yaml``) includes the following option: - - .. code-block:: yaml - - admin: - ... - authType: Pkce #change from the default `clientCred` to enable client auth without using shared secrets - ... - - - .. group-tab:: flyte-core - - 1. Generate a random password to be used internally by flytepropeller - 2. Use the following command to generate a bcrypt hash for that password: - - .. prompt:: bash $ - - pip install bcrypt && python -c 'import bcrypt; import base64; print(base64.b64encode(bcrypt.hashpw("".encode("utf-8"), bcrypt.gensalt(6))))' - - Take note of the output (only the contents inside `''`) - - 3. Store the ``client_secret`` provided by your IdP in a Kubernetes secret as follows: - - .. prompt:: bash $ - - kubectl edit secret -n flyte-admin-secrets - - Where ``flyte-namespace`` is the Kubernetes namespace where you have installed Flyte. - - 4. Add a new key under ``stringData``: - - .. code-block:: yaml - - apiVersion: v1 - # Add from here - stringData: - oidc_client_secret: - # End here - data: - ... - - 5. Save and close your editor. - - 6. Go to your Helm values file and verify that the ``configmap`` section include the following, replacing the content where indicated: - - .. 
code-block:: yaml - - configmap: - adminServer: - server: - httpPort: 8088 - grpcPort: 8089 - security: - secure: false - useAuth: true - allowCors: true - allowedOrigins: - # Accepting all domains for Sandbox installation - - "*" - allowedHeaders: - - "Content-Type" - auth: - appAuth: - thirdPartyConfig: - flyteClient: - clientId: flytectl - redirectUri: http://localhost:53593/callback - scopes: - - offline - - all - selfAuthServer: - staticClients: - flyte-cli: - id: flyte-cli - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - grant_types: - - refresh_token - - authorization_code - response_types: - - code - - token - scopes: - - all - - offline - - access_token - public: true - flytectl: - id: flytectl - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - grant_types: - - refresh_token - - authorization_code - response_types: - - code - - token - scopes: - - all - - offline - - access_token - public: true - flytepropeller: - id: flytepropeller - # Use the bcrypt hash generated for your random password - client_secret: "" - redirect_uris: - - http://localhost:3846/callback - grant_types: - - refresh_token - - client_credentials - response_types: - - token - scopes: - - all - - offline - - access_token - public: false - - authorizedUris: - # Use the public URL of flyteadmin (a DNS record pointing to your Ingress resource) - - https:// - - http://flyteadmin:80 - - http://flyteadmin.flyte.svc.cluster.local:80 - userAuth: - openId: - # baseUrl: https://accounts.google.com # Uncomment for Google - # baseUrl: https://login.microsoftonline.com//oauth2/v2.0/authorize # Uncomment for Azure AD - # For Okta, use the Issuer URI of the default auth server - baseUrl: https://dev-.okta.com/oauth2/default - # Use the client ID generated by your IdP - clientId: - scopes: - - profile - - openid - - 7. 
Additionally, outside the ``configmap`` section, add the following block and replace the necessary information: - - .. code-block:: yaml - - secrets: - adminOauthClientCredentials: - # -- If enabled is true, helm will create and manage `flyte-secret-auth` and populate it with `clientSecret`. - # If enabled is false, it's up to the user to create `flyte-secret-auth` - enabled: true - # Use the non-encoded version of the random password - clientSecret: "" - clientId: flytepropeller - - 8. Save and exit your editor. - - 9. Restart ``flyteadmin`` for the changes to take effect: - - .. prompt:: bash $ - - kubectl rollout restart deployment/flyteadmin -n flyte - - 10. Restart ``flytepropeller`` to start using authenticated requests: - - .. prompt:: bash $ - - kubectl rollout restart deployment/flytepropeller -n flyte - - 11. Restart ``flytescheduler`` to start using authenticated requests: - - .. prompt:: bash $ - - kubectl rollout restart deployment/flytescheduler -n flyte - - 12. For flytectl/pyflyte, make sure that your local config file (``$HOME/.flyte/config.yaml``) includes the following option: - - .. code-block:: yaml - - admin: - ... - authType: Pkce #change from the default `clientCred` to enable client auth without using shared secrets - ... - -.. note:: - - **Congratulations!** - - It should now be possible to go to Flyte UI and be prompted for authentication. Flytectl should automatically pick up the change and start prompting for authentication as well. - If you want to use an external OAuth2 provider for App authentication, please continue reading into the next section. - -*************************** -Custom Authorization Server -*************************** - - -As mentioned previously, Flyte ships with an internal authorization server; hence setting up an external Authorization Server is optional and dependent on your organization's security requirements. 
- -In this section, you will find instructions on how to setup an OAuth2 Authorization Server in the different IdPs supported by Flyte: - -.. note:: - - **Google IdP** - - Google IdP does not offer an OAuth2 Authorization Server that could be used to protect external services (For example Flyte). In this case, Google offers a separate Cloud Product called Google Cloud Identity. - Configuration for Cloud Identity is not included in this guide. If unavailable, setup can stop here and FlyteAdmin BuiltIn OAuth2 Authorization Server can be used instead. - -.. tabs:: - - .. group-tab:: Okta - - Okta's custom authorization servers are available through an add-on license. The free developer accounts do include access, which you can use to test before rolling out the configuration more broadly. - - 1. From the left-hand menu, go to **Security** > **API** - 2. Click on **Add Authorization Server**. - 3. Assign an informative name and set the audience to the public URL of FlyteAdmin (e.g. https://example.foobar.com). - - .. note:: - - The audience must exactly match one of the URIs in the ``authorizedUris`` section above - - 4. Note down the **Issuer URI**; this will be used for all the ``baseUrl`` settings in the Flyte config. - 5. Go to **Scopes** and click **Add Scope**. - 6. Set the name to ``all`` (required) and check ``Required`` under the **User consent** option. - 7. Uncheck the **Block services from requesting this scope** option and save your changes. - 8. Add another scope, named ``offline``. Check both the **Required** and **Include in public metadata** options. - 9. Uncheck the **Block services from requesting this scope** option. - 10. Click **Save**. - 11. Go to **Access Policies**, click **Add New Access Policy**. Enter a name and description and enable **Assign to** - ``All clients``. - 12. Add a rule to the policy with the default settings (you can fine-tune these later). - 13. Navigate back to the **Applications** section. - 14. 
Create an integration for ``flytectl``; it should be created with the **OIDC - OpenID Connect** sign-on method, and the **Native Application** type. - 15. Add ``http://localhost:53593/callback`` to the sign-in redirect URIs. The other options can remain as default. - 16. Assign this integration to any Okta users or groups who should be able to use the ``flytectl`` tool. - 17. Note down the **Client ID**; there will not be a secret. - 18. Create an integration for ``flytepropeller``; it should be created with the **OIDC - OpenID Connect** sign-on method and **Web Application** type. - 19. Check the ``Client Credentials`` option under **Client acting on behalf of itself**. - 20. This app does not need a specific redirect URI; nor does it need to be assigned to any users. - 21. Note down the **Client ID** and **Client secret**; you will need these later. - 22. Take note of the **Issuer URI** for your Authorization Server. It will be used as the baseURL parameter in the Helm chart. - - You should have three integrations total - one for the web interface (``flyteconsole``), one for ``flytectl``, and one for ``flytepropeller``. - - .. group-tab:: Keycloak - - - 1. If you don't have a Keycloak installation, you can use `this `__ which provides a quick way to deploy a Keycloak cluster on AWS. - 2. Create a realm in your Keycloak installation using its `admin console `__ - 3. Under `Client Scopes`, click `Add Create` inside the admin console. - 4. Create two clients (for `flytectl` and `flytepropeller`) to enable these clients to communicate with the service. - 5. `flytectl` should be created with `Access Type Public` and standard flow enabled. - 6. `flytePropeller` should be created as an `Access Type Confidential`, enabling the standard flow - 7. Take note of the client ID and client Secrets provided. - - .. group-tab:: Azure AD - - 1. Navigate to tab **Overview**, obtain ```` and ```` - 2. Navigate to tab **Authentication**, click ``+Add a platform`` - 3. 
Add **Web** for flyteconsole and flytepropeller, **Mobile and desktop applications** for flytectl. - 4. Add URL ``https:///callback`` as the callback for Web - 5. Add URL ``http://localhost:53593/callback`` as the callback for flytectl - 6. In **Advanced settings**, set ``Enable the following mobile and desktop flows`` to **Yes** to enable deviceflow - 7. Navigate to tab **Certificates & secrets**, click ``+New client secret`` to create ```` - 8. Navigate to tab **Token configuration**, click ``+Add optional claim`` and create email claims for both ID and Access Token - 9. Navigate to tab **API permissions**, add ``email``, ``offline_access``, ``openid``, ``profile``, ``User.Read`` - 10. Navigate to tab **Expose an API**, Click ``+Add a scope`` and ``+Add a client application`` to create ```` - - -Apply external auth server configuration -======================================== - -Follow the steps in this section to configure `flyteadmin` to use an external auth server. This section assumes that you have already completed and applied the configuration for the OIDC Identity Layer. - -.. tabs:: - - .. group-tab:: flyte-binary - - 1. Go to the values YAML file you used to install Flyte using a Helm chart - 2. Find the ``auth`` section and follow the inline comments to insert your configuration: - - .. 
code-block:: yaml - - auth: - enabled: true - oidc: - # baseUrl: https:///auth/realms/ # Uncomment for Keycloak and update with your installation host and realm name - # baseUrl: https://login.microsoftonline.com//oauth2/v2.0/authorize # Uncomment for Azure AD - # For Okta, use the Issuer URI of the custom auth server: - baseUrl: https://dev-.okta.com/oauth2/ - # Use the client ID and secret generated by your IdP for the first OIDC registration in the "Identity Management layer : OIDC" section of this guide - clientId: - clientSecret: - internal: - # Use the clientID generated by your IdP for the flytepropeller app registration - clientId: - #Use the secret generated by your IdP for flytepropeller - clientSecret: '' - # Use the bcrypt hash for the clientSecret - clientSecretHash: <-flytepropeller-secret-bcrypt-hash> - authorizedUris: - # Use here the exact same value used for 'audience' when the Authorization server was configured - - https:// - - - 3. Find the ``inline`` section of the values file and add the following content, replacing where needed: - - .. 
code-block:: yaml - - inline: - auth: - appAuth: - authServerType: External - externalAuthServer: - # baseUrl: https:///auth/realms/ # Uncomment for Keycloak and update with your installation host and realm name - # baseUrl: https://login.microsoftonline.com//oauth2/v2.0/authorize # Uncomment for Azure AD - # For Okta, use the Issuer URI of the custom auth server: - baseUrl: https://dev-.okta.com/oauth2/ - metadataUrl: .well-known/oauth-authorization-server - thirdPartyConfig: - flyteClient: - # Use the clientID generated by your IdP for the `flytectl` app registration - clientId: - redirectUri: http://localhost:53593/callback - scopes: - - offline - - all - userAuth: - openId: - # baseUrl: https:///auth/realms/ # Uncomment for Keycloak and update with your installation host and realm name - # baseUrl: https://login.microsoftonline.com//oauth2/v2.0/authorize # Uncomment for Azure AD - # For Okta, use the Issuer URI of the custom auth server: - baseUrl: https://dev-.okta.com/oauth2/ - scopes: - - profile - - openid - # - offline_access # Uncomment if your IdP supports issuing refresh tokens (optional) - # Use the client ID and secret generated by your IdP for the first OIDC registration in the "Identity Management layer : OIDC" section of this guide - clientId: - - - 4. Save your changes - 5. Upgrade your Helm release with the new configuration: - - .. prompt:: bash $ - - helm upgrade flyteorg/flyte-core -n --values .yaml - - - .. group-tab:: flyte-core - - - 1. Find the ``auth`` section in your Helm values file, and replace the necessary data: - - .. note:: - - If you were previously using the internal auth server, make sure to delete all the ``selfAuthServer`` section from your values file - - .. code-block:: yaml - - configmap: - auth: - appAuth: - - authServerType: External - - # 2. Optional: Set external auth server baseUrl if different from OpenId baseUrl. 
- externalAuthServer: - # baseUrl: https:///auth/realms/ # Uncomment for Keycloak and update with your installation host and realm name - # baseUrl: https://login.microsoftonline.com//oauth2/v2.0/authorize # Uncomment for Azure AD - # For Okta, use the Issuer URI of the custom auth server: - baseUrl: https://dev-.okta.com/oauth2/ - - metadataUrl: .well-known/openid-configuration - - thirdPartyConfig: - flyteClient: - # 3. Replace with a new Native/Public Client ID provisioned in the custom authorization server. - clientId: flytectl - # This should not change - redirectUri: http://localhost:53593/callback - # 4. "all" is a required scope and must be configured in the custom authorization server. - scopes: - - offline - - all - - userAuth: - openId: - # baseUrl: https:///auth/realms/ # Uncomment for Keycloak and update with your installation host and realm name - # baseUrl: https://login.microsoftonline.com//oauth2/v2.0/authorize # Uncomment for Azure AD - # For Okta, use the Issuer URI of the custom auth server: - baseUrl: https://dev-.okta.com/oauth2/ - scopes: - - profile - - openid - # - offline_access # Uncomment if OIDC supports issuing refresh tokens. - clientId: - - - secrets: - adminOauthClientCredentials: - enabled: true # see the section "Disable Helm secret management" if you require to do so - # Replace with the client_secret provided by your IdP for flytepropeller. - clientSecret: - # Replace with the client_id provided by your IdP for flytepropeller. - clientId: - - 2. Save your changes - 3. Upgrade your Helm release with the new configuration: - - .. prompt:: bash $ - - helm upgrade flyteorg/flyte-core -n --values .yaml - - .. group-tab:: flyte-core with Azure AD - - .. 
code-block:: yaml - - secrets: - adminOauthClientCredentials: - enabled: true - clientSecret: - clientId: - --- - configmap: - admin: - admin: - endpoint: - insecure: true - clientId: - clientSecretLocation: /etc/secrets/client_secret - scopes: - - api:///.default - useAudienceFromAdmin: true - --- - auth: - appAuth: - authServerType: External - externalAuthServer: - baseUrl: https://login.microsoftonline.com//v2.0/ - metadataUrl: .well-known/openid-configuration - AllowedAudience: - - api:// - thirdPartyConfig: - flyteClient: - clientId: - redirectUri: http://localhost:53593/callback - scopes: - - api:/// - - userAuth: - openId: - baseUrl: https://login.microsoftonline.com//v2.0 - scopes: - - openid - - profile - clientId: - -.. note:: - - **Congratulations** - - At this point, every interaction with Flyte components -be it in the UI or CLI- should require a successful login to your IdP, where your security policies are maintained and enforced. - - -Disable Helm secret management ------------------------------- - -Alternatively, you can instruct Helm not to create and manage the secret for ``flytepropeller``. In that case, you'll have to create it following these steps: - - -1. Disable Helm secrets management in your values file - -.. code-block:: yaml - - secrets: - adminOauthClientCredentials: - enabled: false #set to false - # Replace with the client_id provided by your IdP for flytepropeller. - clientId: - -2. Create a secret declaratively: - -.. code-block:: yaml - - apiVersion: v1 - kind: Secret - metadata: - name: flyte-secret-auth - namespace: flyte - type: Opaque - stringData: - # Replace with the client_secret provided by your IdP for flytepropeller. - client_secret: - - - -Continuous Integration - CI ---------------------------- - -If your organization does any automated registration, then you'll need to authenticate with the `client credentials `_ flow. 
After retrieving an access token from the IDP, you can send it along to ``flyteadmin`` as usual. - -.. tabs:: - - .. group-tab:: flytectl - - Flytectl's `config.yaml `_ can be - configured to use either PKCE (`Proof key for code exchange `_) - or Client Credentials (`Client Credentials `_) flows. - - 1. Update ``config.yaml`` as follows: - - .. code-block:: yaml - - admin: - # Update with Flyte's ingress endpoint (e.g. flyteIngressIP for sandbox or example.foobar.com) - # You must keep the 3 forward-slashes after dns: - endpoint: dns:/// - - # Update auth type to `Pkce` or `ClientSecret` - authType: Pkce - - # Set to the clientId (will be used for both Pkce and ClientSecret flows) - # Leave empty to use the value discovered through flyteAdmin's Auth discovery endpoint. - clientId: - - # Set to the location where the client secret is mounted. - # Only needed/used for `ClientSecret` flow. - clientSecretLocation: - - # If required, set the scopes needed here. Otherwise, flytectl will discover scopes required for OpenID - # Connect through flyteAdmin's Auth discovery endpoint. - # scopes: [ "scope1", "scope2" ] - - To read further about the available config options, please - `visit here `_ - - .. group-tab:: Flytekit / pyflyte - - Flytekit configuration variables are automatically designed to look up values from relevant environment variables. - - .. important:: - - However, to aid with continuous integration use-cases, Flytekit configuration can also reference other environment - variables. - - For instance, if your CI system is not capable of setting custom environment variables like - ``FLYTE_CREDENTIALS_CLIENT_SECRET`` but does set the necessary settings under a different variable, you may use - ``export FLYTE_CREDENTIALS_CLIENT_SECRET_FROM_ENV_VAR=OTHER_ENV_VARIABLE`` to redirect the lookup. 
A - ``FLYTE_CREDENTIALS_CLIENT_SECRET_FROM_FILE`` redirect is available as well, where the value should be the full - path to the file containing the value for the configuration setting, in this case, the client secret. We found - this redirect behavior necessary when setting up registration within our own CI pipelines. - - The following is a listing of the Flytekit configuration values we set in CI, along with a brief explanation. - - .. code-block:: bash - - # When using OAuth2 service auth, this is the username and password. - export FLYTE_CREDENTIALS_CLIENT_ID= - export FLYTE_CREDENTIALS_CLIENT_SECRET= - - # This tells the SDK to use basic authentication. If not set, Flytekit will assume you want to use the - # standard OAuth based three-legged flow. - export FLYTE_CREDENTIALS_AUTH_MODE=basic - - # This value should be set to conform to this - # `header config `_ - # on the Admin side. - export FLYTE_CREDENTIALS_AUTHORIZATION_METADATA_KEY=
- - # When using basic authentication, you'll need to specify a scope to the IDP (instead of ``openid``, which is - # only for OAuth). Set that here. - export FLYTE_CREDENTIALS_OAUTH_SCOPES= - - # Set this to force Flytekit to use authentication, even if not required by Admin. This is useful as you're - # rolling out the requirement. - export FLYTE_PLATFORM_AUTH=True - -.. _auth-references: - -********** -References -********** - -This collection of RFCs may be helpful to those who wish to investigate the implementation in more depth. - -* `OAuth2 RFC 6749 `_ -* `OAuth Discovery RFC 8414 `_ -* `PKCE RFC 7636 `_ -* `JWT RFC 7519 `_ - -There's also more detailed information about the authentication flows in the :ref:`deployment-configuration-auth-appendix`. diff --git a/rsts/deployment/configuration/cloud_event.rst b/rsts/deployment/configuration/cloud_event.rst deleted file mode 100644 index 5fa526cbc1..0000000000 --- a/rsts/deployment/configuration/cloud_event.rst +++ /dev/null @@ -1,159 +0,0 @@ -.. _deployment-configuration-cloud-event: - -############ -Cloud Events -############ -# gatepr: this doc needs to be updated -.. tags:: Infrastructure, AWS, GCP, Advanced - -Progress of Flyte workflow and task execution is delimited by a series of -events that are passed from the FlytePropeller to FlyteAdmin. Administrators -can configure FlyteAdmin to send these `cloud events `_ onwards to a pub/sub system like -SNS/SQS as well. Note that this configuration is distinct from the -configuration for notifications :ref:`deployment-configuration-notifications`, -and :ref:`deployment-configuration-eventing`. -They should use separate topics/queues. These events are meant for external -consumption, outside the Flyte platform. - -********* -Use cases -********* - -CloudEvents is a specification for describing event data in common formats -to provide interoperability across services, platforms and systems. 
- -The external events flow can be useful for tracking data lineage and -integrating with existing systems within your organization. - -************************* -Supported Implementations -************************* - -Event egress can be configured to work with **AWS** using -`SQS `_ and -`SNS `_, -**GCP** `Cloud Pub/Sub `_, or -`Apache Kafka `_ - -************* -Configuration -************* - -To turn on, add the following to your FlyteAdmin: - -.. tabs:: - - .. tab:: AWS SNS - - .. code:: yaml - - cloud_events.yaml: | - cloudEvents: - enable: true - aws: - region: us-east-2 - eventsPublisher: - eventTypes: - - all # or node, task, workflow - topicName: arn:aws:sns:us-east-2:123456:123-my-topic - type: aws - - .. tab:: GCP Pub/Sub - - .. code:: yaml - - cloud_events.yaml: | - cloudEvents: - enable: true - gcp: - region: us-east-2 - eventsPublisher: - eventTypes: - - all # or node, task, workflow - topicName: my-topic - type: gcp - - .. tab:: Apache Kafka - - .. code:: yaml - - cloud_events.yaml: | - cloudEvents: - enable: true - kafka: - brokers: 127.0.0.1:9092 - eventsPublisher: - eventTypes: - - all - topicName: myTopic - type: kafka - -Helm -====== -There should already be a section for this in the ``values.yaml`` file. Update -the settings under the ``cloud_events`` key and turn ``enable`` to ``true``. -The same flag is used for Helm as for Admin itself. - -***** -Usage -***** - -Version 1 -========= -The events are emitted in cloud Event format, and the data in the cloud event -will be base64 encoded binary representation of the following IDL messages: - -* ``admin_event_pb2.TaskExecutionEventRequest`` -* ``admin_event_pb2.NodeExecutionEventRequest`` -* ``admin_event_pb2.WorkflowExecutionEventRequest`` - -Which of these three events is being sent can be distinguished by the subject -line of the message, which will be one of the three strings above. - -Note that these message wrap the underlying event messages -:std:doc:`found here `. 
- -CloudEvent Spec ---------------- - -.. code:: json - - { - "specversion" : "1.0", - "type" : "com.flyte.resource.workflow", - "source" : "https://github.com/flyteorg/flyteadmin", - "id" : "D234-1234-1234", - "time" : "2018-04-05T17:31:00Z", - "jsonschemaurl": "https://github.com/flyteorg/flyteidl/blob/master/jsonschema/workflow_execution.json", - "data" : "workflow execution event" - } - -.. note:: - The message format may eventually change to an enriched and distinct message type in future releases. - -Version 2 -========= -These events are also in the cloud event spec, but there will be considerably more information in the event that users may find useful. Keep in mind however that turning on these events will induce a heavier load on the database as information is queried. - -The event types may be found in the following IDL messages (Python example given): - -* ``flyteidl.event.cloudevents_pb2.CloudEventWorkflowExecution`` -* ``flyteidl.event.cloudevents_pb2.CloudEventTaskExecution`` -* ``flyteidl.event.cloudevents_pb2.CloudEventNodeExecution`` -* ``flyteidl.event.cloudevents_pb2.CloudEventExecutionStart`` - -CloudEvent Spec ---------------- -The specification for these v2 events is similar, except that the jsonschemaurl will be blank for now. We are working on making these auto-generated from the IDL. - -.. code:: json - - { - "specversion" : "1.0", - "type" : "com.flyte.resource.cloudevents.WorkflowExecution", - "source" : "https://github.com/flyteorg/flyteadmin", - "id" : "D234-1234-1234", - "time" : "2018-04-05T17:31:00Z", - "jsonschemaurl": "", - "data" : (bytes of the underlying event) - } diff --git a/rsts/deployment/configuration/customizable_resources.rst b/rsts/deployment/configuration/customizable_resources.rst deleted file mode 100644 index a447c38f73..0000000000 --- a/rsts/deployment/configuration/customizable_resources.rst +++ /dev/null @@ -1,202 +0,0 @@ -.. 
_deployment-configuration-customizable-resources: - -################################# -Adding New Customizable Resources -################################# - -.. tags:: Infrastructure, Advanced - -As a quick refresher, custom resources allow you to manage configurations for specific combinations of user projects, domains and workflows that override default values. -Examples of such resources include execution clusters, task resource defaults, and :std:ref:`more `. - -.. note:: - For background on customizable resources, refer to :ref:`deployment-configuration-general`. - -In a :ref:`multi-cluster setup `, an example one could think of is setting routing rules to send certain workflows to specific clusters, which demands setting up custom resources. - -Here's how you could go about building a customizable priority designation. - -Example -------- - -Let's say you want to inject a default priority annotation for your workflows. -Perhaps you start off with a model where everything has a default priority but soon you realize it makes sense that workflows in your production domain should take higher priority than those in your development domain. - -Now, one of your user teams requires critical workflows to have a higher priority than other production workflows. - -Here's how you could do that. - -Flyte IDL -^^^^^^^^^ - -Introduce a new :std:ref:`matchable resource ` that includes a unique enum value and proto message definition. - -For example: - -:: - - enum MatchableResource { - ... - WORKFLOW_PRIORITY = 10; - } - - message WorkflowPriorityAttribute { - int priority = 1; - } - - message MatchingAttributes { - oneof target { - ... - WorkflowPriorityAttribute WorkflowPriority = 11; - } - } - - -See the changes in this `file `__ for an example of what is required. 
- - -FlyteAdmin -^^^^^^^^^^ - -Once your IDL changes are released, update the logic of FlyteAdmin to `fetch `__ your new matchable priority resource and use it while creating executions or in relevant use cases. - -For example: - -:: - - - resource, err := s.resourceManager.GetResource(ctx, managerInterfaces.ResourceRequest{ - Domain: domain, - Project: project, // optional - Workflow: workflow, // optional, must include project when specifying workflow - LaunchPlan: launchPlan, // optional, must include project + workflow when specifying launch plan - ResourceType: admin.MatchableResource_WORKFLOW_PRIORITY, - }) - - if err != nil { - return err - } - - if resource != nil && resource.Attributes != nil && resource.Attributes.GetWorkflowPriority() != nil { - priorityValue := resource.Attributes.GetWorkflowPriority().GetPriority() - // do something with the priority here - } - - -Flytekit -^^^^^^^^ - -For convenience, add a FlyteCTL wrapper to update the new attributes. Refer to `this PR `__ for the entire set of changes required. - -That's it! You now have a new matchable attribute to configure as the needs of your users evolve. - -Flyte ResourceManager ---------------------- - -**Flyte ResourceManager** is a configurable component that allows plugins to manage resource allocations independently. It helps track resource utilization of tasks that run on Flyte. The default deployments are configured as ``noop``, which indicates that the ResourceManager provided by Flyte is disabled and plugins rely on each independent platform to manage resource utilization. In situations like the K8s plugin, where the platform has a robust mechanism to manage resource scheduling, this may work well. However, in a scenario like a simple web API plugin, the rate at which Flyte sends requests may overwhelm a service and benefit from additional resource management. - -The below attribute is configurable within FlytePropeller, which can be disabled with: - -.. 
code-block:: yaml - - resourcemanager: - type: noop - -The ResourceManager provides a task-type-specific pooling system for Flyte tasks. Optionally, plugin writers can request resource allocation in their tasks. - -A plugin defines a collection of resource pools using its configuration. Flyte uses tokens as a placeholder to represent a unit of resource. - -How does a Flyte plugin request for resources? -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The Flyte plugin registers the resource and the desired quota of every resource with the **ResourceRegistrar** when setting up FlytePropeller. When a plugin is invoked, FlytePropeller provides a proxy for the plugin. This proxy facilitates the plugin's view of the resource pool by controlling operations to allocate and deallocate resources. - -.. dropdown:: :fa:`info-circle` Enabling Redis instance - :title: text-muted - :animate: fade-in-slide-down - - The ResourceManager can use a Redis instance as an external store to track and manage resource pool allocation. By default, it is disabled, and can be enabled with: - - .. code-block:: yaml - - resourcemanager: - type: redis - resourceMaxQuota: 100 - redis: - hostPaths: - - foo - hostKey: bar - maxRetries: 0 - -Once the setup is complete, FlytePropeller builds a ResourceManager based on the previously requested resource registration. Based on the plugin implementation's logic, resources are allocated and deallocated. - -During runtime, the ResourceManager: - -#. Allocates tokens to the plugin. -#. Releases tokens once the task is completed. - -How are resources allocated? -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -When a Flyte task execution needs to send a request to an external service, the plugin claims a unit of the corresponding resource. This is done using a **ResourceName**, which is a unique token and a fully qualified resource request (which is typically an integer). 
The execution generates this unique token and registers this token with the ResourceManager by calling the ResourceManager’s **"AllocateResource function"**. If the resource pool has sufficient capacity to fulfil your request, then the resources requested are allocated, and the plugin proceeds further. - -When the status is **"AllocationGranted"**, the execution moves forward and sends out the request for those resources. - -The granted token is recorded in a token pool which corresponds to the resource that is managed by the ResourceManager. - -How are resources deallocated? -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -When the request is completed, the plugin asks the ResourceManager to release the token by calling the ReleaseResource() function present in the ResourceManager. Upon calling the function, the token is eliminated from the token pool. -In this manner, Flyte plugins intelligently throttle resource usage during parallel execution of nodes. - -Example -^^^^^^^^ -Let's take an example to understand resource allocation and deallocation when a plugin requests resources. - -Flyte has a built-in `Qubole `__ plugin. This plugin allows Flyte tasks to send Hive commands to Qubole. In the plugin, a single Qubole cluster is considered a resource, and sending a single Hive command to a Qubole cluster consumes a token of the corresponding resource. -The resource is allocated when the status is **“AllocationGranted”**. Qubole plugin calls: - -.. code-block:: go - - status, err := AllocateResource(ctx, , , ) - -Wherein the placeholders are occupied by: - -.. code-block:: go - - status, err := AllocateResource(ctx, "default_cluster", "flkgiwd13-akjdoe-0", ResourceConstraintsSpec{}) - -The resource is deallocated when the Hive command completes its execution and the corresponding token is released. The plugin calls: - -.. code-block:: go - - status, err := AllocateResource(ctx, , , ) - -Wherein the placeholders are occupied by: - -.. 
code-block:: go - - err := ReleaseResource(ctx, "default_cluster", "flkgiwd13-akjdoe-0") - -Below is an example interface that shows allocation and deallocation of resources. - -.. code-block:: go - - type ResourceManager interface { - GetID() string - // During execution, the plugin calls AllocateResource() to register a token in the token pool associated with a resource - // If it is granted an allocation, the token is recorded in the token pool until the same plugin releases it. - // When calling AllocateResource, the plugin has to specify a ResourceConstraintsSpec that contains resource capping constraints at different project and namespace levels. - // The ResourceConstraint pointers in ResourceConstraintsSpec can be set to nil to not have a constraint at that level - AllocateResource(ctx context.Context, namespace ResourceNamespace, allocationToken string, constraintsSpec ResourceConstraintsSpec) (AllocationStatus, error) - // During execution, after an outstanding request is completed, the plugin uses ReleaseResource() to release the allocation of the token from the token pool. This way, it redeems the quota taken by the token - ReleaseResource(ctx context.Context, namespace ResourceNamespace, allocationToken string) error - } - -How can you force ResourceManager to enforce runtime quota allocation constraints? -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Runtime quota allocation constraints can be achieved using ResourceConstraintsSpec. It is a contract that a plugin can specify at different project and namespace levels. - -Let's take an example to understand it. - -You can set ResourceConstraintsSpec to ``nil`` objects, which means there would be no allocation constraints at the respective project and namespace level. When ResourceConstraintsSpec specifies a ``nil`` ProjectScopeResourceConstraint and a non-nil NamespaceScopeResourceConstraint, it means no constraint is enforced at the project level, while the constraint specified at the namespace level is enforced. 
diff --git a/rsts/deployment/configuration/eventing.rst b/rsts/deployment/configuration/eventing.rst deleted file mode 100644 index 1282cd8917..0000000000 --- a/rsts/deployment/configuration/eventing.rst +++ /dev/null @@ -1,64 +0,0 @@ -.. _deployment-configuration-eventing: - -############### -Platform Events -############### - -.. tags:: Configuration, Infrastructure, Advanced - -Progress of Flyte workflow and task execution is delimited by a series of events that are passed from the FlytePropeller to FlyteAdmin. -Administrators can configure FlyteAdmin to send these events onwards to a pub/sub system like SNS/SQS as well. Note that this configuration is distinct from the configuration for notifications :ref:`deployment-configuration-notifications`. They should use separate topics/queues. These events are meant for external consumption, outside the Flyte platform, whereas the notifications pub/sub setup is entirely for Admin itself to send email/pagerduty/etc notifications. - -********* -Use cases -********* - -The external events flow can be useful for tracking data lineage and integrating with existing systems within your organization. - -************************* -Supported Implementations -************************* - -Event egress can be configured to work with **AWS** using `SQS `_ and `SNS `_ or **GCP** `Cloud Pub/Sub `_. - -************* -Configuration -************* - -To turn on, add the following to your FlyteAdmin: - -.. code:: yaml - - external_events.yaml: | - externalEvents: - enable: true - aws: - region: us-east-2 - eventsPublisher: - eventTypes: - - all - topicName: arn:aws:sns:us-east-2:123456:123-my-topic - type: aws - -Helm -==== - -There should already be a section for this in the ``values.yaml`` file. -Update the settings under the ``external_events`` key and turn ``enable`` to ``true``. The same flag is used for Helm as for Admin itself. 
- -***** -Usage -***** - -The events emitted will be base64 encoded binary representation of the following IDL messages: - -* ``admin_event_pb2.TaskExecutionEventRequest`` -* ``admin_event_pb2.NodeExecutionEventRequest`` -* ``admin_event_pb2.WorkflowExecutionEventRequest`` - -Which of these three events is being sent can be distinguished by the subject line of the message, which will be one of the three strings above. - -Note that these message wrap the underlying event messages :std:doc:`found here `. - -.. note:: - The message format may eventually change to an enriched and distinct message type in future releases. diff --git a/rsts/deployment/configuration/general.rst b/rsts/deployment/configuration/general.rst deleted file mode 100644 index c61d096930..0000000000 --- a/rsts/deployment/configuration/general.rst +++ /dev/null @@ -1,740 +0,0 @@ -.. _deployment-configuration-general: - -################################# -Configuring Custom K8s Resources -################################# - -*************************** -Configurable Resource Types -*************************** - -Many platform specifications such as task resource defaults, project namespace Kubernetes quota, and more can be -assigned using default values or custom overrides. Defaults are specified in the FlyteAdmin config and -overrides for specific projects are registered with the FlyteAdmin service. - -You can customize these settings along increasing levels of specificity with Flyte: - -- Domain -- Project and Domain -- Project, Domain, and Workflow name -- Project, Domain, Workflow name and LaunchPlan name - -See :ref:`concepts/control_plane:control plane` to understand projects and domains. -The following section will show you how to configure the settings along -these dimensions. - -Task Resources -============== - -Configuring task :py:class:`resources ` includes -setting default values for unspecified task requests and limits. 
Task resources -also include limits which specify the maximum value that a task request or a limit can have. - -- CPU -- GPU -- Memory -- Storage -- `Ephemeral Storage `__ - -In the absence of an override, the global -`default values `__ -in `task_resource_defaults` are used. - -The override values from the database are assigned at execution, rather than registration time. - -To customize resources for project-domain attributes, define a ``tra.yaml`` file with your overrides: - -.. code-block:: yaml - - defaults: - cpu: "1" - memory: 150Mi - limits: - cpu: "2" - memory: 450Mi - project: flyteexamples - domain: development - -Update the task resource attributes for a project-domain combination: - -.. prompt:: bash $ - - flytectl update task-resource-attribute --attrFile tra.yaml - -.. note:: - - Refer to the :ref:`docs ` to - learn more about the command and its supported flag(s). - -To fetch and verify the individual project-domain attributes: - -.. prompt:: bash $ - - flytectl get task-resource-attribute -p flyteexamples -d development - -.. note:: - - Refer to the :ref:`docs ` to learn - more about the command and its supported flag(s). - -You can view all custom task-resource-attributes by visiting -``protocol://`` and substitute -the protocol and host appropriately. - -Cluster Resources -================= -These are free-form key-value pairs used when filling the templates that the -admin feeds into the cluster manager, which is the process that syncs Kubernetes -resources. - -The keys represent templatized variables in the -`cluster resource template `__ -and the values are what you want to see filled in. - -In the absence of custom override values, you can use ``templateData`` from the -`FlyteAdmin config `__ -as a default. Flyte specifies these defaults by domain and applies them to every -project-domain namespace combination. - -.. note:: - The settings above can be specified on domain, and project-and-domain. 
- Since Flyte hasn't tied the notion of a workflow or a launch plan to any Kubernetes construct, specifying a workflow or launch plan name doesn't make sense. - This is a departure from the usual hierarchy for customizable resources. - -Define an attributes file, ``cra.yaml``: - -.. code-block:: yaml - - domain: development - project: flyteexamples - attributes: - projectQuotaCpu: "1000" - projectQuotaMemory: 5Ti - -To ensure that the overrides reflect in the Kubernetes namespace -``flyteexamples-development`` (that is, the namespace has a resource quota of -1000 CPU cores and 5TB of memory) when the admin fills in cluster resource -templates: - -.. prompt:: bash $ - - flytectl update cluster-resource-attribute --attrFile cra.yaml - -.. note:: - - Refer to the :ref:`docs ` - to learn more about the command and its supported flag(s). - -To fetch and verify the individual project-domain attributes: - -.. prompt:: bash $ - - flytectl get cluster-resource-attribute -p flyteexamples -d development - -.. note:: - - Refer to the :ref:`docs ` to - learn more about the command and its supported flag(s). - -Flyte uses these updated values to fill the template fields for the -``flyteexamples-development`` namespace. - -For other namespaces, the -`platform defaults `__ -apply. - -.. note:: - The template values, for example, ``projectQuotaCpu`` or ``projectQuotaMemory`` are free-form strings. - Ensure that they match the template placeholders in your `template file `__ - for your changes to take effect and custom values to be substituted. - -You can view all custom cluster-resource-attributes by visiting ``protocol://`` -and substitute the protocol and host appropriately. - -Execution Cluster Label -======================= -This allows forcing a matching execution to consistently execute on a specific -Kubernetes cluster for multi-cluster Flyte deployment set-up. - -Define an attributes file in `ec.yaml`: - -.. 
code-block:: yaml - - value: mycluster - domain: development - project: flyteexamples - -Ensure that admin places executions in the flyteexamples project and development domain onto ``mycluster``: - -.. prompt:: bash $ - - flytectl update execution-cluster-label --attrFile ec.yaml - -.. note:: - - Refer to the :ref:`docs ` - to learn more about the command and its supported flag(s). - -To fetch and verify the individual project-domain attributes: - -.. prompt:: bash $ - - flytectl get execution-cluster-label -p flyteexamples -d development - -.. note:: - - Refer to the :ref:`docs ` to - learn more about the command and its supported flag(s). - -You can view all custom execution cluster attributes by visiting -``protocol://`` and substitute -the protocol and host appropriately. - -Execution Queues -================ -Execution queues are defined in -`flyteadmin config `__. -These are used for execution placement for constructs like AWS Batch. - -The **attributes** associated with an execution queue must match the **tags** -for workflow executions. The tags associated with configurable resources are -stored in the admin database. - -.. prompt:: bash $ - - flytectl update execution-queue-attribute - -.. note:: - - Refer to the :ref:`docs ` - to learn more about the command and its supported flag(s). - -You can view existing attributes for which tags can be assigned by visiting -``protocol:///api/v1/matchable_attributes?resource_type=2`` and substitute -the protocol and host appropriately. - -Workflow Execution Config -========================= - -This helps with overriding the config used for workflows execution which includes -`security context `__, `annotations or labels `__ -etc. in the `Workflow execution config `__. -These can be defined at two levels of project-domain or project-domain-workflow: - -.. prompt:: bash $ - - flytectl update workflow-execution-config - -.. 
note:: - - Refer to the :ref:`docs ` - to learn more about the command and its supported flag(s). - -Configuring Service Roles -========================= -You can configure service roles along 3 levels: - -#. Project + domain defaults (every execution launched in this project/domain uses this service account) - -#. Launch plan default (every invocation of this launch plan uses this service account) - -#. Execution time override (overrides at invocation for a specific execution only) - -********* -Hierarchy -********* - -Increasing specificity defines how matchable resource attributes get applied. -The available configurations, in order of decreasing specificity are: - -#. Domain, Project, Workflow name, and LaunchPlan - -#. Domain, Project, and Workflow name - -#. Domain and Project - -#. Domain - -Default values for all and per-domain attributes may be specified in the -FlyteAdmin config as documented in the :std:ref:`deployment-configuration-customizable-resources`. - -Example -======= -If the database includes the following: - -+------------+--------------+----------+-------------+-----------+ -| Domain | Project | Workflow | Launch Plan | Tags | -+============+==============+==========+=============+===========+ -| production | widgetmodels | | | critical | -+------------+--------------+----------+-------------+-----------+ -| production | widgetmodels | Demand | | supply | -+------------+--------------+----------+-------------+-----------+ - -- Any inbound ``CreateExecution`` requests with **[Domain: Production, Project: widgetmodels, Workflow: Demand]** for any launch plan will have a tag value of "supply". -- Any inbound ``CreateExecution`` requests with **[Domain: Production, Project: widgetmodels]** for any workflow other than ``Demand`` and any launch plan will have a tag value "critical". -- All other inbound CreateExecution requests will use the default values specified in the FlyteAdmin config (if any). 
- - -Configuring K8s Pod -=================== - -There are two approaches to applying the K8s Pod configuration. The **recommended** -method is to use Flyte's Compile-time and Runtime PodTemplate schemes. You can do this by creating -K8s PodTemplate resource/s that serves as the base configuration for all the -task Pods that Flyte initializes. This solution ensures completeness regarding -support configuration options and maintainability as new features are added to K8s. - -The legacy technique is to set configuration options in Flyte's K8s plugin configuration. - -.. note :: - - These two approaches can be used simultaneously, where the K8s plugin configuration will override the default PodTemplate values. - -.. _using-k8s-podtemplates: - -******************************* -Using K8s PodTemplates -******************************* - -`PodTemplate `__ -is a K8s native resource used to define a K8s Pod. It contains all the fields in -the PodSpec, in addition to ObjectMeta to control resource-specific metadata -such as Labels or Annotations. They are commonly applied in Deployments, -ReplicaSets, etc to define the managed Pod configuration of the resources. - -Within Flyte, you can leverage this resource to configure Pods created as part -of Flyte's task execution. It ensures complete control over Pod configuration, -supporting all options available through the resource and ensuring maintainability -in future versions. - -Starting with the Flyte 1.4 release, we now have 2 ways of defining `PodTemplate `__: -1. Compile-time PodTemplate defined at the task level -2. Runtime PodTemplates - - -Compile-time PodTemplates -========================= - -We can define a compile-time pod template, as part of the definition of a `Task `__, for example: - -.. 
code-block:: python - - @task( - pod_template=PodTemplate( - primary_container_name="primary", - labels={"lKeyA": "lValA", "lKeyB": "lValB"}, - annotations={"aKeyA": "aValA", "aKeyB": "aValB"}, - pod_spec=V1PodSpec( - containers=[ - V1Container( - name="primary", - image="repo/placeholderImage:0.0.0", - command="echo", - args=["wow"], - resources=V1ResourceRequirements(limits={"cpu": "999", "gpu": "999"}), - env=[V1EnvVar(name="eKeyC", value="eValC"), V1EnvVar(name="eKeyD", value="eValD")], - ), - ], - volumes=[V1Volume(name="volume")], - tolerations=[ - V1Toleration( - key="num-gpus", - operator="Equal", - value=1, - effect="NoSchedule", - ), - ], - ) - ) - ) - def t1() -> int: - ... - -Notice how in this example we are defining a new PodTemplate inline, which allows us to define a full -`V1PodSpec `__ and also define -the name of the primary container, labels, and annotations. - -The term compile-time here refers to the fact that the pod template definition is part of the `TaskSpec `__. - -Runtime PodTemplates -==================== - -Runtime PodTemplates, as the name suggests, are applied during runtime, as part of building the resultant Pod. In terms of how -they are applied, you have two choices: (1) you either elect one specific PodTemplate to be considered as default, or (2) you -define a PodTemplate name and use that in the declaration of the task. Those two options are mutually exclusive, meaning that -in the situation where a default PodTemplate is set and a PodTemplate name is present in the task definition, only the -PodTemplate name will be used. - - -Set the ``default-pod-template-name`` in FlytePropeller --------------------------------------------------------- - -This `option `__ -initializes a K8s informer internally to track system PodTemplate updates -(creates, updates, etc) so that FlytePropeller is -`aware `__ -of the latest PodTemplate definitions in the K8s environment. 
You can find this -setting in `FlytePropeller `__ -config map, which is not set by default. - -An example configuration is: - -.. code-block:: yaml - - plugins: - k8s: - co-pilot: - name: "flyte-copilot-" - image: "cr.flyte.org/flyteorg/flytecopilot:v0.0.15" - start-timeout: "30s" - default-pod-template-name: - -Create a PodTemplate resource ------------------------------- - -Flyte recognizes PodTemplate definitions with the ``default-pod-template-name`` at two granularities. - -1. A system-wide configuration can be created in the same namespace that - FlytePropeller is running in (typically `flyte`). -2. PodTemplates can be applied from the same namespace that the Pod will be - created in. FlytePropeller always favors the PodTemplate with the more - specific namespace. For example, a Pod created in the ``flytesnacks-development`` - namespace will first look for a PodTemplate from the ``flytesnacks-development`` - namespace. If that PodTemplate doesn't exist, it will look for a PodTemplate - in the same namespace that FlytePropeller is running in (in our example, ``flyte``), - and if that doesn't exist, it will begin configuration with an empty PodTemplate. - -Flyte configuration supports all the fields available in the PodTemplate -resource, including container-level configuration. Specifically, containers may -be configured at two granularities, namely "default" and "primary". - -In this scheme, if the default PodTemplate contains a container with the name -"default", that container will be used as the base configuration for all -containers Flyte constructs. Similarly, a container named "primary" will be used -as the base container configuration for all primary containers. If both container -names exist in the default PodTemplate, Flyte first applies the default -configuration, followed by the primary configuration. - -The ``containers`` field is required in each k8s PodSpec. 
If no default -configuration is desired, specifying a container with a name other than "default" -or "primary" (for example, "noop") is considered best practice. Since Flyte only -processes the "default" or "primary" containers, this value will always be dropped -during Pod construction. Similarly, each k8s container is required to have an -``image``. This value will always be overridden by Flyte, so this value may be -set to anything. However, we recommend using a real image, for example -``docker.io/rwgrim/docker-noop``. - -Using ``pod_template_name`` in a Task --------------------------------------- - -It's also possible to use PodTemplate in tasks by specifying ``pod_template_name`` in the task definition. For example: - -.. code-block:: python - - @task( - pod_template_name="a_pod_template", - ) - def t1() -> int: - ... - -In this example we're specifying that a previously created Runtime PodTemplate resource named ``a_pod_template`` is going to be applied. -The only requirement is that this PodTemplate exists at the moment this task is about to be executed. - - -********************************* -Flyte's K8s Plugin Configuration -********************************* - -The FlytePlugins repository defines `configuration `__ -for the Flyte K8s Plugin. They contain a variety of common options for Pod configuration -which are applied when constructing a Pod. Typically, these options map one-to-one -with K8s Pod fields. This makes it difficult to maintain configuration options as K8s -versions change and fields are added/deprecated. - -********************************* -Evaluation Order in PodTemplates -********************************* - -The following diagram shows the precedence in evaluation order between the different types of PodTemplates and K8s Plugin Configuration. The precedence is higher at the top and decreases as the height of the tree increases. - -.. 
mermaid:: - :alt: Evaluation order of PodTemplates - - graph BT - B["@task pod_template"] --> A["k8s plugin"] - C["runtime PodTemplate"] --> B - D["@task pod_template_name"] --> B - - -To better understand how Flyte constructs task execution Pods based on Compile-time and Runtime PodTemplates, -and K8s plugin configuration options, let's take a few examples. - -Example 1: Runtime PodTemplate and K8s Plugin Configuration -=========================================================== - -If you have a Runtime PodTemplate defined in the ``flyte`` namespace -(where FlytePropeller instance is running), then it is applied to all Pods that -Flyte creates, unless a **more specific** PodTemplate is defined in the namespace -where you start the Pod. - -An example PodTemplate is shown: - -.. code-block:: yaml - - apiVersion: v1 - kind: PodTemplate - metadata: - name: flyte-template - namespace: flyte - template: - metadata: - labels: - - foo - annotations: - - foo: initial-value - - bar: initial-value - spec: - containers: - - name: default - image: docker.io/rwgrim/docker-noop - terminationMessagePath: "/dev/foo" - hostNetwork: false - -In addition, the K8s plugin configuration in FlytePropeller defines the default -Pod Labels, Annotations, and enables the host networking. - -.. code-block:: yaml - - plugins: - k8s: - default-labels: - - bar - default-annotations: - - foo: overridden-value - - baz: non-overridden-value - enable-host-networking-pod: true - -To construct a Pod, FlytePropeller initializes a Pod definition using the default -PodTemplate. This definition is applied to the K8s plugin configuration values, -and any task-specific configuration is overlaid. During the process, when lists -are merged, values are appended and when maps are merged, the values are overridden. -The resultant Pod using the above default PodTemplate and K8s Plugin configuration is shown: - -.. 
code-block:: yaml - - apiVersion: v1 - kind: Pod - metadata: - name: example-pod - namespace: flytesnacks-development - labels: - - foo // maintained initial value - - bar // value appended by k8s plugin configuration - annotations: - - foo: overridden-value // value overridden by k8s plugin configuration - - bar: initial-value // maintained initial value - - baz: non-overridden-value // value added by k8s plugin configuration - spec: - containers: - - name: ax9kd5xb4p8r45bpdv7v-n0-0 - image: ghcr.io/flyteorg/flytecookbook:core-bfee7e549ad749bfb55922e130f4330a0ebc25b0 - terminationMessagePath: "/dev/foo" - // remaining container configuration omitted - hostNetwork: true // overridden by the k8s plugin configuration - -The last step in constructing a Pod is to apply any task-specific configuration. -These options follow the same rules as merging the default PodTemplate and K8s -Plugin configuration (that is, list appends and map overrides). Task-specific -options are intentionally robust to provide fine-grained control over task -execution in diverse use-cases. Therefore, exploration is beyond this scope -and has therefore been omitted from this documentation. - -Example 2: A Runtime and Compile-time PodTemplates -================================================== - -In this example we're going to have a Runtime PodTemplate and a Compile-time PodTemplate defined in a task. - -Let's say we have this Runtime PodTemplate defined in the same namespace as the one used to kick off an execution -of the task. For example: - -.. code-block:: yaml - - apiVersion: v1 - kind: PodTemplate - metadata: - name: flyte-template - namespace: flytesnacks-development - template: - metadata: - annotations: - - annotation_1: initial-value - - bar: initial-value - spec: - containers: - - name: default - image: docker.io/rwgrim/docker-noop - terminationMessagePath: "/dev/foo" - -And the definition of the Compile-time PodTemplate in a task: - -.. 
code-block:: python - - @task( - pod_template=PodTemplate( - primary_container_name="primary", - labels={ - "label_1": "value-1", - "label_2": "value-2", - }, - annotations={ - "annotation_1": "value-1", - "annotation_2": "value-2", - }, - pod_spec=V1PodSpec( - containers=[ - V1Container( - name="primary", - image="a.b.c/image:v1", - command="cmd", - args=[], - ), - ], - ) - ) - ) - def t1() -> int: - ... - -The resultant Pod is as follows: - -.. code-block:: yaml - - apiVersion: v1 - kind: Pod - metadata: - name: example-pod - namespace: flytesnacks-development - labels: - - label_1: value-1 # from Compile-time value - - label_2: value-2 # from Compile-time value - annotations: - - annotation_1: value-1 # value overridden by Compile-time PodTemplate - - annotation_2: value-2 # from Compile-time PodTemplate - - bar: initial-value # from Runtime PodTemplate - spec: - containers: - - name: default - image: docker.io/rwgrim/docker-noop - terminationMessagePath: "/dev/foo" - - name: primary - image: a.b.c/image:v1 - command: cmd - args: [] - // remaining container configuration omitted - -Notice how options follow the same merging rules, i.e. lists append and maps override. - - -Example 3: Runtime and Compile-time PodTemplates and K8s Plugin Configuration -============================================================================= - -Now let's make a slightly more complicated example where now we have both Compile-time and Runtime PodTemplates being combined -with K8s Configuration. - -Here's the definition of a Compile-time PodTemplate: - -.. 
code-block:: python - - @task( - pod_template=PodTemplate( - primary_container_name="primary", - labels={ - "label_1": "value-compile", - "label_2": "value-compile", - }, - annotations={ - "annotation_1": "value-compile", - "annotation_2": "value-compile", - }, - pod_spec=V1PodSpec( - containers=[ - V1Container( - name="primary", - image="a.b.c/image:v1", - command="cmd", - args=[], - ), - ], - host_network=True, - ) - ) - ) - def t1() -> int: - ... - - -And a Runtime PodTemplate: - -.. code-block:: yaml - - apiVersion: v1 - kind: PodTemplate - metadata: - name: flyte-template - namespace: flyte - template: - metadata: - labels: - - label_1: value-runtime - - label_2: value-runtime - - label_3: value-runtime - annotations: - - foo: value-runtime - - bar: value-runtime - spec: - containers: - - name: default - image: docker.io/rwgrim/docker-noop - terminationMessagePath: "/dev/foo" - hostNetwork: false - -And the following K8s Plugin Configuration: - -.. code-block:: yaml - - plugins: - k8s: - default-labels: - - label_1: value-plugin - default-annotations: - - annotation_1: value-plugin - - baz: value-plugin - -The resultant pod for that task is as follows: - -.. 
code-block:: yaml - - apiVersion: v1 - kind: Pod - metadata: - name: example-pod - namespace: flytesnacks-development - labels: - - label_1: value-plugin - - label_2: value-compile - annotations: - - annotation_1: value-plugin - - annotation_2: value-compile - - foo: value-runtime - - bar: value-runtime - - baz: value-plugin - spec: - containers: - - name: default - image: docker.io/rwgrim/docker-noop - terminationMessagePath: "/dev/foo" - - name: primary - image: a.b.c/image:v1 - command: cmd - args: [] - // remaining container configuration omitted diff --git a/rsts/deployment/configuration/generated/datacatalog_config.rst b/rsts/deployment/configuration/generated/datacatalog_config.rst deleted file mode 100644 index d65ac218bb..0000000000 --- a/rsts/deployment/configuration/generated/datacatalog_config.rst +++ /dev/null @@ -1,996 +0,0 @@ -.. _flytedatacatalog-config-specification: - -######################################### -Flyte Datacatalog Configuration -######################################### - -- `application <#section-application>`_ - -- `database <#section-database>`_ - -- `datacatalog <#section-datacatalog>`_ - -- `logger <#section-logger>`_ - -- `otel <#section-otel>`_ - -- `storage <#section-storage>`_ - -Section: application -======================================================================================================================== - -grpcPort (int) ------------------------------------------------------------------------------------------------------------------------- - -On which grpc port to serve Catalog - -**Default Value**: - -.. code-block:: yaml - - "8081" - - -grpcServerReflection (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable GRPC Server Reflection - -**Default Value**: - -.. 
code-block:: yaml - - "true" - - -httpPort (int) ------------------------------------------------------------------------------------------------------------------------- - -On which http port to serve Catalog - -**Default Value**: - -.. code-block:: yaml - - "8080" - - -secure (bool) ------------------------------------------------------------------------------------------------------------------------- - -Whether to run Catalog in secure mode or not - -**Default Value**: - -.. code-block:: yaml - - "false" - - -readHeaderTimeoutSeconds (int) ------------------------------------------------------------------------------------------------------------------------- - -The amount of time allowed to read request headers. - -**Default Value**: - -.. code-block:: yaml - - "32" - - -Section: database -======================================================================================================================== - -host (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -port (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -dbname (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -username (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -password (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -passwordPath (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -options (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -debug (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "false" - - -enableForeignKeyConstraintWhenMigrating (bool) ------------------------------------------------------------------------------------------------------------------------- - -Whether to enable gorm foreign keys when migrating the db - -**Default Value**: - -.. code-block:: yaml - - "false" - - -maxIdleConnections (int) ------------------------------------------------------------------------------------------------------------------------- - -maxIdleConnections sets the maximum number of connections in the idle connection pool. - -**Default Value**: - -.. code-block:: yaml - - "10" - - -maxOpenConnections (int) ------------------------------------------------------------------------------------------------------------------------- - -maxOpenConnections sets the maximum number of open connections to the database. - -**Default Value**: - -.. code-block:: yaml - - "100" - - -connMaxLifeTime (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -sets the maximum amount of time a connection may be reused - -**Default Value**: - -.. code-block:: yaml - - 1h0m0s - - -postgres (`database.PostgresConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - dbname: postgres - debug: false - host: postgres - options: sslmode=disable - password: "" - passwordPath: "" - port: 5432 - username: postgres - - -sqlite (`database.SQLiteConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - file: "" - - -config.Duration -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Duration (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 1h0m0s - - -database.PostgresConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -host (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The host name of the database server - -**Default Value**: - -.. code-block:: yaml - - postgres - - -port (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The port name of the database server - -**Default Value**: - -.. code-block:: yaml - - "5432" - - -dbname (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The database name - -**Default Value**: - -.. code-block:: yaml - - postgres - - -username (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The database user who is connecting to the server. - -**Default Value**: - -.. 
code-block:: yaml - - postgres - - -password (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The database password. - -**Default Value**: - -.. code-block:: yaml - - "" - - -passwordPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Points to the file containing the database password. - -**Default Value**: - -.. code-block:: yaml - - "" - - -options (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -See http://gorm.io/docs/connecting_to_the_database.html for available options passed, in addition to the above. - -**Default Value**: - -.. code-block:: yaml - - sslmode=disable - - -debug (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Whether or not to start the database connection with debug mode enabled. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -database.SQLiteConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -file (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The path to the file (existing or new) where the DB should be created / stored. If existing, then this will be re-used, else a new will be created - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -Section: datacatalog -======================================================================================================================== - -storage-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -StoragePrefix specifies the prefix where DataCatalog stores offloaded ArtifactData in CloudStorage. If not specified, the data will be stored in the base container directly. - -**Default Value**: - -.. code-block:: yaml - - metadata - - -metrics-scope (string) ------------------------------------------------------------------------------------------------------------------------- - -Scope that the metrics will record under. - -**Default Value**: - -.. code-block:: yaml - - datacatalog - - -profiler-port (int) ------------------------------------------------------------------------------------------------------------------------- - -Port that the profiling service is listening on. - -**Default Value**: - -.. code-block:: yaml - - "10254" - - -heartbeat-grace-period-multiplier (int) ------------------------------------------------------------------------------------------------------------------------- - -Number of heartbeats before a reservation expires without an extension. - -**Default Value**: - -.. code-block:: yaml - - "3" - - -max-reservation-heartbeat (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -The maximum available reservation extension heartbeat interval. - -**Default Value**: - -.. code-block:: yaml - - 10s - - -Section: logger -======================================================================================================================== - -show-source (bool) ------------------------------------------------------------------------------------------------------------------------- - -Includes source code location in logs. 
- -**Default Value**: - -.. code-block:: yaml - - "false" - - -mute (bool) ------------------------------------------------------------------------------------------------------------------------- - -Mutes all logs regardless of severity. Intended for benchmarks/tests only. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -level (int) ------------------------------------------------------------------------------------------------------------------------- - -Sets the minimum logging level. - -**Default Value**: - -.. code-block:: yaml - - "3" - - -formatter (`logger.FormatterConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets logging format. - -**Default Value**: - -.. code-block:: yaml - - type: json - - -logger.FormatterConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets logging format type. - -**Default Value**: - -.. code-block:: yaml - - json - - -Section: otel -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -Sets the type of exporter to configure [noop/file/jaeger]. - -**Default Value**: - -.. code-block:: yaml - - noop - - -file (`otelutils.FileConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration for exporting telemetry traces to a file - -**Default Value**: - -.. 
code-block:: yaml - - filename: /tmp/trace.txt - - -jaeger (`otelutils.JaegerConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration for exporting telemetry traces to a jaeger - -**Default Value**: - -.. code-block:: yaml - - endpoint: http://localhost:14268/api/traces - - -otelutils.FileConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -filename (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Filename to store exported telemetry traces - -**Default Value**: - -.. code-block:: yaml - - /tmp/trace.txt - - -otelutils.JaegerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -endpoint (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Endpoint for the jaeger telemtry trace ingestor - -**Default Value**: - -.. code-block:: yaml - - http://localhost:14268/api/traces - - -Section: storage -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -Sets the type of storage to configure [s3/minio/local/mem/stow]. - -**Default Value**: - -.. code-block:: yaml - - s3 - - -connection (`storage.ConnectionConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - access-key: "" - auth-type: iam - disable-ssl: false - endpoint: "" - region: us-east-1 - secret-key: "" - - -stow (`storage.StowConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Storage config for stow backend. - -**Default Value**: - -.. code-block:: yaml - - {} - - -container (string) ------------------------------------------------------------------------------------------------------------------------- - -Initial container (in s3 a bucket) to create -if it doesn't exist-.' - -**Default Value**: - -.. code-block:: yaml - - "" - - -enable-multicontainer (bool) ------------------------------------------------------------------------------------------------------------------------- - -If this is true, then the container argument is overlooked and redundant. This config will automatically open new connections to new containers/buckets as they are encountered - -**Default Value**: - -.. code-block:: yaml - - "false" - - -cache (`storage.CachingConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - max_size_mbs: 0 - target_gc_percent: 0 - - -limits (`storage.LimitsConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets limits for stores. - -**Default Value**: - -.. code-block:: yaml - - maxDownloadMBs: 2 - - -defaultHttpClient (`storage.HTTPClientConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets the default http client config. - -**Default Value**: - -.. 
code-block:: yaml - - headers: null - timeout: 0s - - -signedUrl (`storage.SignedURLConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets config for SignedURL. - -**Default Value**: - -.. code-block:: yaml - - {} - - -storage.CachingConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -max_size_mbs (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used - -**Default Value**: - -.. code-block:: yaml - - "0" - - -target_gc_percent (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets the garbage collection target percentage. - -**Default Value**: - -.. code-block:: yaml - - "0" - - -storage.ConnectionConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -endpoint (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -URL for storage client to connect to. - -**Default Value**: - -.. code-block:: yaml - - "" - - -auth-type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Auth Type to use [iam,accesskey]. - -**Default Value**: - -.. code-block:: yaml - - iam - - -access-key (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Access key to use. Only required when authtype is set to accesskey. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -secret-key (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Secret to use when accesskey is set. - -**Default Value**: - -.. code-block:: yaml - - "" - - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Region to connect to. - -**Default Value**: - -.. code-block:: yaml - - us-east-1 - - -disable-ssl (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Disables SSL connection. Should only be used for development. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -config.URL -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -URL (`url.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - ForceQuery: false - Fragment: "" - Host: "" - OmitHost: false - Opaque: "" - Path: "" - RawFragment: "" - RawPath: "" - RawQuery: "" - Scheme: "" - User: null - - -url.URL -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Scheme (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Opaque (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -User (url.Userinfo) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - null - - -Host (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Path (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -RawPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -OmitHost (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -ForceQuery (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -RawQuery (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Fragment (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -RawFragment (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -storage.HTTPClientConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -headers (map[string][]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - null - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets time out on the http client. - -**Default Value**: - -.. code-block:: yaml - - 0s - - -storage.LimitsConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -maxDownloadMBs (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum allowed download size (in MBs) per call. - -**Default Value**: - -.. code-block:: yaml - - "2" - - -storage.SignedURLConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -stowConfigOverride (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -storage.StowConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -kind (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Kind of Stow backend to use. Refer to github/flyteorg/stow - -**Default Value**: - -.. code-block:: yaml - - "" - - -config (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Configuration for stow backend. Refer to github/flyteorg/stow - -**Default Value**: - -.. 
code-block:: yaml - - {} - - diff --git a/rsts/deployment/configuration/generated/flyteadmin_config.rst b/rsts/deployment/configuration/generated/flyteadmin_config.rst deleted file mode 100644 index 8952377f71..0000000000 --- a/rsts/deployment/configuration/generated/flyteadmin_config.rst +++ /dev/null @@ -1,5325 +0,0 @@ -.. _flyteadmin-config-specification: - -######################################### -Flyte Admin Configuration -######################################### - -- `admin <#section-admin>`_ - -- `auth <#section-auth>`_ - -- `cloudevents <#section-cloudevents>`_ - -- `cluster_resources <#section-cluster_resources>`_ - -- `clusterpools <#section-clusterpools>`_ - -- `clusters <#section-clusters>`_ - -- `database <#section-database>`_ - -- `domains <#section-domains>`_ - -- `externalevents <#section-externalevents>`_ - -- `flyteadmin <#section-flyteadmin>`_ - -- `logger <#section-logger>`_ - -- `namespace_mapping <#section-namespace_mapping>`_ - -- `notifications <#section-notifications>`_ - -- `otel <#section-otel>`_ - -- `plugins <#section-plugins>`_ - -- `propeller <#section-propeller>`_ - -- `qualityofservice <#section-qualityofservice>`_ - -- `queues <#section-queues>`_ - -- `registration <#section-registration>`_ - -- `remotedata <#section-remotedata>`_ - -- `scheduler <#section-scheduler>`_ - -- `secrets <#section-secrets>`_ - -- `server <#section-server>`_ - -- `storage <#section-storage>`_ - -- `task_resources <#section-task_resources>`_ - -- `task_type_whitelist <#section-task_type_whitelist>`_ - -Section: admin -======================================================================================================================== - -endpoint (`config.URL`_) ------------------------------------------------------------------------------------------------------------------------- - -For admin types, specify where the uri of the service is located. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -insecure (bool) ------------------------------------------------------------------------------------------------------------------------- - -Use insecure connection. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -insecureSkipVerify (bool) ------------------------------------------------------------------------------------------------------------------------- - -InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' - -**Default Value**: - -.. code-block:: yaml - - "false" - - -caCertFilePath (string) ------------------------------------------------------------------------------------------------------------------------- - -Use specified certificate file to verify the admin server peer. - -**Default Value**: - -.. code-block:: yaml - - "" - - -maxBackoffDelay (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Max delay for grpc backoff - -**Default Value**: - -.. code-block:: yaml - - 8s - - -perRetryTimeout (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -gRPC per retry timeout - -**Default Value**: - -.. code-block:: yaml - - 15s - - -maxRetries (int) ------------------------------------------------------------------------------------------------------------------------- - -Max number of gRPC retries - -**Default Value**: - -.. code-block:: yaml - - "4" - - -authType (uint8) ------------------------------------------------------------------------------------------------------------------------- - -Type of OAuth2 flow used for communicating with admin.ClientSecret,Pkce,ExternalCommand are valid values - -**Default Value**: - -.. 
code-block:: yaml - - ClientSecret - - -tokenRefreshWindow (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Max duration between token refresh attempt and token expiry. - -**Default Value**: - -.. code-block:: yaml - - 0s - - -useAuth (bool) ------------------------------------------------------------------------------------------------------------------------- - -Deprecated: Auth will be enabled/disabled based on admin's dynamically discovered information. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -clientId (string) ------------------------------------------------------------------------------------------------------------------------- - -Client ID - -**Default Value**: - -.. code-block:: yaml - - flytepropeller - - -clientSecretLocation (string) ------------------------------------------------------------------------------------------------------------------------- - -File containing the client secret - -**Default Value**: - -.. code-block:: yaml - - /etc/secrets/client_secret - - -clientSecretEnvVar (string) ------------------------------------------------------------------------------------------------------------------------- - -Environment variable containing the client secret - -**Default Value**: - -.. code-block:: yaml - - "" - - -scopes ([]string) ------------------------------------------------------------------------------------------------------------------------- - -List of scopes to request - -**Default Value**: - -.. code-block:: yaml - - [] - - -useAudienceFromAdmin (bool) ------------------------------------------------------------------------------------------------------------------------- - -Use Audience configured from admins public endpoint config. - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -audience (string) ------------------------------------------------------------------------------------------------------------------------- - -Audience to use when initiating OAuth2 authorization requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -authorizationServerUrl (string) ------------------------------------------------------------------------------------------------------------------------- - -This is the URL to your IdP's authorization server. It'll default to Endpoint - -**Default Value**: - -.. code-block:: yaml - - "" - - -tokenUrl (string) ------------------------------------------------------------------------------------------------------------------------- - -OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. - -**Default Value**: - -.. code-block:: yaml - - "" - - -authorizationHeader (string) ------------------------------------------------------------------------------------------------------------------------- - -Custom metadata header to pass JWT - -**Default Value**: - -.. code-block:: yaml - - "" - - -pkceConfig (`pkce.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -Config for Pkce authentication flow. - -**Default Value**: - -.. code-block:: yaml - - refreshTime: 5m0s - timeout: 2m0s - - -deviceFlowConfig (`deviceflow.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -Config for Device authentication flow. - -**Default Value**: - -.. code-block:: yaml - - pollInterval: 5s - refreshTime: 5m0s - timeout: 10m0s - - -command ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Command for external authentication token generation - -**Default Value**: - -.. 
code-block:: yaml - - [] - - -proxyCommand ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Command for external proxy-authorization token generation - -**Default Value**: - -.. code-block:: yaml - - [] - - -defaultServiceConfig (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -httpProxyURL (`config.URL`_) ------------------------------------------------------------------------------------------------------------------------- - -OPTIONAL: HTTP Proxy to be used for OAuth requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -config.Duration -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Duration (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 8s - - -config.URL -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -URL (`url.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - ForceQuery: false - Fragment: "" - Host: "" - OmitHost: false - Opaque: "" - Path: "" - RawFragment: "" - RawPath: "" - RawQuery: "" - Scheme: "" - User: null - - -url.URL -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Scheme (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -Opaque (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -User (url.Userinfo) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -Host (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Path (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -RawPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -OmitHost (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -ForceQuery (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -RawQuery (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Fragment (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -RawFragment (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -deviceflow.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -refreshTime (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -grace period from the token expiry after which it would refresh the token. - -**Default Value**: - -.. code-block:: yaml - - 5m0s - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -amount of time the device flow should complete or else it will be cancelled. - -**Default Value**: - -.. code-block:: yaml - - 10m0s - - -pollInterval (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -amount of time the device flow would poll the token endpoint if auth server doesn't return a polling interval. Okta and google IDP do return an interval' - -**Default Value**: - -.. code-block:: yaml - - 5s - - -pkce.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Amount of time the browser session would be active for authentication from client app. - -**Default Value**: - -.. code-block:: yaml - - 2m0s - - -refreshTime (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -grace period from the token expiry after which it would refresh the token. - -**Default Value**: - -.. 
code-block:: yaml - - 5m0s - - -Section: auth -======================================================================================================================== - -httpAuthorizationHeader (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - flyte-authorization - - -grpcAuthorizationHeader (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - flyte-authorization - - -disableForHttp (bool) ------------------------------------------------------------------------------------------------------------------------- - -Disables auth enforcement on HTTP Endpoints. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -disableForGrpc (bool) ------------------------------------------------------------------------------------------------------------------------- - -Disables auth enforcement on Grpc Endpoints. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -authorizedUris ([]config.URL) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -httpProxyURL (`config.URL`_) ------------------------------------------------------------------------------------------------------------------------- - -OPTIONAL: HTTP Proxy to be used for OAuth requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -userAuth (`config.UserAuthConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Defines Auth options for users. - -**Default Value**: - -.. 
code-block:: yaml - - cookieBlockKeySecretName: cookie_block_key - cookieHashKeySecretName: cookie_hash_key - cookieSetting: - domain: "" - sameSitePolicy: DefaultMode - httpProxyURL: "" - openId: - baseUrl: "" - clientId: "" - clientSecretFile: "" - clientSecretName: oidc_client_secret - scopes: - - openid - - profile - redirectUrl: /console - - -appAuth (`config.OAuth2Options`_) ------------------------------------------------------------------------------------------------------------------------- - -Defines Auth options for apps. UserAuth must be enabled for AppAuth to work. - -**Default Value**: - -.. code-block:: yaml - - authServerType: Self - externalAuthServer: - allowedAudience: [] - baseUrl: "" - httpProxyURL: "" - metadataUrl: "" - retryAttempts: 5 - retryDelay: 1s - selfAuthServer: - accessTokenLifespan: 30m0s - authorizationCodeLifespan: 5m0s - claimSymmetricEncryptionKeySecretName: claim_symmetric_key - issuer: "" - oldTokenSigningRSAKeySecretName: token_rsa_key_old.pem - refreshTokenLifespan: 1h0m0s - staticClients: - flyte-cli: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flyte-cli - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytectl: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flytectl - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytepropeller: - audience: null - client_secret: JDJhJDA2JGQ2UFFuMlFBRlUzY0w1VjhNRGtldXVrNjN4dWJxVXhOeGp0ZlB3LkZjOU1nVjZ2cG15T0l5 - grant_types: - - refresh_token - - client_credentials - id: flytepropeller - public: false - redirect_uris: - - http://localhost:3846/callback - response_types: - - token - scopes: - - all - - offline - - access_token - 
tokenSigningRSAKeySecretName: token_rsa_key.pem - thirdPartyConfig: - flyteClient: - audience: "" - clientId: flytectl - redirectUri: http://localhost:53593/callback - scopes: - - all - - offline - - -config.OAuth2Options -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -authServerType (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - Self - - -selfAuthServer (`config.AuthorizationServer`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Authorization Server config to run as a service. Use this when using an IdP that does not offer a custom OAuth2 Authorization Server. - -**Default Value**: - -.. code-block:: yaml - - accessTokenLifespan: 30m0s - authorizationCodeLifespan: 5m0s - claimSymmetricEncryptionKeySecretName: claim_symmetric_key - issuer: "" - oldTokenSigningRSAKeySecretName: token_rsa_key_old.pem - refreshTokenLifespan: 1h0m0s - staticClients: - flyte-cli: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flyte-cli - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytectl: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flytectl - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytepropeller: - audience: null - client_secret: JDJhJDA2JGQ2UFFuMlFBRlUzY0w1VjhNRGtldXVrNjN4dWJxVXhOeGp0ZlB3LkZjOU1nVjZ2cG15T0l5 - grant_types: - - refresh_token - - client_credentials - id: flytepropeller - public: false - redirect_uris: - - 
http://localhost:3846/callback - response_types: - - token - scopes: - - all - - offline - - access_token - tokenSigningRSAKeySecretName: token_rsa_key.pem - - -externalAuthServer (`config.ExternalAuthorizationServer`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -External Authorization Server config. - -**Default Value**: - -.. code-block:: yaml - - allowedAudience: [] - baseUrl: "" - httpProxyURL: "" - metadataUrl: "" - retryAttempts: 5 - retryDelay: 1s - - -thirdPartyConfig (`config.ThirdPartyConfigOptions`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines settings to instruct flyte cli tools (and optionally others) on what config to use to setup their client. - -**Default Value**: - -.. code-block:: yaml - - flyteClient: - audience: "" - clientId: flytectl - redirectUri: http://localhost:53593/callback - scopes: - - all - - offline - - -config.AuthorizationServer -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -issuer (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the issuer to use when issuing and validating tokens. The default value is https:/// - -**Default Value**: - -.. code-block:: yaml - - "" - - -accessTokenLifespan (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the lifespan of issued access tokens. - -**Default Value**: - -.. code-block:: yaml - - 30m0s - - -refreshTokenLifespan (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the lifespan of issued refresh tokens. - -**Default Value**: - -.. 
code-block:: yaml - - 1h0m0s - - -authorizationCodeLifespan (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the lifespan of issued authorization codes. - -**Default Value**: - -.. code-block:: yaml - - 5m0s - - -claimSymmetricEncryptionKeySecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Secret name to use to encrypt claims in authcode token. - -**Default Value**: - -.. code-block:: yaml - - claim_symmetric_key - - -tokenSigningRSAKeySecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Secret name to use to retrieve RSA Signing Key. - -**Default Value**: - -.. code-block:: yaml - - token_rsa_key.pem - - -oldTokenSigningRSAKeySecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Secret name to use to retrieve Old RSA Signing Key. This can be useful during key rotation to continue to accept older tokens. - -**Default Value**: - -.. code-block:: yaml - - token_rsa_key_old.pem - - -staticClients (map[string]*fosite.DefaultClient) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - flyte-cli: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flyte-cli - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytectl: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flytectl - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytepropeller: - audience: null - client_secret: JDJhJDA2JGQ2UFFuMlFBRlUzY0w1VjhNRGtldXVrNjN4dWJxVXhOeGp0ZlB3LkZjOU1nVjZ2cG15T0l5 - grant_types: - - refresh_token - - client_credentials - id: flytepropeller - public: false - redirect_uris: - - http://localhost:3846/callback - response_types: - - token - scopes: - - all - - offline - - access_token - - -config.ExternalAuthorizationServer -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -baseUrl (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -This should be the base url of the authorization server that you are trying to hit. With Okta for instance, it will look something like https://company.okta.com/oauth2/abcdef123456789/ - -**Default Value**: - -.. code-block:: yaml - - "" - - -allowedAudience ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Optional: A list of allowed audiences. If not provided, the audience is expected to be the public Uri of the service. - -**Default Value**: - -.. 
code-block:: yaml - - [] - - -metadataUrl (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Optional: If the server doesn't support /.well-known/oauth-authorization-server, you can set a custom metadata url here. - -**Default Value**: - -.. code-block:: yaml - - "" - - -httpProxyURL (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: HTTP Proxy to be used for OAuth requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -retryAttempts (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Optional: The number of attempted retries on a transient failure to get the OAuth metadata - -**Default Value**: - -.. code-block:: yaml - - "5" - - -retryDelay (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Optional: Duration to wait between retries - -**Default Value**: - -.. code-block:: yaml - - 1s - - -config.ThirdPartyConfigOptions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -flyteClient (`config.FlyteClientConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - audience: "" - clientId: flytectl - redirectUri: http://localhost:53593/callback - scopes: - - all - - offline - - -config.FlyteClientConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -clientId (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -public identifier for the app which handles authorization for a Flyte deployment - -**Default Value**: - -.. code-block:: yaml - - flytectl - - -redirectUri (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -This is the callback uri registered with the app which handles authorization for a Flyte deployment - -**Default Value**: - -.. code-block:: yaml - - http://localhost:53593/callback - - -scopes ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Recommended scopes for the client to request. - -**Default Value**: - -.. code-block:: yaml - - - all - - offline - - -audience (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Audience to use when initiating OAuth2 authorization requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -config.UserAuthConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -redirectUrl (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - /console - - -openId (`config.OpenIDOptions`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OpenID Configuration for User Auth - -**Default Value**: - -.. code-block:: yaml - - baseUrl: "" - clientId: "" - clientSecretFile: "" - clientSecretName: oidc_client_secret - scopes: - - openid - - profile - - -httpProxyURL (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: HTTP Proxy to be used for OAuth requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -cookieHashKeySecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Secret name to use for cookie hash key. - -**Default Value**: - -.. code-block:: yaml - - cookie_hash_key - - -cookieBlockKeySecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Secret name to use for cookie block key. - -**Default Value**: - -.. code-block:: yaml - - cookie_block_key - - -cookieSetting (`config.CookieSettings`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -settings used by cookies created for user auth - -**Default Value**: - -.. code-block:: yaml - - domain: "" - sameSitePolicy: DefaultMode - - -config.CookieSettings -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -sameSitePolicy (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Allows you to declare if your cookie should be restricted to a first-party or same-site context. Wrapper around http.SameSite. - -**Default Value**: - -.. 
code-block:: yaml - - DefaultMode - - -domain (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Allows you to set the domain attribute on the auth cookies. - -**Default Value**: - -.. code-block:: yaml - - "" - - -config.OpenIDOptions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -clientId (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -clientSecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - oidc_client_secret - - -clientSecretFile (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -baseUrl (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -scopes ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - openid - - profile - - -Section: cloudevents -======================================================================================================================== - -enable (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - local - - -aws (`interfaces.AWSConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - region: "" - - -gcp (`interfaces.GCPConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - projectId: "" - - -kafka (`interfaces.KafkaConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - brokers: null - version: "" - - -eventsPublisher (`interfaces.EventsPublisherConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - eventTypes: null - topicName: "" - - -reconnectAttempts (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -reconnectDelaySeconds (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -interfaces.AWSConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -interfaces.EventsPublisherConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -topicName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -eventTypes ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -interfaces.GCPConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -projectId (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -interfaces.KafkaConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -version (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -brokers ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -Section: cluster_resources -======================================================================================================================== - -templatePath (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -templateData (map[string]interfaces.DataSource) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - {} - - -refreshInterval (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - 1m0s - - -customData (map[string]map[string]interfaces.DataSource) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - {} - - -standaloneDeployment (bool) ------------------------------------------------------------------------------------------------------------------------- - -Whether the cluster resource sync is running in a standalone deployment and should call flyteadmin service endpoints - -**Default Value**: - -.. code-block:: yaml - - "false" - - -Section: clusterpools -======================================================================================================================== - -clusterPoolAssignments (map[string]interfaces.ClusterPoolAssignment) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - {} - - -Section: clusters -======================================================================================================================== - -clusterConfigs ([]interfaces.ClusterConfig) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - null - - -labelClusterMap (map[string][]interfaces.ClusterEntity) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -defaultExecutionLabel (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -Section: database -======================================================================================================================== - -host (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -port (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -dbname (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -username (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -password (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -passwordPath (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -options (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -debug (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "false" - - -enableForeignKeyConstraintWhenMigrating (bool) ------------------------------------------------------------------------------------------------------------------------- - -Whether to enable gorm foreign keys when migrating the db - -**Default Value**: - -.. code-block:: yaml - - "false" - - -maxIdleConnections (int) ------------------------------------------------------------------------------------------------------------------------- - -maxIdleConnections sets the maximum number of connections in the idle connection pool. - -**Default Value**: - -.. code-block:: yaml - - "10" - - -maxOpenConnections (int) ------------------------------------------------------------------------------------------------------------------------- - -maxOpenConnections sets the maximum number of open connections to the database. - -**Default Value**: - -.. code-block:: yaml - - "100" - - -connMaxLifeTime (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -sets the maximum amount of time a connection may be reused - -**Default Value**: - -.. code-block:: yaml - - 1h0m0s - - -postgres (`database.PostgresConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - dbname: postgres - debug: false - host: postgres - options: sslmode=disable - password: "" - passwordPath: "" - port: 5432 - username: postgres - - -sqlite (`database.SQLiteConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - file: "" - - -database.PostgresConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -host (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The host name of the database server - -**Default Value**: - -.. code-block:: yaml - - postgres - - -port (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The port name of the database server - -**Default Value**: - -.. code-block:: yaml - - "5432" - - -dbname (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The database name - -**Default Value**: - -.. code-block:: yaml - - postgres - - -username (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The database user who is connecting to the server. - -**Default Value**: - -.. code-block:: yaml - - postgres - - -password (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The database password. - -**Default Value**: - -.. code-block:: yaml - - "" - - -passwordPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Points to the file containing the database password. - -**Default Value**: - -.. code-block:: yaml - - "" - - -options (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -See http://gorm.io/docs/connecting_to_the_database.html for available options passed, in addition to the above. - -**Default Value**: - -.. 
code-block:: yaml - - sslmode=disable - - -debug (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Whether or not to start the database connection with debug mode enabled. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -database.SQLiteConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -file (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The path to the file (existing or new) where the DB should be created / stored. If existing, then this will be re-used, else a new will be created - -**Default Value**: - -.. code-block:: yaml - - "" - - -Section: domains -======================================================================================================================== - -id (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - development - - -name (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - development - - -Section: externalevents -======================================================================================================================== - -enable (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "false" - - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - local - - -aws (`interfaces.AWSConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - region: "" - - -gcp (`interfaces.GCPConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - projectId: "" - - -eventsPublisher (`interfaces.EventsPublisherConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - eventTypes: null - topicName: "" - - -reconnectAttempts (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -reconnectDelaySeconds (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -Section: flyteadmin -======================================================================================================================== - -roleNameKey (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -metricsScope (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - 'flyte:' - - -metricsKeys ([]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - - project - - domain - - wf - - task - - phase - - tasktype - - runtime_type - - runtime_version - - app_name - - -profilerPort (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "10254" - - -metadataStoragePrefix ([]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - - metadata - - admin - - -eventVersion (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "2" - - -asyncEventsBufferSize (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "100" - - -maxParallelism (int32) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "25" - - -labels (map[string]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -annotations (map[string]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -interruptible (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "false" - - -overwriteCache (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -assumableIamRole (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -k8sServiceAccount (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -outputLocationPrefix (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -useOffloadedWorkflowClosure (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "false" - - -envs (map[string]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -Section: logger -======================================================================================================================== - -show-source (bool) ------------------------------------------------------------------------------------------------------------------------- - -Includes source code location in logs. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -mute (bool) ------------------------------------------------------------------------------------------------------------------------- - -Mutes all logs regardless of severity. Intended for benchmarks/tests only. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -level (int) ------------------------------------------------------------------------------------------------------------------------- - -Sets the minimum logging level. - -**Default Value**: - -.. 
code-block:: yaml - - "3" - - -formatter (`logger.FormatterConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets logging format. - -**Default Value**: - -.. code-block:: yaml - - type: json - - -logger.FormatterConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets logging format type. - -**Default Value**: - -.. code-block:: yaml - - json - - -Section: namespace_mapping -======================================================================================================================== - -mapping (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -template (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - '{{ project }}-{{ domain }}' - - -templateData (map[string]interfaces.DataSource) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -Section: notifications -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - local - - -region (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -aws (`interfaces.AWSConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - region: "" - - -gcp (`interfaces.GCPConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - projectId: "" - - -publisher (`interfaces.NotificationsPublisherConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - topicName: "" - - -processor (`interfaces.NotificationsProcessorConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - accountId: "" - queueName: "" - - -emailer (`interfaces.NotificationsEmailerConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - body: "" - emailServerConfig: - apiKeyEnvVar: "" - apiKeyFilePath: "" - serviceName: "" - sender: "" - subject: "" - - -reconnectAttempts (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -reconnectDelaySeconds (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "0" - - -interfaces.NotificationsEmailerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -emailServerConfig (`interfaces.EmailServerConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - apiKeyEnvVar: "" - apiKeyFilePath: "" - serviceName: "" - - -subject (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -sender (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -body (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -interfaces.EmailServerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -serviceName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -apiKeyEnvVar (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -apiKeyFilePath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -interfaces.NotificationsProcessorConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -queueName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -accountId (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -interfaces.NotificationsPublisherConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -topicName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Section: otel -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -Sets the type of exporter to configure [noop/file/jaeger]. - -**Default Value**: - -.. code-block:: yaml - - noop - - -file (`otelutils.FileConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration for exporting telemetry traces to a file - -**Default Value**: - -.. code-block:: yaml - - filename: /tmp/trace.txt - - -jaeger (`otelutils.JaegerConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration for exporting telemetry traces to a jaeger - -**Default Value**: - -.. 
code-block:: yaml - - endpoint: http://localhost:14268/api/traces - - -otelutils.FileConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -filename (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Filename to store exported telemetry traces - -**Default Value**: - -.. code-block:: yaml - - /tmp/trace.txt - - -otelutils.JaegerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -endpoint (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Endpoint for the jaeger telemetry trace ingestor - -**Default Value**: - -.. code-block:: yaml - - http://localhost:14268/api/traces - - -Section: plugins -======================================================================================================================== - -catalogcache (`catalog.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - reader: - maxItems: 10000 - maxRetries: 3 - workers: 10 - writer: - maxItems: 10000 - maxRetries: 3 - workers: 10 - - -k8s (`config.K8sPluginConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - co-pilot: - cpu: 500m - default-input-path: /var/flyte/inputs - default-output-path: /var/flyte/outputs - image: cr.flyte.org/flyteorg/flytecopilot:v0.0.15 - input-vol-name: flyte-inputs - memory: 128Mi - name: flyte-copilot- - output-vol-name: flyte-outputs - start-timeout: 1m40s - storage: "" - create-container-error-grace-period: 3m0s - default-annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - default-cpus: "1" - default-env-vars: null - default-env-vars-from-env: null - default-labels: null - default-memory: 1Gi - default-node-selector: null - default-pod-dns-config: null - default-pod-security-context: null - default-pod-template-name: "" - default-pod-template-resync: 30s - default-security-context: null - default-tolerations: null - delete-resource-on-finalize: false - enable-host-networking-pod: null - gpu-device-node-label: k8s.amazonaws.com/accelerator - gpu-partition-size-node-label: k8s.amazonaws.com/gpu-partition-size - gpu-resource-name: nvidia.com/gpu - gpu-unpartitioned-node-selector-requirement: null - gpu-unpartitioned-toleration: null - image-pull-backoff-grace-period: 3m0s - inject-finalizer: false - interruptible-node-selector: null - interruptible-node-selector-requirement: null - interruptible-tolerations: null - non-interruptible-node-selector-requirement: null - resource-tolerations: null - scheduler-name: "" - send-object-events: false - - -catalog.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -reader (`workqueue.Config`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Catalog reader workqueue config. Make sure the index cache is big enough to accommodate the biggest array task allowed to run on the system. - -**Default Value**: - -.. 
code-block:: yaml - - maxItems: 10000 - maxRetries: 3 - workers: 10 - - -writer (`workqueue.Config`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Catalog writer workqueue config. Make sure the index cache is big enough to accommodate the biggest array task allowed to run on the system. - -**Default Value**: - -.. code-block:: yaml - - maxItems: 10000 - maxRetries: 3 - workers: 10 - - -workqueue.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -workers (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Number of concurrent workers to start processing the queue. - -**Default Value**: - -.. code-block:: yaml - - "10" - - -maxRetries (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum number of retries per item. - -**Default Value**: - -.. code-block:: yaml - - "3" - - -maxItems (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum number of entries to keep in the index. - -**Default Value**: - -.. code-block:: yaml - - "10000" - - -config.K8sPluginConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -inject-finalizer (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Instructs the plugin to inject a finalizer on startTask and remove it on task termination. - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -default-annotations (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - - -default-labels (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-env-vars (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-env-vars-from-env (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-cpus (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines a default value for cpu for containers if not specified. - -**Default Value**: - -.. code-block:: yaml - - "1" - - -default-memory (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines a default value for memory for containers if not specified. - -**Default Value**: - -.. code-block:: yaml - - 1Gi - - -default-tolerations ([]v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-node-selector (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - null - - -default-affinity (v1.Affinity) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -scheduler-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines scheduler name. - -**Default Value**: - -.. code-block:: yaml - - "" - - -interruptible-tolerations ([]v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -interruptible-node-selector (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -interruptible-node-selector-requirement (v1.NodeSelectorRequirement) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -non-interruptible-node-selector-requirement (v1.NodeSelectorRequirement) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -resource-tolerations (map[v1.ResourceName][]v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -co-pilot (`config.FlyteCoPilotConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Co-Pilot Configuration - -**Default Value**: - -.. 
code-block:: yaml - - cpu: 500m - default-input-path: /var/flyte/inputs - default-output-path: /var/flyte/outputs - image: cr.flyte.org/flyteorg/flytecopilot:v0.0.15 - input-vol-name: flyte-inputs - memory: 128Mi - name: flyte-copilot- - output-vol-name: flyte-outputs - start-timeout: 1m40s - storage: "" - - -delete-resource-on-finalize (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Instructs the system to delete the resource upon successful execution of a k8s pod rather than have the k8s garbage collector clean it up. This ensures that no resources are kept around (potentially consuming cluster resources). This, however, will cause k8s log links to expire as soon as the resource is finalized. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -create-container-error-grace-period (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 3m0s - - -image-pull-backoff-grace-period (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 3m0s - - -gpu-device-node-label (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - k8s.amazonaws.com/accelerator - - -gpu-partition-size-node-label (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - k8s.amazonaws.com/gpu-partition-size - - -gpu-unpartitioned-node-selector-requirement (v1.NodeSelectorRequirement) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -gpu-unpartitioned-toleration (v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -gpu-resource-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - nvidia.com/gpu - - -default-pod-security-context (v1.PodSecurityContext) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-security-context (v1.SecurityContext) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -enable-host-networking-pod (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - - -default-pod-dns-config (v1.PodDNSConfig) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-pod-template-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the PodTemplate to use as the base for all k8s pods created by FlytePropeller. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -default-pod-template-resync (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Frequency of resyncing default pod templates - -**Default Value**: - -.. code-block:: yaml - - 30s - - -send-object-events (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -If true, will send k8s object events in TaskExecutionEvent updates. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -config.FlyteCoPilotConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Flyte co-pilot sidecar container name prefix. (additional bits will be added after this) - -**Default Value**: - -.. code-block:: yaml - - flyte-copilot- - - -image (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Flyte co-pilot Docker Image FQN - -**Default Value**: - -.. code-block:: yaml - - cr.flyte.org/flyteorg/flytecopilot:v0.0.15 - - -default-input-path (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default path where the volume should be mounted - -**Default Value**: - -.. code-block:: yaml - - /var/flyte/inputs - - -default-output-path (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default path where the volume should be mounted - -**Default Value**: - -.. 
code-block:: yaml - - /var/flyte/outputs - - -input-vol-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the data volume that is created for storing inputs - -**Default Value**: - -.. code-block:: yaml - - flyte-inputs - - -output-vol-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the data volume that is created for storing outputs - -**Default Value**: - -.. code-block:: yaml - - flyte-outputs - - -start-timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 1m40s - - -cpu (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Used to set cpu for co-pilot containers - -**Default Value**: - -.. code-block:: yaml - - 500m - - -memory (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Used to set memory for co-pilot containers - -**Default Value**: - -.. code-block:: yaml - - 128Mi - - -storage (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default storage limit for individual inputs / outputs - -**Default Value**: - -.. code-block:: yaml - - "" - - -resource.Quantity -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -i (`resource.int64Amount`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - {} - - -d (`resource.infDecAmount`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - - -s (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "1" - - -Format (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - DecimalSI - - -resource.infDecAmount -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Dec (inf.Dec) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -resource.int64Amount -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -value (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "1" - - -scale (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "0" - - -Section: propeller -======================================================================================================================== - -kube-config (string) ------------------------------------------------------------------------------------------------------------------------- - -Path to kubernetes client config file. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -master (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -workers (int) ------------------------------------------------------------------------------------------------------------------------- - -Number of threads to process workflows - -**Default Value**: - -.. code-block:: yaml - - "20" - - -workflow-reeval-duration (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Frequency of re-evaluating workflows - -**Default Value**: - -.. code-block:: yaml - - 10s - - -downstream-eval-duration (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Frequency of re-evaluating downstream tasks - -**Default Value**: - -.. code-block:: yaml - - 30s - - -limit-namespace (string) ------------------------------------------------------------------------------------------------------------------------- - -Namespaces to watch for this propeller - -**Default Value**: - -.. code-block:: yaml - - all - - -prof-port (`config.Port`_) ------------------------------------------------------------------------------------------------------------------------- - -Profiler port - -**Default Value**: - -.. code-block:: yaml - - 10254 - - -metadata-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -MetadataPrefix should be used if all the metadata for Flyte executions should be stored under a specific prefix in CloudStorage. If not specified, the data will be stored in the base container directly. - -**Default Value**: - -.. 
code-block:: yaml - - metadata/propeller - - -rawoutput-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -a fully qualified storage path of the form s3://flyte/abc/..., where all data sandboxes should be stored. - -**Default Value**: - -.. code-block:: yaml - - "" - - -queue (`config.CompositeQueueConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Workflow workqueue configuration, affects the way the work is consumed from the queue. - -**Default Value**: - -.. code-block:: yaml - - batch-size: -1 - batching-interval: 1s - queue: - base-delay: 0s - capacity: 10000 - max-delay: 1m0s - rate: 1000 - type: maxof - sub-queue: - base-delay: 0s - capacity: 10000 - max-delay: 0s - rate: 1000 - type: bucket - type: batch - - -metrics-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -An optional prefix for all published metrics. - -**Default Value**: - -.. code-block:: yaml - - flyte - - -metrics-keys ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Metrics labels applied to prometheus metrics emitted by the service. - -**Default Value**: - -.. code-block:: yaml - - - project - - domain - - wf - - task - - -enable-admin-launcher (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable remote Workflow launcher to Admin - -**Default Value**: - -.. code-block:: yaml - - "true" - - -max-workflow-retries (int) ------------------------------------------------------------------------------------------------------------------------- - -Maximum number of retries per workflow - -**Default Value**: - -.. 
code-block:: yaml - - "10" - - -max-ttl-hours (int) ------------------------------------------------------------------------------------------------------------------------- - -Maximum number of hours a completed workflow should be retained. Number between 1-23 hours - -**Default Value**: - -.. code-block:: yaml - - "23" - - -gc-interval (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Run periodic GC every 30 minutes - -**Default Value**: - -.. code-block:: yaml - - 30m0s - - -leader-election (`config.LeaderElectionConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Config for leader election. - -**Default Value**: - -.. code-block:: yaml - - enabled: false - lease-duration: 15s - lock-config-map: - Name: "" - Namespace: "" - renew-deadline: 10s - retry-period: 2s - - -publish-k8s-events (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable events publishing to K8s events API. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -max-output-size-bytes (int64) ------------------------------------------------------------------------------------------------------------------------- - -Maximum size of outputs per task - -**Default Value**: - -.. code-block:: yaml - - "10485760" - - -enable-grpc-latency-metrics (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable grpc latency metrics. Note Histograms metrics can be expensive on Prometheus servers. - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -kube-client-config (`config.KubeClientConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration to control the Kubernetes client - -**Default Value**: - -.. code-block:: yaml - - burst: 25 - qps: 100 - timeout: 30s - - -node-config (`config.NodeConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -config for a workflow node - -**Default Value**: - -.. code-block:: yaml - - default-deadlines: - node-active-deadline: 0s - node-execution-deadline: 0s - workflow-active-deadline: 0s - default-max-attempts: 1 - ignore-retry-cause: false - interruptible-failure-threshold: -1 - max-node-retries-system-failures: 3 - - -max-streak-length (int) ------------------------------------------------------------------------------------------------------------------------- - -Maximum number of consecutive rounds that one propeller worker can use for one workflow - >1 => turbo-mode is enabled. - -**Default Value**: - -.. code-block:: yaml - - "8" - - -event-config (`config.EventConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configures execution event behavior. - -**Default Value**: - -.. code-block:: yaml - - fallback-to-output-reference: false - raw-output-policy: reference - - -include-shard-key-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Include the specified shard key label in the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. 
code-block:: yaml - - [] - - -exclude-shard-key-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Exclude the specified shard key label from the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -include-project-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Include the specified project label in the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -exclude-project-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Exclude the specified project label from the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -include-domain-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Include the specified domain label in the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -exclude-domain-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Exclude the specified domain label from the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -cluster-id (string) ------------------------------------------------------------------------------------------------------------------------- - -Unique cluster id running this flytepropeller instance with which to annotate execution events - -**Default Value**: - -.. 
code-block:: yaml - - propeller - - -create-flyteworkflow-crd (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable creation of the FlyteWorkflow CRD on startup - -**Default Value**: - -.. code-block:: yaml - - "false" - - -array-node-event-version (int) ------------------------------------------------------------------------------------------------------------------------- - -ArrayNode eventing version. 0 => legacy (drop-in replacement for maptask), 1 => new - -**Default Value**: - -.. code-block:: yaml - - "0" - - -config.CompositeQueueConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Type of composite queue to use for the WorkQueue - -**Default Value**: - -.. code-block:: yaml - - batch - - -queue (`config.WorkqueueConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Workflow workqueue configuration, affects the way the work is consumed from the queue. - -**Default Value**: - -.. code-block:: yaml - - base-delay: 0s - capacity: 10000 - max-delay: 1m0s - rate: 1000 - type: maxof - - -sub-queue (`config.WorkqueueConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -SubQueue configuration, affects the way the nodes cause the top-level Work to be re-evaluated. - -**Default Value**: - -.. 
code-block:: yaml - - base-delay: 0s - capacity: 10000 - max-delay: 0s - rate: 1000 - type: bucket - - -batching-interval (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration for which downstream updates are buffered - -**Default Value**: - -.. code-block:: yaml - - 1s - - -batch-size (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "-1" - - -config.WorkqueueConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Type of RateLimiter to use for the WorkQueue - -**Default Value**: - -.. code-block:: yaml - - maxof - - -base-delay (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -base backoff delay for failure - -**Default Value**: - -.. code-block:: yaml - - 0s - - -max-delay (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max backoff delay for failure - -**Default Value**: - -.. code-block:: yaml - - 1m0s - - -rate (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Bucket Refill rate per second - -**Default Value**: - -.. code-block:: yaml - - "1000" - - -capacity (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Bucket capacity as number of items - -**Default Value**: - -.. 
code-block:: yaml - - "10000" - - -config.EventConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -raw-output-policy (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -How output data should be passed along in execution events. - -**Default Value**: - -.. code-block:: yaml - - reference - - -fallback-to-output-reference (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Whether output data should be sent by reference when it is too large to be sent inline in execution events. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -config.KubeClientConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -qps (float32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "100" - - -burst (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max burst rate for throttle. 0 defaults to 10 - -**Default Value**: - -.. code-block:: yaml - - "25" - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max duration allowed for every request to KubeAPI before giving up. 0 implies no timeout. - -**Default Value**: - -.. 
code-block:: yaml - - 30s - - -config.LeaderElectionConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enables/Disables leader election. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -lock-config-map (`types.NamespacedName`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -ConfigMap namespace/name to use for resource lock. - -**Default Value**: - -.. code-block:: yaml - - Name: "" - Namespace: "" - - -lease-duration (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration that non-leader candidates will wait to force acquire leadership. This is measured against time of last observed ack. - -**Default Value**: - -.. code-block:: yaml - - 15s - - -renew-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration that the acting master will retry refreshing leadership before giving up. - -**Default Value**: - -.. code-block:: yaml - - 10s - - -retry-period (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration the LeaderElector clients should wait between tries of actions. - -**Default Value**: - -.. code-block:: yaml - - 2s - - -types.NamespacedName -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Namespace (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -Name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -config.NodeConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -default-deadlines (`config.DefaultDeadlines`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value for timeouts - -**Default Value**: - -.. code-block:: yaml - - node-active-deadline: 0s - node-execution-deadline: 0s - workflow-active-deadline: 0s - - -max-node-retries-system-failures (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum number of retries per node for node failure due to infra issues - -**Default Value**: - -.. code-block:: yaml - - "3" - - -interruptible-failure-threshold (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -number of failures for a node to be still considered interruptible. Negative numbers are treated as complementary (ex. -1 means last attempt is non-interruptible). - -**Default Value**: - -.. code-block:: yaml - - "-1" - - -default-max-attempts (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default maximum number of attempts for a node - -**Default Value**: - -.. code-block:: yaml - - "1" - - -ignore-retry-cause (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Ignore retry cause and count all attempts toward a node's max attempts - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -config.DefaultDeadlines -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -node-execution-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value of node execution timeout that includes the time spent to run the node/workflow - -**Default Value**: - -.. code-block:: yaml - - 0s - - -node-active-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value of node timeout that includes the time spent queued. - -**Default Value**: - -.. code-block:: yaml - - 0s - - -workflow-active-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value of workflow timeout that includes the time spent queued. - -**Default Value**: - -.. code-block:: yaml - - 0s - - -config.Port -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -port (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "10254" - - -Section: qualityofservice -======================================================================================================================== - -tierExecutionValues (map[string]interfaces.QualityOfServiceSpec) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - {} - - -defaultTiers (map[string]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - {} - - -Section: queues -======================================================================================================================== - -executionQueues (interfaces.ExecutionQueues) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - [] - - -workflowConfigs (interfaces.WorkflowConfigs) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - [] - - -Section: registration -======================================================================================================================== - -maxWorkflowNodes (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "100" - - -maxLabelEntries (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -maxAnnotationEntries (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -workflowSizeLimit (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -Section: remotedata -======================================================================================================================== - -scheme (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - none - - -region (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -signedUrls (`interfaces.SignedURL`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - durationMinutes: 0 - enabled: false - signingPrincipal: "" - - -maxSizeInBytes (int64) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "2097152" - - -inlineEventDataPolicy (int) ------------------------------------------------------------------------------------------------------------------------- - -Specifies how inline execution event data should be saved in the backend - -**Default Value**: - -.. code-block:: yaml - - Offload - - -interfaces.SignedURL -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Whether signed urls should even be returned with GetExecutionData, GetNodeExecutionData and GetTaskExecutionData response objects. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -durationMinutes (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "0" - - -signingPrincipal (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Section: scheduler -======================================================================================================================== - -profilerPort (`config.Port`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - 10254 - - -eventScheduler (`interfaces.EventSchedulerConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - aws: null - local: {} - region: "" - scheduleNamePrefix: "" - scheduleRole: "" - scheme: local - targetName: "" - - -workflowExecutor (`interfaces.WorkflowExecutorConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - accountId: "" - aws: null - local: - adminRateLimit: - burst: 10 - tps: 100 - useUTCTz: false - region: "" - scheduleQueueName: "" - scheme: local - - -reconnectAttempts (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -reconnectDelaySeconds (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "0" - - -interfaces.EventSchedulerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -scheme (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - local - - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -scheduleRole (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -targetName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -scheduleNamePrefix (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -aws (interfaces.AWSSchedulerConfig) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -local (`interfaces.FlyteSchedulerConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - {} - - -interfaces.FlyteSchedulerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -interfaces.WorkflowExecutorConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -scheme (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - local - - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -scheduleQueueName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -accountId (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -aws (interfaces.AWSWorkflowExecutorConfig) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -local (`interfaces.FlyteWorkflowExecutorConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - adminRateLimit: - burst: 10 - tps: 100 - useUTCTz: false - - -interfaces.FlyteWorkflowExecutorConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -adminRateLimit (`interfaces.AdminRateLimit`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - burst: 10 - tps: 100 - - -useUTCTz (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -interfaces.AdminRateLimit -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -tps (float64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "100" - - -burst (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "10" - - -Section: secrets -======================================================================================================================== - -secrets-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -Prefix where to look for secrets file - -**Default Value**: - -.. code-block:: yaml - - /etc/secrets - - -env-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -Prefix for environment variables - -**Default Value**: - -.. 
code-block:: yaml - - FLYTE_SECRET_ - - -Section: server -======================================================================================================================== - -httpPort (int) ------------------------------------------------------------------------------------------------------------------------- - -On which http port to serve admin - -**Default Value**: - -.. code-block:: yaml - - "8088" - - -grpcPort (int) ------------------------------------------------------------------------------------------------------------------------- - -deprecated - -**Default Value**: - -.. code-block:: yaml - - "0" - - -grpcServerReflection (bool) ------------------------------------------------------------------------------------------------------------------------- - -deprecated - -**Default Value**: - -.. code-block:: yaml - - "false" - - -kube-config (string) ------------------------------------------------------------------------------------------------------------------------- - -Path to kubernetes client config file, default is empty, useful for incluster config. - -**Default Value**: - -.. code-block:: yaml - - "" - - -master (string) ------------------------------------------------------------------------------------------------------------------------- - -The address of the Kubernetes API server. - -**Default Value**: - -.. code-block:: yaml - - "" - - -security (`config.ServerSecurityOptions`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - allowCors: true - allowedHeaders: - - Content-Type - - flyte-authorization - allowedOrigins: - - '*' - auditAccess: false - secure: false - ssl: - certificateFile: "" - keyFile: "" - useAuth: false - - -grpc (`config.GrpcConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - enableGrpcLatencyMetrics: false - maxMessageSizeBytes: 0 - port: 8089 - serverReflection: true - - -thirdPartyConfig (`config.ThirdPartyConfigOptions`_) ------------------------------------------------------------------------------------------------------------------------- - -Deprecated please use auth.appAuth.thirdPartyConfig instead. - -**Default Value**: - -.. code-block:: yaml - - flyteClient: - audience: "" - clientId: "" - redirectUri: "" - scopes: [] - - -dataProxy (`config.DataProxyConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Defines data proxy configuration. - -**Default Value**: - -.. code-block:: yaml - - download: - maxExpiresIn: 1h0m0s - upload: - defaultFileNameLength: 20 - maxExpiresIn: 1h0m0s - maxSize: 6Mi - storagePrefix: "" - - -readHeaderTimeoutSeconds (int) ------------------------------------------------------------------------------------------------------------------------- - -The amount of time allowed to read request headers. - -**Default Value**: - -.. code-block:: yaml - - "32" - - -kubeClientConfig (`config.KubeClientConfig (kubeClientConfig)`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration to control the Kubernetes client - -**Default Value**: - -.. code-block:: yaml - - burst: 25 - qps: 100 - timeout: 30s - - -config.DataProxyConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -upload (`config.DataProxyUploadConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines data proxy upload configuration. - -**Default Value**: - -.. 
code-block:: yaml - - defaultFileNameLength: 20 - maxExpiresIn: 1h0m0s - maxSize: 6Mi - storagePrefix: "" - - -download (`config.DataProxyDownloadConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines data proxy download configuration. - -**Default Value**: - -.. code-block:: yaml - - maxExpiresIn: 1h0m0s - - -config.DataProxyDownloadConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -maxExpiresIn (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum allowed expiration duration. - -**Default Value**: - -.. code-block:: yaml - - 1h0m0s - - -config.DataProxyUploadConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -maxSize (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum allowed upload size. - -**Default Value**: - -.. code-block:: yaml - - 6Mi - - -maxExpiresIn (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum allowed expiration duration. - -**Default Value**: - -.. code-block:: yaml - - 1h0m0s - - -defaultFileNameLength (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default length for the generated file name if not provided in the request. - -**Default Value**: - -.. code-block:: yaml - - "20" - - -storagePrefix (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Storage prefix to use for all upload requests. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -config.GrpcConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -port (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -On which grpc port to serve admin - -**Default Value**: - -.. code-block:: yaml - - "8089" - - -serverReflection (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable GRPC Server Reflection - -**Default Value**: - -.. code-block:: yaml - - "true" - - -maxMessageSizeBytes (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The max size in bytes for incoming gRPC messages - -**Default Value**: - -.. code-block:: yaml - - "0" - - -enableGrpcLatencyMetrics (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable grpc latency metrics. Note Histograms metrics can be expensive on Prometheus servers. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -config.KubeClientConfig (kubeClientConfig) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -qps (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max QPS to the master for requests to KubeAPI. 0 defaults to 5. - -**Default Value**: - -.. code-block:: yaml - - "100" - - -burst (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max burst rate for throttle. 0 defaults to 10 - -**Default Value**: - -.. 
code-block:: yaml - - "25" - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max duration allowed for every request to KubeAPI before giving up. 0 implies no timeout. - -**Default Value**: - -.. code-block:: yaml - - 30s - - -config.ServerSecurityOptions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -secure (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -ssl (`config.SslOptions`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - certificateFile: "" - keyFile: "" - - -useAuth (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -auditAccess (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -allowCors (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "true" - - -allowedOrigins ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - '*' - - -allowedHeaders ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - - Content-Type - - flyte-authorization - - -config.SslOptions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -certificateFile (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -keyFile (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Section: storage -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -Sets the type of storage to configure [s3/minio/local/mem/stow]. - -**Default Value**: - -.. code-block:: yaml - - s3 - - -connection (`storage.ConnectionConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - access-key: "" - auth-type: iam - disable-ssl: false - endpoint: "" - region: us-east-1 - secret-key: "" - - -stow (`storage.StowConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Storage config for stow backend. - -**Default Value**: - -.. code-block:: yaml - - {} - - -container (string) ------------------------------------------------------------------------------------------------------------------------- - -Initial container (in s3 a bucket) to create -if it doesn't exist-.' - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -enable-multicontainer (bool) ------------------------------------------------------------------------------------------------------------------------- - -If this is true, then the container argument is overlooked and redundant. This config will automatically open new connections to new containers/buckets as they are encountered - -**Default Value**: - -.. code-block:: yaml - - "false" - - -cache (`storage.CachingConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - max_size_mbs: 0 - target_gc_percent: 0 - - -limits (`storage.LimitsConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets limits for stores. - -**Default Value**: - -.. code-block:: yaml - - maxDownloadMBs: 2 - - -defaultHttpClient (`storage.HTTPClientConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets the default http client config. - -**Default Value**: - -.. code-block:: yaml - - headers: null - timeout: 0s - - -signedUrl (`storage.SignedURLConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets config for SignedURL. - -**Default Value**: - -.. code-block:: yaml - - {} - - -storage.CachingConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -max_size_mbs (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used - -**Default Value**: - -.. 
code-block:: yaml - - "0" - - -target_gc_percent (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets the garbage collection target percentage. - -**Default Value**: - -.. code-block:: yaml - - "0" - - -storage.ConnectionConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -endpoint (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -URL for storage client to connect to. - -**Default Value**: - -.. code-block:: yaml - - "" - - -auth-type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Auth Type to use [iam,accesskey]. - -**Default Value**: - -.. code-block:: yaml - - iam - - -access-key (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Access key to use. Only required when authtype is set to accesskey. - -**Default Value**: - -.. code-block:: yaml - - "" - - -secret-key (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Secret to use when accesskey is set. - -**Default Value**: - -.. code-block:: yaml - - "" - - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Region to connect to. - -**Default Value**: - -.. code-block:: yaml - - us-east-1 - - -disable-ssl (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Disables SSL connection. Should only be used for development. - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -storage.HTTPClientConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -headers (map[string][]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets time out on the http client. - -**Default Value**: - -.. code-block:: yaml - - 0s - - -storage.LimitsConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -maxDownloadMBs (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum allowed download size (in MBs) per call. - -**Default Value**: - -.. code-block:: yaml - - "2" - - -storage.SignedURLConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -stowConfigOverride (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -storage.StowConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -kind (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Kind of Stow backend to use. Refer to github/flyteorg/stow - -**Default Value**: - -.. code-block:: yaml - - "" - - -config (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Configuration for stow backend. 
Refer to github/flyteorg/stow - -**Default Value**: - -.. code-block:: yaml - - {} - - -Section: task_resources -======================================================================================================================== - -defaults (`interfaces.TaskResourceSet`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - cpu: "2" - ephemeralStorage: "0" - gpu: "0" - memory: 200Mi - storage: "0" - - -limits (`interfaces.TaskResourceSet`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - cpu: "2" - ephemeralStorage: "0" - gpu: "1" - memory: 1Gi - storage: "0" - - -interfaces.TaskResourceSet -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -cpu (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "2" - - -gpu (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "0" - - -memory (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 200Mi - - -storage (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "0" - - -ephemeralStorage (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "0" - - diff --git a/rsts/deployment/configuration/generated/flytepropeller_config.rst b/rsts/deployment/configuration/generated/flytepropeller_config.rst deleted file mode 100644 index b238e7c961..0000000000 --- a/rsts/deployment/configuration/generated/flytepropeller_config.rst +++ /dev/null @@ -1,5590 +0,0 @@ -.. _flytepropeller-config-specification: - -######################################### -Flyte Propeller Configuration -######################################### - -- `admin <#section-admin>`_ - -- `catalog-cache <#section-catalog-cache>`_ - -- `event <#section-event>`_ - -- `logger <#section-logger>`_ - -- `otel <#section-otel>`_ - -- `plugins <#section-plugins>`_ - -- `propeller <#section-propeller>`_ - -- `secrets <#section-secrets>`_ - -- `storage <#section-storage>`_ - -- `tasks <#section-tasks>`_ - -- `webhook <#section-webhook>`_ - -Section: admin -======================================================================================================================== - -endpoint (`config.URL`_) ------------------------------------------------------------------------------------------------------------------------- - -For admin types, specify where the uri of the service is located. - -**Default Value**: - -.. code-block:: yaml - - "" - - -insecure (bool) ------------------------------------------------------------------------------------------------------------------------- - -Use insecure connection. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -insecureSkipVerify (bool) ------------------------------------------------------------------------------------------------------------------------- - -InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. 
Caution : shouldn't be use for production usecases' - -**Default Value**: - -.. code-block:: yaml - - "false" - - -caCertFilePath (string) ------------------------------------------------------------------------------------------------------------------------- - -Use specified certificate file to verify the admin server peer. - -**Default Value**: - -.. code-block:: yaml - - "" - - -maxBackoffDelay (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Max delay for grpc backoff - -**Default Value**: - -.. code-block:: yaml - - 8s - - -perRetryTimeout (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -gRPC per retry timeout - -**Default Value**: - -.. code-block:: yaml - - 15s - - -maxRetries (int) ------------------------------------------------------------------------------------------------------------------------- - -Max number of gRPC retries - -**Default Value**: - -.. code-block:: yaml - - "4" - - -authType (uint8) ------------------------------------------------------------------------------------------------------------------------- - -Type of OAuth2 flow used for communicating with admin.ClientSecret,Pkce,ExternalCommand are valid values - -**Default Value**: - -.. code-block:: yaml - - ClientSecret - - -tokenRefreshWindow (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Max duration between token refresh attempt and token expiry. - -**Default Value**: - -.. code-block:: yaml - - 0s - - -useAuth (bool) ------------------------------------------------------------------------------------------------------------------------- - -Deprecated: Auth will be enabled/disabled based on admin's dynamically discovered information. - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -clientId (string) ------------------------------------------------------------------------------------------------------------------------- - -Client ID - -**Default Value**: - -.. code-block:: yaml - - flytepropeller - - -clientSecretLocation (string) ------------------------------------------------------------------------------------------------------------------------- - -File containing the client secret - -**Default Value**: - -.. code-block:: yaml - - /etc/secrets/client_secret - - -clientSecretEnvVar (string) ------------------------------------------------------------------------------------------------------------------------- - -Environment variable containing the client secret - -**Default Value**: - -.. code-block:: yaml - - "" - - -scopes ([]string) ------------------------------------------------------------------------------------------------------------------------- - -List of scopes to request - -**Default Value**: - -.. code-block:: yaml - - [] - - -useAudienceFromAdmin (bool) ------------------------------------------------------------------------------------------------------------------------- - -Use Audience configured from admins public endpoint config. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -audience (string) ------------------------------------------------------------------------------------------------------------------------- - -Audience to use when initiating OAuth2 authorization requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -authorizationServerUrl (string) ------------------------------------------------------------------------------------------------------------------------- - -This is the URL to your IdP's authorization server. It'll default to Endpoint - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -tokenUrl (string) ------------------------------------------------------------------------------------------------------------------------- - -OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. - -**Default Value**: - -.. code-block:: yaml - - "" - - -authorizationHeader (string) ------------------------------------------------------------------------------------------------------------------------- - -Custom metadata header to pass JWT - -**Default Value**: - -.. code-block:: yaml - - "" - - -pkceConfig (`pkce.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -Config for Pkce authentication flow. - -**Default Value**: - -.. code-block:: yaml - - refreshTime: 5m0s - timeout: 2m0s - - -deviceFlowConfig (`deviceflow.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -Config for Device authentication flow. - -**Default Value**: - -.. code-block:: yaml - - pollInterval: 5s - refreshTime: 5m0s - timeout: 10m0s - - -command ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Command for external authentication token generation - -**Default Value**: - -.. code-block:: yaml - - [] - - -proxyCommand ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Command for external proxy-authorization token generation - -**Default Value**: - -.. code-block:: yaml - - [] - - -defaultServiceConfig (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -httpProxyURL (`config.URL`_) ------------------------------------------------------------------------------------------------------------------------- - -OPTIONAL: HTTP Proxy to be used for OAuth requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -config.Duration -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Duration (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 8s - - -config.URL -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -URL (`url.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - ForceQuery: false - Fragment: "" - Host: "" - OmitHost: false - Opaque: "" - Path: "" - RawFragment: "" - RawPath: "" - RawQuery: "" - Scheme: "" - User: null - - -url.URL -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Scheme (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Opaque (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -User (url.Userinfo) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - null - - -Host (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Path (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -RawPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -OmitHost (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -ForceQuery (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -RawQuery (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Fragment (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -RawFragment (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -deviceflow.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -refreshTime (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -grace period from the token expiry after which it would refresh the token. 
- -**Default Value**: - -.. code-block:: yaml - - 5m0s - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -amount of time the device flow should complete or else it will be cancelled. - -**Default Value**: - -.. code-block:: yaml - - 10m0s - - -pollInterval (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -amount of time the device flow would poll the token endpoint if auth server doesn't return a polling interval. Okta and google IDP do return an interval' - -**Default Value**: - -.. code-block:: yaml - - 5s - - -pkce.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Amount of time the browser session would be active for authentication from client app. - -**Default Value**: - -.. code-block:: yaml - - 2m0s - - -refreshTime (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -grace period from the token expiry after which it would refresh the token. - -**Default Value**: - -.. code-block:: yaml - - 5m0s - - -Section: catalog-cache -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -Catalog Implementation to use - -**Default Value**: - -.. 
code-block:: yaml - - noop - - -endpoint (string) ------------------------------------------------------------------------------------------------------------------------- - -Endpoint for catalog service - -**Default Value**: - -.. code-block:: yaml - - "" - - -insecure (bool) ------------------------------------------------------------------------------------------------------------------------- - -Use insecure grpc connection - -**Default Value**: - -.. code-block:: yaml - - "false" - - -max-cache-age (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Cache entries past this age will incur cache miss. 0 means cache never expires - -**Default Value**: - -.. code-block:: yaml - - 0s - - -use-admin-auth (bool) ------------------------------------------------------------------------------------------------------------------------- - -Use the same gRPC credentials option as the flyteadmin client - -**Default Value**: - -.. code-block:: yaml - - "false" - - -default-service-config (string) ------------------------------------------------------------------------------------------------------------------------- - -Set the default service config for the catalog gRPC client - -**Default Value**: - -.. code-block:: yaml - - "" - - -Section: event -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -Sets the type of EventSink to configure [log/admin/file]. - -**Default Value**: - -.. code-block:: yaml - - admin - - -file-path (string) ------------------------------------------------------------------------------------------------------------------------- - -For file types, specify where the file should be located. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -rate (int64) ------------------------------------------------------------------------------------------------------------------------- - -Max rate at which events can be recorded per second. - -**Default Value**: - -.. code-block:: yaml - - "500" - - -capacity (int) ------------------------------------------------------------------------------------------------------------------------- - -The max bucket size for event recording tokens. - -**Default Value**: - -.. code-block:: yaml - - "1000" - - -Section: logger -======================================================================================================================== - -show-source (bool) ------------------------------------------------------------------------------------------------------------------------- - -Includes source code location in logs. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -mute (bool) ------------------------------------------------------------------------------------------------------------------------- - -Mutes all logs regardless of severity. Intended for benchmarks/tests only. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -level (int) ------------------------------------------------------------------------------------------------------------------------- - -Sets the minimum logging level. - -**Default Value**: - -.. code-block:: yaml - - "3" - - -formatter (`logger.FormatterConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets logging format. - -**Default Value**: - -.. code-block:: yaml - - type: json - - -logger.FormatterConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets logging format type. 
- -**Default Value**: - -.. code-block:: yaml - - json - - -Section: otel -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -Sets the type of exporter to configure [noop/file/jaeger]. - -**Default Value**: - -.. code-block:: yaml - - noop - - -file (`otelutils.FileConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration for exporting telemetry traces to a file - -**Default Value**: - -.. code-block:: yaml - - filename: /tmp/trace.txt - - -jaeger (`otelutils.JaegerConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration for exporting telemetry traces to a jaeger - -**Default Value**: - -.. code-block:: yaml - - endpoint: http://localhost:14268/api/traces - - -otelutils.FileConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -filename (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Filename to store exported telemetry traces - -**Default Value**: - -.. code-block:: yaml - - /tmp/trace.txt - - -otelutils.JaegerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -endpoint (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Endpoint for the jaeger telemtry trace ingestor - -**Default Value**: - -.. 
code-block:: yaml - - http://localhost:14268/api/traces - - -Section: plugins -======================================================================================================================== - -agent-service (`agent.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - agentForTaskTypes: null - agents: null - defaultAgent: - defaultServiceConfig: "" - defaultTimeout: 10s - endpoint: dns:///flyteagent.flyte.svc.cluster.local:80 - insecure: true - timeouts: null - resourceConstraints: - NamespaceScopeResourceConstraint: - Value: 50 - ProjectScopeResourceConstraint: - Value: 100 - supportedTaskTypes: - - task_type_1 - - task_type_2 - webApi: - caching: - maxSystemFailures: 5 - resyncInterval: 30s - size: 500000 - workers: 10 - readRateLimiter: - burst: 100 - qps: 10 - resourceMeta: null - resourceQuotas: - default: 1000 - writeRateLimiter: - burst: 100 - qps: 10 - - -athena (`athena.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - defaultCatalog: AwsDataCatalog - defaultWorkGroup: primary - resourceConstraints: - NamespaceScopeResourceConstraint: - Value: 50 - ProjectScopeResourceConstraint: - Value: 100 - webApi: - caching: - maxSystemFailures: 5 - resyncInterval: 30s - size: 500000 - workers: 10 - readRateLimiter: - burst: 100 - qps: 10 - resourceMeta: null - resourceQuotas: - default: 1000 - writeRateLimiter: - burst: 100 - qps: 10 - - -aws (`aws.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - accountId: "" - logLevel: 0 - region: us-east-2 - retries: 3 - - -bigquery (`bigquery.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - googleTokenSource: - gke-task-workload-identity: - remoteClusterConfig: - auth: - caCertPath: "" - tokenPath: "" - enabled: false - endpoint: "" - name: "" - type: default - resourceConstraints: - NamespaceScopeResourceConstraint: - Value: 50 - ProjectScopeResourceConstraint: - Value: 100 - webApi: - caching: - maxSystemFailures: 5 - resyncInterval: 30s - size: 500000 - workers: 10 - readRateLimiter: - burst: 100 - qps: 10 - resourceMeta: null - resourceQuotas: - default: 1000 - writeRateLimiter: - burst: 100 - qps: 10 - - -catalogcache (`catalog.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - reader: - maxItems: 10000 - maxRetries: 3 - workers: 10 - writer: - maxItems: 10000 - maxRetries: 3 - workers: 10 - - -databricks (`databricks.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - databricksInstance: "" - databricksTokenKey: FLYTE_DATABRICKS_API_TOKEN - defaultWarehouse: COMPUTE_CLUSTER - entrypointFile: "" - resourceConstraints: - NamespaceScopeResourceConstraint: - Value: 50 - ProjectScopeResourceConstraint: - Value: 100 - webApi: - caching: - maxSystemFailures: 5 - resyncInterval: 30s - size: 500000 - workers: 10 - readRateLimiter: - burst: 100 - qps: 10 - resourceMeta: null - resourceQuotas: - default: 1000 - writeRateLimiter: - burst: 100 - qps: 10 - - -echo (`testing.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - sleep-duration: 0s - - -k8s (`config.K8sPluginConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - co-pilot: - cpu: 500m - default-input-path: /var/flyte/inputs - default-output-path: /var/flyte/outputs - image: cr.flyte.org/flyteorg/flytecopilot:v0.0.15 - input-vol-name: flyte-inputs - memory: 128Mi - name: flyte-copilot- - output-vol-name: flyte-outputs - start-timeout: 1m40s - storage: "" - create-container-error-grace-period: 3m0s - default-annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - default-cpus: "1" - default-env-vars: null - default-env-vars-from-env: null - default-labels: null - default-memory: 1Gi - default-node-selector: null - default-pod-dns-config: null - default-pod-security-context: null - default-pod-template-name: "" - default-pod-template-resync: 30s - default-security-context: null - default-tolerations: null - delete-resource-on-finalize: false - enable-host-networking-pod: null - gpu-device-node-label: k8s.amazonaws.com/accelerator - gpu-partition-size-node-label: k8s.amazonaws.com/gpu-partition-size - gpu-resource-name: nvidia.com/gpu - gpu-unpartitioned-node-selector-requirement: null - 
gpu-unpartitioned-toleration: null - image-pull-backoff-grace-period: 3m0s - inject-finalizer: false - interruptible-node-selector: null - interruptible-node-selector-requirement: null - interruptible-tolerations: null - non-interruptible-node-selector-requirement: null - resource-tolerations: null - scheduler-name: "" - send-object-events: false - - -k8s-array (`k8s.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - ErrorAssembler: - maxItems: 100000 - maxRetries: 5 - workers: 10 - OutputAssembler: - maxItems: 100000 - maxRetries: 5 - workers: 10 - logs: - config: - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: true - kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName - }}/pod?namespace={{ .namespace }} - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - maxArrayJobSize: 5000 - maxErrorLength: 1000 - namespaceTemplate: "" - node-selector: null - remoteClusterConfig: - auth: - certPath: "" - tokenPath: "" - type: "" - enabled: false - endpoint: "" - name: "" - resourceConfig: - limit: 0 - primaryLabel: "" - scheduler: "" - tolerations: null - - -kf-operator (`common.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - timeout: 1m0s - - -logs (`logs.LogConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: true - kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName - }}/pod?namespace={{ .namespace }} - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - - -qubole (`config.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - analyzeLinkPath: /v2/analyze - clusterConfigs: - - labels: - - default - limit: 100 - namespaceScopeQuotaProportionCap: 0.7 - primaryLabel: default - projectScopeQuotaProportionCap: 0.7 - commandApiPath: /api/v1.2/commands/ - defaultClusterLabel: default - destinationClusterConfigs: [] - endpoint: https://wellness.qubole.com - lruCacheSize: 2000 - quboleTokenKey: FLYTE_QUBOLE_CLIENT_TOKEN - workers: 15 - - -ray (`ray.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - dashboardHost: 0.0.0.0 - dashboardURLTemplate: null - defaults: - headNode: - ipAddress: $MY_POD_IP - startParameters: - disable-usage-stats: "true" - workerNode: - ipAddress: $MY_POD_IP - startParameters: - disable-usage-stats: "true" - enableUsageStats: false - includeDashboard: true - logs: - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: false - kubernetes-template-uri: "" - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - logsSidecar: null - remoteClusterConfig: - auth: - caCertPath: "" - tokenPath: "" - enabled: false - endpoint: "" - name: "" - serviceType: NodePort - shutdownAfterJobFinishes: true - ttlSecondsAfterFinished: 3600 - - -snowflake (`snowflake.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - defaultWarehouse: COMPUTE_WH - resourceConstraints: - NamespaceScopeResourceConstraint: - Value: 50 - ProjectScopeResourceConstraint: - Value: 100 - snowflakeTokenKey: FLYTE_SNOWFLAKE_CLIENT_TOKEN - webApi: - caching: - maxSystemFailures: 5 - resyncInterval: 30s - size: 500000 - workers: 10 - readRateLimiter: - burst: 100 - qps: 10 - resourceMeta: null - resourceQuotas: - default: 1000 - writeRateLimiter: - burst: 100 - qps: 10 - - -spark (`spark.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - features: null - logs: - all-user: - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: false - kubernetes-template-uri: "" - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - mixed: - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: true - kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName - }}/pod?namespace={{ .namespace }} - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - system: - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: false - kubernetes-template-uri: "" - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - user: - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: false - kubernetes-template-uri: "" - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - spark-config-default: null - spark-history-server-url: "" - - -agent.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -webApi (`webapi.PluginConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines config for the base WebAPI plugin. - -**Default Value**: - -.. 
code-block:: yaml - - caching: - maxSystemFailures: 5 - resyncInterval: 30s - size: 500000 - workers: 10 - readRateLimiter: - burst: 100 - qps: 10 - resourceMeta: null - resourceQuotas: - default: 1000 - writeRateLimiter: - burst: 100 - qps: 10 - - -resourceConstraints (`core.ResourceConstraintsSpec`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - NamespaceScopeResourceConstraint: - Value: 50 - ProjectScopeResourceConstraint: - Value: 100 - - -defaultAgent (`agent.Agent`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The default agent. - -**Default Value**: - -.. code-block:: yaml - - defaultServiceConfig: "" - defaultTimeout: 10s - endpoint: dns:///flyteagent.flyte.svc.cluster.local:80 - insecure: true - timeouts: null - - -agents (map[string]*agent.Agent) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The agents. - -**Default Value**: - -.. code-block:: yaml - - null - - -agentForTaskTypes (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -supportedTaskTypes ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - task_type_1 - - task_type_2 - - -agent.Agent -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -endpoint (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - dns:///flyteagent.flyte.svc.cluster.local:80 - - -insecure (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "true" - - -defaultServiceConfig (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -timeouts (map[string]config.Duration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -defaultTimeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 10s - - -core.ResourceConstraintsSpec -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -ProjectScopeResourceConstraint (`core.ResourceConstraint`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - Value: 100 - - -NamespaceScopeResourceConstraint (`core.ResourceConstraint`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - Value: 50 - - -core.ResourceConstraint -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Value (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "100" - - -webapi.PluginConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -resourceQuotas (webapi.ResourceQuotas) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - default: 1000 - - -readRateLimiter (`webapi.RateLimiterConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines rate limiter properties for read actions (e.g. retrieve status). - -**Default Value**: - -.. code-block:: yaml - - burst: 100 - qps: 10 - - -writeRateLimiter (`webapi.RateLimiterConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines rate limiter properties for write actions. - -**Default Value**: - -.. code-block:: yaml - - burst: 100 - qps: 10 - - -caching (`webapi.CachingConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines caching characteristics. - -**Default Value**: - -.. code-block:: yaml - - maxSystemFailures: 5 - resyncInterval: 30s - size: 500000 - workers: 10 - - -resourceMeta (interface) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - - -webapi.CachingConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -size (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the maximum number of items to cache. - -**Default Value**: - -.. 
code-block:: yaml - - "500000" - - -resyncInterval (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the sync interval. - -**Default Value**: - -.. code-block:: yaml - - 30s - - -workers (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the number of workers to start up to process items. - -**Default Value**: - -.. code-block:: yaml - - "10" - - -maxSystemFailures (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the number of failures to fetch a task before failing the task. - -**Default Value**: - -.. code-block:: yaml - - "5" - - -webapi.RateLimiterConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -qps (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the max rate of calls per second. - -**Default Value**: - -.. code-block:: yaml - - "10" - - -burst (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the maximum burst size. - -**Default Value**: - -.. code-block:: yaml - - "100" - - -athena.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -webApi (`webapi.PluginConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines config for the base WebAPI plugin. - -**Default Value**: - -.. 
code-block:: yaml - - caching: - maxSystemFailures: 5 - resyncInterval: 30s - size: 500000 - workers: 10 - readRateLimiter: - burst: 100 - qps: 10 - resourceMeta: null - resourceQuotas: - default: 1000 - writeRateLimiter: - burst: 100 - qps: 10 - - -resourceConstraints (`core.ResourceConstraintsSpec`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - NamespaceScopeResourceConstraint: - Value: 50 - ProjectScopeResourceConstraint: - Value: 100 - - -defaultWorkGroup (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the default workgroup to use when running on Athena unless overwritten by the task. - -**Default Value**: - -.. code-block:: yaml - - primary - - -defaultCatalog (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the default catalog to use when running on Athena unless overwritten by the task. - -**Default Value**: - -.. code-block:: yaml - - AwsDataCatalog - - -aws.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -AWS Region to connect to. - -**Default Value**: - -.. code-block:: yaml - - us-east-2 - - -accountId (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -AWS Account Identifier. - -**Default Value**: - -.. code-block:: yaml - - "" - - -retries (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Number of retries. - -**Default Value**: - -.. 
code-block:: yaml - - "3" - - -logLevel (uint64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "0" - - -bigquery.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -webApi (`webapi.PluginConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines config for the base WebAPI plugin. - -**Default Value**: - -.. code-block:: yaml - - caching: - maxSystemFailures: 5 - resyncInterval: 30s - size: 500000 - workers: 10 - readRateLimiter: - burst: 100 - qps: 10 - resourceMeta: null - resourceQuotas: - default: 1000 - writeRateLimiter: - burst: 100 - qps: 10 - - -resourceConstraints (`core.ResourceConstraintsSpec`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - NamespaceScopeResourceConstraint: - Value: 50 - ProjectScopeResourceConstraint: - Value: 100 - - -googleTokenSource (`google.TokenSourceFactoryConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines Google token source - -**Default Value**: - -.. code-block:: yaml - - gke-task-workload-identity: - remoteClusterConfig: - auth: - caCertPath: "" - tokenPath: "" - enabled: false - endpoint: "" - name: "" - type: default - - -bigQueryEndpoint (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -google.TokenSourceFactoryConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines type of TokenSourceFactory, possible values are 'default' and 'gke-task-workload-identity' - -**Default Value**: - -.. code-block:: yaml - - default - - -gke-task-workload-identity (`google.GkeTaskWorkloadIdentityTokenSourceFactoryConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Extra configuration for GKE task workload identity token source factory - -**Default Value**: - -.. code-block:: yaml - - remoteClusterConfig: - auth: - caCertPath: "" - tokenPath: "" - enabled: false - endpoint: "" - name: "" - - -google.GkeTaskWorkloadIdentityTokenSourceFactoryConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -remoteClusterConfig (`k8s.ClusterConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Configuration of remote GKE cluster - -**Default Value**: - -.. code-block:: yaml - - auth: - caCertPath: "" - tokenPath: "" - enabled: false - endpoint: "" - name: "" - - -k8s.ClusterConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Friendly name of the remote cluster - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -endpoint (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Remote K8s cluster endpoint - -**Default Value**: - -.. code-block:: yaml - - "" - - -auth (`k8s.Auth`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - caCertPath: "" - tokenPath: "" - - -enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Boolean flag to enable or disable - -**Default Value**: - -.. code-block:: yaml - - "false" - - -k8s.Auth -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -tokenPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Token path - -**Default Value**: - -.. code-block:: yaml - - "" - - -caCertPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Certificate path - -**Default Value**: - -.. code-block:: yaml - - "" - - -catalog.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -reader (`workqueue.Config`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Catalog reader workqueue config. Make sure the index cache is big enough to accommodate the biggest array task allowed to run on the system. - -**Default Value**: - -.. 
code-block:: yaml - - maxItems: 10000 - maxRetries: 3 - workers: 10 - - -writer (`workqueue.Config`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Catalog writer workqueue config. Make sure the index cache is big enough to accommodate the biggest array task allowed to run on the system. - -**Default Value**: - -.. code-block:: yaml - - maxItems: 10000 - maxRetries: 3 - workers: 10 - - -workqueue.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -workers (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Number of concurrent workers to start processing the queue. - -**Default Value**: - -.. code-block:: yaml - - "10" - - -maxRetries (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum number of retries per item. - -**Default Value**: - -.. code-block:: yaml - - "3" - - -maxItems (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum number of entries to keep in the index. - -**Default Value**: - -.. code-block:: yaml - - "10000" - - -common.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - 1m0s - - -config.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -endpoint (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Endpoint for qubole to use - -**Default Value**: - -.. code-block:: yaml - - https://wellness.qubole.com - - -commandApiPath (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -API Path where commands can be launched on Qubole. Should be a valid url. - -**Default Value**: - -.. code-block:: yaml - - /api/v1.2/commands/ - - -analyzeLinkPath (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -URL path where queries can be visualized on qubole website. Should be a valid url. - -**Default Value**: - -.. code-block:: yaml - - /v2/analyze - - -quboleTokenKey (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the key where to find Qubole token in the secret manager. - -**Default Value**: - -.. code-block:: yaml - - FLYTE_QUBOLE_CLIENT_TOKEN - - -lruCacheSize (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Size of the AutoRefreshCache - -**Default Value**: - -.. code-block:: yaml - - "2000" - - -workers (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Number of parallel workers to refresh the cache - -**Default Value**: - -.. 
code-block:: yaml - - "15" - - -defaultClusterLabel (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The default cluster label. This will be used if label is not specified on the hive job. - -**Default Value**: - -.. code-block:: yaml - - default - - -clusterConfigs ([]config.ClusterConfig) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - labels: - - default - limit: 100 - namespaceScopeQuotaProportionCap: 0.7 - primaryLabel: default - projectScopeQuotaProportionCap: 0.7 - - -destinationClusterConfigs ([]config.DestinationClusterConfig) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - [] - - -config.K8sPluginConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -inject-finalizer (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Instructs the plugin to inject a finalizer on startTask and remove it on task termination. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -default-annotations (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - - -default-labels (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - null - - -default-env-vars (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-env-vars-from-env (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-cpus (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines a default value for cpu for containers if not specified. - -**Default Value**: - -.. code-block:: yaml - - "1" - - -default-memory (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines a default value for memory for containers if not specified. - -**Default Value**: - -.. code-block:: yaml - - 1Gi - - -default-tolerations ([]v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-node-selector (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-affinity (v1.Affinity) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -scheduler-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines scheduler name. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -interruptible-tolerations ([]v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -interruptible-node-selector (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -interruptible-node-selector-requirement (v1.NodeSelectorRequirement) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -non-interruptible-node-selector-requirement (v1.NodeSelectorRequirement) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -resource-tolerations (map[v1.ResourceName][]v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -co-pilot (`config.FlyteCoPilotConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Co-Pilot Configuration - -**Default Value**: - -.. 
code-block:: yaml - - cpu: 500m - default-input-path: /var/flyte/inputs - default-output-path: /var/flyte/outputs - image: cr.flyte.org/flyteorg/flytecopilot:v0.0.15 - input-vol-name: flyte-inputs - memory: 128Mi - name: flyte-copilot- - output-vol-name: flyte-outputs - start-timeout: 1m40s - storage: "" - - -delete-resource-on-finalize (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Instructs the system to delete the resource upon successful execution of a k8s pod rather than have the k8s garbage collector clean it up. This ensures that no resources are kept around (potentially consuming cluster resources). This, however, will cause k8s log links to expire as soon as the resource is finalized. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -create-container-error-grace-period (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 3m0s - - -image-pull-backoff-grace-period (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 3m0s - - -gpu-device-node-label (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - k8s.amazonaws.com/accelerator - - -gpu-partition-size-node-label (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - k8s.amazonaws.com/gpu-partition-size - - -gpu-unpartitioned-node-selector-requirement (v1.NodeSelectorRequirement) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -gpu-unpartitioned-toleration (v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -gpu-resource-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - nvidia.com/gpu - - -default-pod-security-context (v1.PodSecurityContext) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-security-context (v1.SecurityContext) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -enable-host-networking-pod (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - - -default-pod-dns-config (v1.PodDNSConfig) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-pod-template-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the PodTemplate to use as the base for all k8s pods created by FlytePropeller. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -default-pod-template-resync (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Frequency of resyncing default pod templates - -**Default Value**: - -.. code-block:: yaml - - 30s - - -send-object-events (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -If true, will send k8s object events in TaskExecutionEvent updates. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -config.FlyteCoPilotConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Flyte co-pilot sidecar container name prefix. (additional bits will be added after this) - -**Default Value**: - -.. code-block:: yaml - - flyte-copilot- - - -image (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Flyte co-pilot Docker Image FQN - -**Default Value**: - -.. code-block:: yaml - - cr.flyte.org/flyteorg/flytecopilot:v0.0.15 - - -default-input-path (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default path where the volume should be mounted - -**Default Value**: - -.. code-block:: yaml - - /var/flyte/inputs - - -default-output-path (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default path where the volume should be mounted - -**Default Value**: - -.. 
code-block:: yaml - - /var/flyte/outputs - - -input-vol-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the data volume that is created for storing inputs - -**Default Value**: - -.. code-block:: yaml - - flyte-inputs - - -output-vol-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the data volume that is created for storing outputs - -**Default Value**: - -.. code-block:: yaml - - flyte-outputs - - -start-timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 1m40s - - -cpu (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Used to set cpu for co-pilot containers - -**Default Value**: - -.. code-block:: yaml - - 500m - - -memory (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Used to set memory for co-pilot containers - -**Default Value**: - -.. code-block:: yaml - - 128Mi - - -storage (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default storage limit for individual inputs / outputs - -**Default Value**: - -.. code-block:: yaml - - "" - - -resource.Quantity -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -i (`resource.int64Amount`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - {} - - -d (`resource.infDecAmount`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - - -s (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "1" - - -Format (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - DecimalSI - - -resource.infDecAmount -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Dec (inf.Dec) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -resource.int64Amount -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -value (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "1" - - -scale (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "0" - - -databricks.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -webApi (`webapi.PluginConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines config for the base WebAPI plugin. - -**Default Value**: - -.. 
code-block:: yaml - - caching: - maxSystemFailures: 5 - resyncInterval: 30s - size: 500000 - workers: 10 - readRateLimiter: - burst: 100 - qps: 10 - resourceMeta: null - resourceQuotas: - default: 1000 - writeRateLimiter: - burst: 100 - qps: 10 - - -resourceConstraints (`core.ResourceConstraintsSpec`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - NamespaceScopeResourceConstraint: - Value: 50 - ProjectScopeResourceConstraint: - Value: 100 - - -defaultWarehouse (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the default warehouse to use when running on Databricks unless overwritten by the task. - -**Default Value**: - -.. code-block:: yaml - - COMPUTE_CLUSTER - - -databricksTokenKey (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the key where to find Databricks token in the secret manager. - -**Default Value**: - -.. code-block:: yaml - - FLYTE_DATABRICKS_API_TOKEN - - -databricksInstance (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Databricks workspace instance name. - -**Default Value**: - -.. code-block:: yaml - - "" - - -entrypointFile (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -A URL of the entrypoint file. DBFS and cloud storage (s3://, gcs://, adls://, etc) locations are supported. - -**Default Value**: - -.. code-block:: yaml - - "" - - -databricksEndpoint (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -k8s.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -scheduler (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Decides the scheduler to use when launching array-pods. - -**Default Value**: - -.. code-block:: yaml - - "" - - -maxErrorLength (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Determines the maximum length of the error string returned for the array. - -**Default Value**: - -.. code-block:: yaml - - "1000" - - -maxArrayJobSize (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum size of array job. - -**Default Value**: - -.. code-block:: yaml - - "5000" - - -resourceConfig (`k8s.ResourceConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - limit: 0 - primaryLabel: "" - - -remoteClusterConfig (`k8s.ClusterConfig (remoteClusterConfig)`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - auth: - certPath: "" - tokenPath: "" - type: "" - enabled: false - endpoint: "" - name: "" - - -node-selector (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -tolerations ([]v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - null - - -namespaceTemplate (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -OutputAssembler (`workqueue.Config`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - maxItems: 100000 - maxRetries: 5 - workers: 10 - - -ErrorAssembler (`workqueue.Config`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - maxItems: 100000 - maxRetries: 5 - workers: 10 - - -logs (`k8s.LogConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Config for log links for k8s array jobs. - -**Default Value**: - -.. code-block:: yaml - - config: - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: true - kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName - }}/pod?namespace={{ .namespace }} - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - - -k8s.ClusterConfig (remoteClusterConfig) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Friendly name of the remote cluster - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -endpoint (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Remote K8s cluster endpoint - -**Default Value**: - -.. code-block:: yaml - - "" - - -auth (`k8s.Auth (auth)`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - certPath: "" - tokenPath: "" - type: "" - - -enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Boolean flag to enable or disable - -**Default Value**: - -.. code-block:: yaml - - "false" - - -k8s.Auth (auth) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Authentication type - -**Default Value**: - -.. code-block:: yaml - - "" - - -tokenPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Token path - -**Default Value**: - -.. code-block:: yaml - - "" - - -certPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Certificate path - -**Default Value**: - -.. code-block:: yaml - - "" - - -k8s.LogConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -config (`logs.LogConfig (config)`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the log config for k8s logs. - -**Default Value**: - -.. 
code-block:: yaml - - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: true - kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName - }}/pod?namespace={{ .namespace }} - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - - -logs.LogConfig (config) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -cloudwatch-enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable Cloudwatch Logging - -**Default Value**: - -.. code-block:: yaml - - "false" - - -cloudwatch-region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -AWS region in which Cloudwatch logs are stored. - -**Default Value**: - -.. code-block:: yaml - - "" - - -cloudwatch-log-group (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Log group to which streams are associated. - -**Default Value**: - -.. code-block:: yaml - - "" - - -cloudwatch-template-uri (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Template Uri to use when building cloudwatch log links - -**Default Value**: - -.. code-block:: yaml - - "" - - -kubernetes-enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable Kubernetes Logging - -**Default Value**: - -.. 
code-block:: yaml - - "true" - - -kubernetes-url (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Console URL for Kubernetes logs - -**Default Value**: - -.. code-block:: yaml - - "" - - -kubernetes-template-uri (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Template Uri to use when building kubernetes log links - -**Default Value**: - -.. code-block:: yaml - - http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName }}/pod?namespace={{ .namespace - }} - - -stackdriver-enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable Log-links to stackdriver - -**Default Value**: - -.. code-block:: yaml - - "false" - - -gcp-project (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the project in GCP - -**Default Value**: - -.. code-block:: yaml - - "" - - -stackdriver-logresourcename (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the logresource in stackdriver - -**Default Value**: - -.. code-block:: yaml - - "" - - -stackdriver-template-uri (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Template Uri to use when building stackdriver log links - -**Default Value**: - -.. code-block:: yaml - - "" - - -templates ([]tasklog.TemplateLogPlugin) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - null - - -k8s.ResourceConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -primaryLabel (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -PrimaryLabel of a given service cluster - -**Default Value**: - -.. code-block:: yaml - - "" - - -limit (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Resource quota (in the number of outstanding requests) for the cluster - -**Default Value**: - -.. code-block:: yaml - - "0" - - -logs.LogConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -cloudwatch-enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable Cloudwatch Logging - -**Default Value**: - -.. code-block:: yaml - - "false" - - -cloudwatch-region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -AWS region in which Cloudwatch logs are stored. - -**Default Value**: - -.. code-block:: yaml - - "" - - -cloudwatch-log-group (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Log group to which streams are associated. - -**Default Value**: - -.. code-block:: yaml - - "" - - -cloudwatch-template-uri (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Template Uri to use when building cloudwatch log links - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -kubernetes-enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable Kubernetes Logging - -**Default Value**: - -.. code-block:: yaml - - "true" - - -kubernetes-url (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Console URL for Kubernetes logs - -**Default Value**: - -.. code-block:: yaml - - "" - - -kubernetes-template-uri (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Template Uri to use when building kubernetes log links - -**Default Value**: - -.. code-block:: yaml - - http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName }}/pod?namespace={{ .namespace - }} - - -stackdriver-enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable Log-links to stackdriver - -**Default Value**: - -.. code-block:: yaml - - "false" - - -gcp-project (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the project in GCP - -**Default Value**: - -.. code-block:: yaml - - "" - - -stackdriver-logresourcename (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the logresource in stackdriver - -**Default Value**: - -.. code-block:: yaml - - "" - - -stackdriver-template-uri (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Template Uri to use when building stackdriver log links - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -templates ([]tasklog.TemplateLogPlugin) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -ray.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -shutdownAfterJobFinishes (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "true" - - -ttlSecondsAfterFinished (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "3600" - - -serviceType (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - NodePort - - -includeDashboard (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "true" - - -dashboardHost (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 0.0.0.0 - - -nodeIPAddress (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -remoteClusterConfig (`k8s.ClusterConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Configuration of remote K8s cluster for ray jobs - -**Default Value**: - -.. 
code-block:: yaml - - auth: - caCertPath: "" - tokenPath: "" - enabled: false - endpoint: "" - name: "" - - -logs (`logs.LogConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: false - kubernetes-template-uri: "" - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - - -logsSidecar (v1.Container) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -dashboardURLTemplate (tasklog.TemplateLogPlugin) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Template for URL of Ray dashboard running on a head node. - -**Default Value**: - -.. code-block:: yaml - - null - - -defaults (`ray.DefaultConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - headNode: - ipAddress: $MY_POD_IP - startParameters: - disable-usage-stats: "true" - workerNode: - ipAddress: $MY_POD_IP - startParameters: - disable-usage-stats: "true" - - -enableUsageStats (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable usage stats for ray jobs. These stats are submitted to usage-stats.ray.io per https://docs.ray.io/en/latest/cluster/usage-stats.html - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -ray.DefaultConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -headNode (`ray.NodeConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - ipAddress: $MY_POD_IP - startParameters: - disable-usage-stats: "true" - - -workerNode (`ray.NodeConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - ipAddress: $MY_POD_IP - startParameters: - disable-usage-stats: "true" - - -ray.NodeConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -startParameters (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - disable-usage-stats: "true" - - -ipAddress (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - $MY_POD_IP - - -snowflake.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -webApi (`webapi.PluginConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines config for the base WebAPI plugin. - -**Default Value**: - -.. 
code-block:: yaml - - caching: - maxSystemFailures: 5 - resyncInterval: 30s - size: 500000 - workers: 10 - readRateLimiter: - burst: 100 - qps: 10 - resourceMeta: null - resourceQuotas: - default: 1000 - writeRateLimiter: - burst: 100 - qps: 10 - - -resourceConstraints (`core.ResourceConstraintsSpec`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - NamespaceScopeResourceConstraint: - Value: 50 - ProjectScopeResourceConstraint: - Value: 100 - - -defaultWarehouse (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the default warehouse to use when running on Snowflake unless overwritten by the task. - -**Default Value**: - -.. code-block:: yaml - - COMPUTE_WH - - -snowflakeTokenKey (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the key where to find Snowflake token in the secret manager. - -**Default Value**: - -.. code-block:: yaml - - FLYTE_SNOWFLAKE_CLIENT_TOKEN - - -snowflakeEndpoint (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -spark.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -spark-config-default (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -spark-history-server-url (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -URL for SparkHistory Server that each job will publish the execution history to. 
- -**Default Value**: - -.. code-block:: yaml - - "" - - -features ([]spark.Feature) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -logs (`spark.LogConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Config for log links for spark applications. - -**Default Value**: - -.. code-block:: yaml - - all-user: - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: false - kubernetes-template-uri: "" - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - mixed: - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: true - kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName - }}/pod?namespace={{ .namespace }} - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - system: - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: false - kubernetes-template-uri: "" - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - user: - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: false - kubernetes-template-uri: "" - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - - -spark.LogConfig 
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -mixed (`logs.LogConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the log config that's not split into user/system. - -**Default Value**: - -.. code-block:: yaml - - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: true - kubernetes-template-uri: http://localhost:30082/#!/log/{{ .namespace }}/{{ .podName - }}/pod?namespace={{ .namespace }} - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - - -user (`logs.LogConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the log config for user logs. - -**Default Value**: - -.. code-block:: yaml - - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: false - kubernetes-template-uri: "" - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - - -system (`logs.LogConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the log config for system logs. - -**Default Value**: - -.. 
code-block:: yaml - - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: false - kubernetes-template-uri: "" - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - - -all-user (`logs.LogConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -All user logs across driver and executors. - -**Default Value**: - -.. code-block:: yaml - - cloudwatch-enabled: false - cloudwatch-log-group: "" - cloudwatch-region: "" - cloudwatch-template-uri: "" - gcp-project: "" - kubernetes-enabled: false - kubernetes-template-uri: "" - kubernetes-url: "" - stackdriver-enabled: false - stackdriver-logresourcename: "" - stackdriver-template-uri: "" - templates: null - - -testing.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -sleep-duration (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Indicates the amount of time before transitioning to success - -**Default Value**: - -.. code-block:: yaml - - 0s - - -Section: propeller -======================================================================================================================== - -kube-config (string) ------------------------------------------------------------------------------------------------------------------------- - -Path to kubernetes client config file. - -**Default Value**: - -.. code-block:: yaml - - "" - - -master (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -workers (int) ------------------------------------------------------------------------------------------------------------------------- - -Number of threads to process workflows - -**Default Value**: - -.. code-block:: yaml - - "20" - - -workflow-reeval-duration (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Frequency of re-evaluating workflows - -**Default Value**: - -.. code-block:: yaml - - 10s - - -downstream-eval-duration (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Frequency of re-evaluating downstream tasks - -**Default Value**: - -.. code-block:: yaml - - 30s - - -limit-namespace (string) ------------------------------------------------------------------------------------------------------------------------- - -Namespaces to watch for this propeller - -**Default Value**: - -.. code-block:: yaml - - all - - -prof-port (`config.Port`_) ------------------------------------------------------------------------------------------------------------------------- - -Profiler port - -**Default Value**: - -.. code-block:: yaml - - 10254 - - -metadata-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -MetadataPrefix should be used if all the metadata for Flyte executions should be stored under a specific prefix in CloudStorage. If not specified, the data will be stored in the base container directly. - -**Default Value**: - -.. code-block:: yaml - - metadata/propeller - - -rawoutput-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -a fully qualified storage path of the form s3://flyte/abc/..., where all data sandboxes should be stored. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -queue (`config.CompositeQueueConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Workflow workqueue configuration, affects the way the work is consumed from the queue. - -**Default Value**: - -.. code-block:: yaml - - batch-size: -1 - batching-interval: 1s - queue: - base-delay: 0s - capacity: 10000 - max-delay: 1m0s - rate: 1000 - type: maxof - sub-queue: - base-delay: 0s - capacity: 10000 - max-delay: 0s - rate: 1000 - type: bucket - type: batch - - -metrics-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -An optional prefix for all published metrics. - -**Default Value**: - -.. code-block:: yaml - - flyte - - -metrics-keys ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Metrics labels applied to prometheus metrics emitted by the service. - -**Default Value**: - -.. code-block:: yaml - - - project - - domain - - wf - - task - - -enable-admin-launcher (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable remote Workflow launcher to Admin - -**Default Value**: - -.. code-block:: yaml - - "true" - - -max-workflow-retries (int) ------------------------------------------------------------------------------------------------------------------------- - -Maximum number of retries per workflow - -**Default Value**: - -.. code-block:: yaml - - "10" - - -max-ttl-hours (int) ------------------------------------------------------------------------------------------------------------------------- - -Maximum number of hours a completed workflow should be retained. Number between 1-23 hours - -**Default Value**: - -.. 
code-block:: yaml - - "23" - - -gc-interval (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Run periodic GC every 30 minutes - -**Default Value**: - -.. code-block:: yaml - - 30m0s - - -leader-election (`config.LeaderElectionConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Config for leader election. - -**Default Value**: - -.. code-block:: yaml - - enabled: false - lease-duration: 15s - lock-config-map: - Name: "" - Namespace: "" - renew-deadline: 10s - retry-period: 2s - - -publish-k8s-events (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable events publishing to K8s events API. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -max-output-size-bytes (int64) ------------------------------------------------------------------------------------------------------------------------- - -Maximum size of outputs per task - -**Default Value**: - -.. code-block:: yaml - - "10485760" - - -enable-grpc-latency-metrics (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable grpc latency metrics. Note Histograms metrics can be expensive on Prometheus servers. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -kube-client-config (`config.KubeClientConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration to control the Kubernetes client - -**Default Value**: - -.. 
code-block:: yaml - - burst: 25 - qps: 100 - timeout: 30s - - -node-config (`config.NodeConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -config for a workflow node - -**Default Value**: - -.. code-block:: yaml - - default-deadlines: - node-active-deadline: 0s - node-execution-deadline: 0s - workflow-active-deadline: 0s - default-max-attempts: 1 - ignore-retry-cause: false - interruptible-failure-threshold: -1 - max-node-retries-system-failures: 3 - - -max-streak-length (int) ------------------------------------------------------------------------------------------------------------------------- - -Maximum number of consecutive rounds that one propeller worker can use for one workflow - >1 => turbo-mode is enabled. - -**Default Value**: - -.. code-block:: yaml - - "8" - - -event-config (`config.EventConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configures execution event behavior. - -**Default Value**: - -.. code-block:: yaml - - fallback-to-output-reference: false - raw-output-policy: reference - - -include-shard-key-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Include the specified shard key label in the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -exclude-shard-key-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Exclude the specified shard key label from the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. 
code-block:: yaml - - [] - - -include-project-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Include the specified project label in the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -exclude-project-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Exclude the specified project label from the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -include-domain-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Include the specified domain label in the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -exclude-domain-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Exclude the specified domain label from the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -cluster-id (string) ------------------------------------------------------------------------------------------------------------------------- - -Unique cluster id running this flytepropeller instance with which to annotate execution events - -**Default Value**: - -.. code-block:: yaml - - propeller - - -create-flyteworkflow-crd (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable creation of the FlyteWorkflow CRD on startup - -**Default Value**: - -.. code-block:: yaml - - "false" - - -array-node-event-version (int) ------------------------------------------------------------------------------------------------------------------------- - -ArrayNode eventing version. 
0 => legacy (drop-in replacement for maptask), 1 => new - -**Default Value**: - -.. code-block:: yaml - - "0" - - -admin-launcher (`launchplan.AdminConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - burst: 10 - cacheSize: 10000 - tps: 100 - workers: 10 - - -resourcemanager (`config.Config (resourcemanager)`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - redis: - hostKey: "" - hostPath: "" - hostPaths: [] - maxRetries: 0 - primaryName: "" - resourceMaxQuota: 1000 - type: noop - - -workflowstore (`workflowstore.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - policy: ResourceVersionCache - - -config.CompositeQueueConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Type of composite queue to use for the WorkQueue - -**Default Value**: - -.. code-block:: yaml - - batch - - -queue (`config.WorkqueueConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Workflow workqueue configuration, affects the way the work is consumed from the queue. - -**Default Value**: - -.. 
code-block:: yaml - - base-delay: 0s - capacity: 10000 - max-delay: 1m0s - rate: 1000 - type: maxof - - -sub-queue (`config.WorkqueueConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -SubQueue configuration, affects the way the nodes cause the top-level Work to be re-evaluated. - -**Default Value**: - -.. code-block:: yaml - - base-delay: 0s - capacity: 10000 - max-delay: 0s - rate: 1000 - type: bucket - - -batching-interval (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration for which downstream updates are buffered - -**Default Value**: - -.. code-block:: yaml - - 1s - - -batch-size (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "-1" - - -config.WorkqueueConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Type of RateLimiter to use for the WorkQueue - -**Default Value**: - -.. code-block:: yaml - - maxof - - -base-delay (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -base backoff delay for failure - -**Default Value**: - -.. code-block:: yaml - - 0s - - -max-delay (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max backoff delay for failure - -**Default Value**: - -.. 
code-block:: yaml - - 1m0s - - -rate (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Bucket Refill rate per second - -**Default Value**: - -.. code-block:: yaml - - "1000" - - -capacity (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Bucket capacity as number of items - -**Default Value**: - -.. code-block:: yaml - - "10000" - - -config.Config (resourcemanager) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Which resource manager to use - -**Default Value**: - -.. code-block:: yaml - - noop - - -resourceMaxQuota (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Global limit for concurrent Qubole queries - -**Default Value**: - -.. code-block:: yaml - - "1000" - - -redis (`config.RedisConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Config for Redis resourcemanager. - -**Default Value**: - -.. code-block:: yaml - - hostKey: "" - hostPath: "" - hostPaths: [] - maxRetries: 0 - primaryName: "" - - -config.RedisConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -hostPaths ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Redis hosts locations. - -**Default Value**: - -.. 
code-block:: yaml - - [] - - -primaryName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Redis primary name, fill in only if you are connecting to a redis sentinel cluster. - -**Default Value**: - -.. code-block:: yaml - - "" - - -hostPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Redis host location - -**Default Value**: - -.. code-block:: yaml - - "" - - -hostKey (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Key for local Redis access - -**Default Value**: - -.. code-block:: yaml - - "" - - -maxRetries (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -See Redis client options for more info - -**Default Value**: - -.. code-block:: yaml - - "0" - - -config.EventConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -raw-output-policy (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -How output data should be passed along in execution events. - -**Default Value**: - -.. code-block:: yaml - - reference - - -fallback-to-output-reference (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Whether output data should be sent by reference when it is too large to be sent inline in execution events. - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -config.KubeClientConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -qps (float32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "100" - - -burst (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max burst rate for throttle. 0 defaults to 10 - -**Default Value**: - -.. code-block:: yaml - - "25" - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max duration allowed for every request to KubeAPI before giving up. 0 implies no timeout. - -**Default Value**: - -.. code-block:: yaml - - 30s - - -config.LeaderElectionConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enables/Disables leader election. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -lock-config-map (`types.NamespacedName`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -ConfigMap namespace/name to use for resource lock. - -**Default Value**: - -.. code-block:: yaml - - Name: "" - Namespace: "" - - -lease-duration (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration that non-leader candidates will wait to force acquire leadership. This is measured against time of last observed ack. - -**Default Value**: - -.. 
code-block:: yaml - - 15s - - -renew-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration that the acting master will retry refreshing leadership before giving up. - -**Default Value**: - -.. code-block:: yaml - - 10s - - -retry-period (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration the LeaderElector clients should wait between tries of actions. - -**Default Value**: - -.. code-block:: yaml - - 2s - - -types.NamespacedName -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Namespace (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -config.NodeConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -default-deadlines (`config.DefaultDeadlines`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value for timeouts - -**Default Value**: - -.. code-block:: yaml - - node-active-deadline: 0s - node-execution-deadline: 0s - workflow-active-deadline: 0s - - -max-node-retries-system-failures (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum number of retries per node for node failure due to infra issues - -**Default Value**: - -.. 
code-block:: yaml - - "3" - - -interruptible-failure-threshold (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -number of failures for a node to be still considered interruptible. Negative numbers are treated as complementary (ex. -1 means last attempt is non-interruptible).' - -**Default Value**: - -.. code-block:: yaml - - "-1" - - -default-max-attempts (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default maximum number of attempts for a node - -**Default Value**: - -.. code-block:: yaml - - "1" - - -ignore-retry-cause (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Ignore retry cause and count all attempts toward a node's max attempts - -**Default Value**: - -.. code-block:: yaml - - "false" - - -config.DefaultDeadlines -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -node-execution-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value of node execution timeout that includes the time spent to run the node/workflow - -**Default Value**: - -.. code-block:: yaml - - 0s - - -node-active-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value of node timeout that includes the time spent queued. - -**Default Value**: - -.. code-block:: yaml - - 0s - - -workflow-active-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value of workflow timeout that includes the time spent queued. - -**Default Value**: - -.. 
code-block:: yaml - - 0s - - -config.Port -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -port (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "10254" - - -launchplan.AdminConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -tps (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The maximum number of transactions per second to flyte admin from this client. - -**Default Value**: - -.. code-block:: yaml - - "100" - - -burst (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum burst for throttle - -**Default Value**: - -.. code-block:: yaml - - "10" - - -cacheSize (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum cache in terms of number of items stored. - -**Default Value**: - -.. code-block:: yaml - - "10000" - - -workers (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Number of parallel workers to work on the queue. - -**Default Value**: - -.. code-block:: yaml - - "10" - - -workflowstore.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -policy (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Workflow Store Policy to initialize - -**Default Value**: - -.. 
code-block:: yaml - - ResourceVersionCache - - -Section: secrets -======================================================================================================================== - -secrets-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -Prefix where to look for secrets file - -**Default Value**: - -.. code-block:: yaml - - /etc/secrets - - -env-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -Prefix for environment variables - -**Default Value**: - -.. code-block:: yaml - - FLYTE_SECRET_ - - -Section: storage -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -Sets the type of storage to configure [s3/minio/local/mem/stow]. - -**Default Value**: - -.. code-block:: yaml - - s3 - - -connection (`storage.ConnectionConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - access-key: "" - auth-type: iam - disable-ssl: false - endpoint: "" - region: us-east-1 - secret-key: "" - - -stow (`storage.StowConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Storage config for stow backend. - -**Default Value**: - -.. code-block:: yaml - - {} - - -container (string) ------------------------------------------------------------------------------------------------------------------------- - -Initial container (in s3 a bucket) to create -if it doesn't exist-.' - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -enable-multicontainer (bool) ------------------------------------------------------------------------------------------------------------------------- - -If this is true, then the container argument is overlooked and redundant. This config will automatically open new connections to new containers/buckets as they are encountered - -**Default Value**: - -.. code-block:: yaml - - "false" - - -cache (`storage.CachingConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - max_size_mbs: 0 - target_gc_percent: 0 - - -limits (`storage.LimitsConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets limits for stores. - -**Default Value**: - -.. code-block:: yaml - - maxDownloadMBs: 2 - - -defaultHttpClient (`storage.HTTPClientConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets the default http client config. - -**Default Value**: - -.. code-block:: yaml - - headers: null - timeout: 0s - - -signedUrl (`storage.SignedURLConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets config for SignedURL. - -**Default Value**: - -.. code-block:: yaml - - {} - - -storage.CachingConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -max_size_mbs (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used - -**Default Value**: - -.. 
code-block:: yaml - - "0" - - -target_gc_percent (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets the garbage collection target percentage. - -**Default Value**: - -.. code-block:: yaml - - "0" - - -storage.ConnectionConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -endpoint (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -URL for storage client to connect to. - -**Default Value**: - -.. code-block:: yaml - - "" - - -auth-type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Auth Type to use [iam,accesskey]. - -**Default Value**: - -.. code-block:: yaml - - iam - - -access-key (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Access key to use. Only required when authtype is set to accesskey. - -**Default Value**: - -.. code-block:: yaml - - "" - - -secret-key (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Secret to use when accesskey is set. - -**Default Value**: - -.. code-block:: yaml - - "" - - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Region to connect to. - -**Default Value**: - -.. code-block:: yaml - - us-east-1 - - -disable-ssl (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Disables SSL connection. Should only be used for development. - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -storage.HTTPClientConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -headers (map[string][]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets time out on the http client. - -**Default Value**: - -.. code-block:: yaml - - 0s - - -storage.LimitsConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -maxDownloadMBs (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum allowed download size (in MBs) per call. - -**Default Value**: - -.. code-block:: yaml - - "2" - - -storage.SignedURLConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -stowConfigOverride (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -storage.StowConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -kind (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Kind of Stow backend to use. Refer to github/flyteorg/stow - -**Default Value**: - -.. code-block:: yaml - - "" - - -config (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Configuration for stow backend. 
Refer to github/flyteorg/stow - -**Default Value**: - -.. code-block:: yaml - - {} - - -Section: tasks -======================================================================================================================== - -task-plugins (`config.TaskPluginConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Task plugin configuration - -**Default Value**: - -.. code-block:: yaml - - default-for-task-types: {} - enabled-plugins: [] - - -max-plugin-phase-versions (int32) ------------------------------------------------------------------------------------------------------------------------- - -Maximum number of plugin phase versions allowed for one phase. - -**Default Value**: - -.. code-block:: yaml - - "100000" - - -backoff (`config.BackOffConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Config for Exponential BackOff implementation - -**Default Value**: - -.. code-block:: yaml - - base-second: 2 - max-duration: 20s - - -maxLogMessageLength (int) ------------------------------------------------------------------------------------------------------------------------- - -Deprecated!!! Max length of error message. - -**Default Value**: - -.. code-block:: yaml - - "0" - - -config.BackOffConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -base-second (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The number of seconds representing the base duration of the exponential backoff - -**Default Value**: - -.. 
code-block:: yaml - - "2" - - -max-duration (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The cap of the backoff duration - -**Default Value**: - -.. code-block:: yaml - - 20s - - -config.TaskPluginConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -enabled-plugins ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Plugins enabled currently - -**Default Value**: - -.. code-block:: yaml - - [] - - -default-for-task-types (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - {} - - -Section: webhook -======================================================================================================================== - -metrics-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -An optional prefix for all published metrics. - -**Default Value**: - -.. code-block:: yaml - - 'flyte:' - - -certDir (string) ------------------------------------------------------------------------------------------------------------------------- - -Certificate directory to use to write generated certs. Defaults to /etc/webhook/certs/ - -**Default Value**: - -.. code-block:: yaml - - /etc/webhook/certs - - -localCert (bool) ------------------------------------------------------------------------------------------------------------------------- - -write certs locally. Defaults to false - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -listenPort (int) ------------------------------------------------------------------------------------------------------------------------- - -The port to use to listen to webhook calls. Defaults to 9443 - -**Default Value**: - -.. code-block:: yaml - - "9443" - - -serviceName (string) ------------------------------------------------------------------------------------------------------------------------- - -The name of the webhook service. - -**Default Value**: - -.. code-block:: yaml - - flyte-pod-webhook - - -servicePort (int32) ------------------------------------------------------------------------------------------------------------------------- - -The port on the service that hosting webhook. - -**Default Value**: - -.. code-block:: yaml - - "443" - - -secretName (string) ------------------------------------------------------------------------------------------------------------------------- - -Secret name to write generated certs to. - -**Default Value**: - -.. code-block:: yaml - - flyte-pod-webhook - - -secretManagerType (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - K8s - - -awsSecretManager (`config.AWSSecretManagerConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -AWS Secret Manager config. - -**Default Value**: - -.. code-block:: yaml - - resources: - limits: - cpu: 200m - memory: 500Mi - requests: - cpu: 200m - memory: 500Mi - sidecarImage: docker.io/amazon/aws-secrets-manager-secret-sidecar:v0.1.4 - - -gcpSecretManager (`config.GCPSecretManagerConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -GCP Secret Manager config. - -**Default Value**: - -.. 
code-block:: yaml - - resources: - limits: - cpu: 200m - memory: 500Mi - requests: - cpu: 200m - memory: 500Mi - sidecarImage: gcr.io/google.com/cloudsdktool/cloud-sdk:alpine - - -vaultSecretManager (`config.VaultSecretManagerConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Vault Secret Manager config. - -**Default Value**: - -.. code-block:: yaml - - annotations: null - kvVersion: "2" - role: flyte - - -config.AWSSecretManagerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -sidecarImage (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Specifies the sidecar docker image to use - -**Default Value**: - -.. code-block:: yaml - - docker.io/amazon/aws-secrets-manager-secret-sidecar:v0.1.4 - - -resources (`v1.ResourceRequirements`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - limits: - cpu: 200m - memory: 500Mi - requests: - cpu: 200m - memory: 500Mi - - -v1.ResourceRequirements -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -limits (v1.ResourceList) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - cpu: 200m - memory: 500Mi - - -requests (v1.ResourceList) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - cpu: 200m - memory: 500Mi - - -claims ([]v1.ResourceClaim) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -config.GCPSecretManagerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -sidecarImage (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Specifies the sidecar docker image to use - -**Default Value**: - -.. code-block:: yaml - - gcr.io/google.com/cloudsdktool/cloud-sdk:alpine - - -resources (`v1.ResourceRequirements`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - limits: - cpu: 200m - memory: 500Mi - requests: - cpu: 200m - memory: 500Mi - - -config.VaultSecretManagerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -role (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Specifies the vault role to use - -**Default Value**: - -.. code-block:: yaml - - flyte - - -kvVersion (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "2" - - -annotations (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - null - - diff --git a/rsts/deployment/configuration/generated/index.rst b/rsts/deployment/configuration/generated/index.rst deleted file mode 100644 index 101eac2594..0000000000 --- a/rsts/deployment/configuration/generated/index.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. _deployment-configuration-generated: - -######################### -Configuration Reference -######################### - -This section documents the configuration settings for Flyte's backend services. - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. link-button:: flytescheduler-config-specification - :type: ref - :text: Flyte Scheduler - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - The Flyte workflow scheduler service. - - --- - - .. link-button:: flytedatacatalog-config-specification - :type: ref - :text: Data Catalog - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - The Flyte Data Memoization (datacatalog) service. - - --- - - .. link-button:: flyteadmin-config-specification - :type: ref - :text: Flyte Admin - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - The Flyte control plane. - - --- - - .. link-button:: flytepropeller-config-specification - :type: url - :text: Flytectl - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Execution engine configuration - -.. toctree:: - :maxdepth: 1 - :caption: Generated Configuration Documentation - :name: generatedconfigdoctoc - :hidden: - - datacatalog_config - flyteadmin_config - flytepropeller_config - scheduler_config diff --git a/rsts/deployment/configuration/generated/scheduler_config.rst b/rsts/deployment/configuration/generated/scheduler_config.rst deleted file mode 100644 index 81eb27bc44..0000000000 --- a/rsts/deployment/configuration/generated/scheduler_config.rst +++ /dev/null @@ -1,5325 +0,0 @@ -.. 
_flytescheduler-config-specification: - -######################################### -Flyte Scheduler Configuration -######################################### - -- `admin <#section-admin>`_ - -- `auth <#section-auth>`_ - -- `cloudevents <#section-cloudevents>`_ - -- `cluster_resources <#section-cluster_resources>`_ - -- `clusterpools <#section-clusterpools>`_ - -- `clusters <#section-clusters>`_ - -- `database <#section-database>`_ - -- `domains <#section-domains>`_ - -- `externalevents <#section-externalevents>`_ - -- `flyteadmin <#section-flyteadmin>`_ - -- `logger <#section-logger>`_ - -- `namespace_mapping <#section-namespace_mapping>`_ - -- `notifications <#section-notifications>`_ - -- `otel <#section-otel>`_ - -- `plugins <#section-plugins>`_ - -- `propeller <#section-propeller>`_ - -- `qualityofservice <#section-qualityofservice>`_ - -- `queues <#section-queues>`_ - -- `registration <#section-registration>`_ - -- `remotedata <#section-remotedata>`_ - -- `scheduler <#section-scheduler>`_ - -- `secrets <#section-secrets>`_ - -- `server <#section-server>`_ - -- `storage <#section-storage>`_ - -- `task_resources <#section-task_resources>`_ - -- `task_type_whitelist <#section-task_type_whitelist>`_ - -Section: admin -======================================================================================================================== - -endpoint (`config.URL`_) ------------------------------------------------------------------------------------------------------------------------- - -For admin types, specify where the uri of the service is located. - -**Default Value**: - -.. code-block:: yaml - - "" - - -insecure (bool) ------------------------------------------------------------------------------------------------------------------------- - -Use insecure connection. - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -insecureSkipVerify (bool) ------------------------------------------------------------------------------------------------------------------------- - -InsecureSkipVerify controls whether a client verifies the server's certificate chain and host name. Caution : shouldn't be use for production usecases' - -**Default Value**: - -.. code-block:: yaml - - "false" - - -caCertFilePath (string) ------------------------------------------------------------------------------------------------------------------------- - -Use specified certificate file to verify the admin server peer. - -**Default Value**: - -.. code-block:: yaml - - "" - - -maxBackoffDelay (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Max delay for grpc backoff - -**Default Value**: - -.. code-block:: yaml - - 8s - - -perRetryTimeout (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -gRPC per retry timeout - -**Default Value**: - -.. code-block:: yaml - - 15s - - -maxRetries (int) ------------------------------------------------------------------------------------------------------------------------- - -Max number of gRPC retries - -**Default Value**: - -.. code-block:: yaml - - "4" - - -authType (uint8) ------------------------------------------------------------------------------------------------------------------------- - -Type of OAuth2 flow used for communicating with admin.ClientSecret,Pkce,ExternalCommand are valid values - -**Default Value**: - -.. code-block:: yaml - - ClientSecret - - -tokenRefreshWindow (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Max duration between token refresh attempt and token expiry. - -**Default Value**: - -.. 
code-block:: yaml - - 0s - - -useAuth (bool) ------------------------------------------------------------------------------------------------------------------------- - -Deprecated: Auth will be enabled/disabled based on admin's dynamically discovered information. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -clientId (string) ------------------------------------------------------------------------------------------------------------------------- - -Client ID - -**Default Value**: - -.. code-block:: yaml - - flytepropeller - - -clientSecretLocation (string) ------------------------------------------------------------------------------------------------------------------------- - -File containing the client secret - -**Default Value**: - -.. code-block:: yaml - - /etc/secrets/client_secret - - -clientSecretEnvVar (string) ------------------------------------------------------------------------------------------------------------------------- - -Environment variable containing the client secret - -**Default Value**: - -.. code-block:: yaml - - "" - - -scopes ([]string) ------------------------------------------------------------------------------------------------------------------------- - -List of scopes to request - -**Default Value**: - -.. code-block:: yaml - - [] - - -useAudienceFromAdmin (bool) ------------------------------------------------------------------------------------------------------------------------- - -Use Audience configured from admins public endpoint config. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -audience (string) ------------------------------------------------------------------------------------------------------------------------- - -Audience to use when initiating OAuth2 authorization requests. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -authorizationServerUrl (string) ------------------------------------------------------------------------------------------------------------------------- - -This is the URL to your IdP's authorization server. It'll default to Endpoint - -**Default Value**: - -.. code-block:: yaml - - "" - - -tokenUrl (string) ------------------------------------------------------------------------------------------------------------------------- - -OPTIONAL: Your IdP's token endpoint. It'll be discovered from flyte admin's OAuth Metadata endpoint if not provided. - -**Default Value**: - -.. code-block:: yaml - - "" - - -authorizationHeader (string) ------------------------------------------------------------------------------------------------------------------------- - -Custom metadata header to pass JWT - -**Default Value**: - -.. code-block:: yaml - - "" - - -pkceConfig (`pkce.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -Config for Pkce authentication flow. - -**Default Value**: - -.. code-block:: yaml - - refreshTime: 5m0s - timeout: 2m0s - - -deviceFlowConfig (`deviceflow.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -Config for Device authentication flow. - -**Default Value**: - -.. code-block:: yaml - - pollInterval: 5s - refreshTime: 5m0s - timeout: 10m0s - - -command ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Command for external authentication token generation - -**Default Value**: - -.. code-block:: yaml - - [] - - -proxyCommand ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Command for external proxy-authorization token generation - -**Default Value**: - -.. 
code-block:: yaml - - [] - - -defaultServiceConfig (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -httpProxyURL (`config.URL`_) ------------------------------------------------------------------------------------------------------------------------- - -OPTIONAL: HTTP Proxy to be used for OAuth requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -config.Duration -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Duration (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 8s - - -config.URL -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -URL (`url.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - ForceQuery: false - Fragment: "" - Host: "" - OmitHost: false - Opaque: "" - Path: "" - RawFragment: "" - RawPath: "" - RawQuery: "" - Scheme: "" - User: null - - -url.URL -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Scheme (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Opaque (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -User (url.Userinfo) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -Host (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Path (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -RawPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -OmitHost (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -ForceQuery (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -RawQuery (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Fragment (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -RawFragment (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -deviceflow.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -refreshTime (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -grace period from the token expiry after which it would refresh the token. - -**Default Value**: - -.. code-block:: yaml - - 5m0s - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -amount of time the device flow should complete or else it will be cancelled. - -**Default Value**: - -.. code-block:: yaml - - 10m0s - - -pollInterval (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -amount of time the device flow would poll the token endpoint if auth server doesn't return a polling interval. Okta and google IDP do return an interval' - -**Default Value**: - -.. code-block:: yaml - - 5s - - -pkce.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Amount of time the browser session would be active for authentication from client app. - -**Default Value**: - -.. code-block:: yaml - - 2m0s - - -refreshTime (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -grace period from the token expiry after which it would refresh the token. - -**Default Value**: - -.. 
code-block:: yaml - - 5m0s - - -Section: auth -======================================================================================================================== - -httpAuthorizationHeader (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - flyte-authorization - - -grpcAuthorizationHeader (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - flyte-authorization - - -disableForHttp (bool) ------------------------------------------------------------------------------------------------------------------------- - -Disables auth enforcement on HTTP Endpoints. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -disableForGrpc (bool) ------------------------------------------------------------------------------------------------------------------------- - -Disables auth enforcement on Grpc Endpoints. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -authorizedUris ([]config.URL) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -httpProxyURL (`config.URL`_) ------------------------------------------------------------------------------------------------------------------------- - -OPTIONAL: HTTP Proxy to be used for OAuth requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -userAuth (`config.UserAuthConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Defines Auth options for users. - -**Default Value**: - -.. 
code-block:: yaml - - cookieBlockKeySecretName: cookie_block_key - cookieHashKeySecretName: cookie_hash_key - cookieSetting: - domain: "" - sameSitePolicy: DefaultMode - httpProxyURL: "" - openId: - baseUrl: "" - clientId: "" - clientSecretFile: "" - clientSecretName: oidc_client_secret - scopes: - - openid - - profile - redirectUrl: /console - - -appAuth (`config.OAuth2Options`_) ------------------------------------------------------------------------------------------------------------------------- - -Defines Auth options for apps. UserAuth must be enabled for AppAuth to work. - -**Default Value**: - -.. code-block:: yaml - - authServerType: Self - externalAuthServer: - allowedAudience: [] - baseUrl: "" - httpProxyURL: "" - metadataUrl: "" - retryAttempts: 5 - retryDelay: 1s - selfAuthServer: - accessTokenLifespan: 30m0s - authorizationCodeLifespan: 5m0s - claimSymmetricEncryptionKeySecretName: claim_symmetric_key - issuer: "" - oldTokenSigningRSAKeySecretName: token_rsa_key_old.pem - refreshTokenLifespan: 1h0m0s - staticClients: - flyte-cli: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flyte-cli - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytectl: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flytectl - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytepropeller: - audience: null - client_secret: JDJhJDA2JGQ2UFFuMlFBRlUzY0w1VjhNRGtldXVrNjN4dWJxVXhOeGp0ZlB3LkZjOU1nVjZ2cG15T0l5 - grant_types: - - refresh_token - - client_credentials - id: flytepropeller - public: false - redirect_uris: - - http://localhost:3846/callback - response_types: - - token - scopes: - - all - - offline - - access_token - 
tokenSigningRSAKeySecretName: token_rsa_key.pem - thirdPartyConfig: - flyteClient: - audience: "" - clientId: flytectl - redirectUri: http://localhost:53593/callback - scopes: - - all - - offline - - -config.OAuth2Options -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -authServerType (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - Self - - -selfAuthServer (`config.AuthorizationServer`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Authorization Server config to run as a service. Use this when using an IdP that does not offer a custom OAuth2 Authorization Server. - -**Default Value**: - -.. code-block:: yaml - - accessTokenLifespan: 30m0s - authorizationCodeLifespan: 5m0s - claimSymmetricEncryptionKeySecretName: claim_symmetric_key - issuer: "" - oldTokenSigningRSAKeySecretName: token_rsa_key_old.pem - refreshTokenLifespan: 1h0m0s - staticClients: - flyte-cli: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flyte-cli - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytectl: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flytectl - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytepropeller: - audience: null - client_secret: JDJhJDA2JGQ2UFFuMlFBRlUzY0w1VjhNRGtldXVrNjN4dWJxVXhOeGp0ZlB3LkZjOU1nVjZ2cG15T0l5 - grant_types: - - refresh_token - - client_credentials - id: flytepropeller - public: false - redirect_uris: - - 
http://localhost:3846/callback - response_types: - - token - scopes: - - all - - offline - - access_token - tokenSigningRSAKeySecretName: token_rsa_key.pem - - -externalAuthServer (`config.ExternalAuthorizationServer`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -External Authorization Server config. - -**Default Value**: - -.. code-block:: yaml - - allowedAudience: [] - baseUrl: "" - httpProxyURL: "" - metadataUrl: "" - retryAttempts: 5 - retryDelay: 1s - - -thirdPartyConfig (`config.ThirdPartyConfigOptions`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines settings to instruct flyte cli tools (and optionally others) on what config to use to setup their client. - -**Default Value**: - -.. code-block:: yaml - - flyteClient: - audience: "" - clientId: flytectl - redirectUri: http://localhost:53593/callback - scopes: - - all - - offline - - -config.AuthorizationServer -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -issuer (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the issuer to use when issuing and validating tokens. The default value is https:/// - -**Default Value**: - -.. code-block:: yaml - - "" - - -accessTokenLifespan (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the lifespan of issued access tokens. - -**Default Value**: - -.. code-block:: yaml - - 30m0s - - -refreshTokenLifespan (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the lifespan of issued access tokens. - -**Default Value**: - -.. 
code-block:: yaml - - 1h0m0s - - -authorizationCodeLifespan (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines the lifespan of issued authorization codes. - -**Default Value**: - -.. code-block:: yaml - - 5m0s - - -claimSymmetricEncryptionKeySecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Secret name to use to encrypt claims in authcode token. - -**Default Value**: - -.. code-block:: yaml - - claim_symmetric_key - - -tokenSigningRSAKeySecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Secret name to use to retrieve RSA Signing Key. - -**Default Value**: - -.. code-block:: yaml - - token_rsa_key.pem - - -oldTokenSigningRSAKeySecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Secret name to use to retrieve Old RSA Signing Key. This can be useful during key rotation to continue to accept older tokens. - -**Default Value**: - -.. code-block:: yaml - - token_rsa_key_old.pem - - -staticClients (map[string]*fosite.DefaultClient) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -..
code-block:: yaml - - flyte-cli: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flyte-cli - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytectl: - audience: null - grant_types: - - refresh_token - - authorization_code - id: flytectl - public: true - redirect_uris: - - http://localhost:53593/callback - - http://localhost:12345/callback - response_types: - - code - - token - scopes: - - all - - offline - - access_token - flytepropeller: - audience: null - client_secret: JDJhJDA2JGQ2UFFuMlFBRlUzY0w1VjhNRGtldXVrNjN4dWJxVXhOeGp0ZlB3LkZjOU1nVjZ2cG15T0l5 - grant_types: - - refresh_token - - client_credentials - id: flytepropeller - public: false - redirect_uris: - - http://localhost:3846/callback - response_types: - - token - scopes: - - all - - offline - - access_token - - -config.ExternalAuthorizationServer -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -baseUrl (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -This should be the base url of the authorization server that you are trying to hit. With Okta for instance, it will look something like https://company.okta.com/oauth2/abcdef123456789/ - -**Default Value**: - -.. code-block:: yaml - - "" - - -allowedAudience ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Optional: A list of allowed audiences. If not provided, the audience is expected to be the public Uri of the service. - -**Default Value**: - -.. 
code-block:: yaml - - [] - - -metadataUrl (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Optional: If the server doesn't support /.well-known/oauth-authorization-server, you can set a custom metadata url here. - -**Default Value**: - -.. code-block:: yaml - - "" - - -httpProxyURL (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: HTTP Proxy to be used for OAuth requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -retryAttempts (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Optional: The number of attempted retries on a transient failure to get the OAuth metadata. - -**Default Value**: - -.. code-block:: yaml - - "5" - - -retryDelay (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Optional: Duration to wait between retries. - -**Default Value**: - -.. code-block:: yaml - - 1s - - -config.ThirdPartyConfigOptions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -flyteClient (`config.FlyteClientConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -..
code-block:: yaml - - audience: "" - clientId: flytectl - redirectUri: http://localhost:53593/callback - scopes: - - all - - offline - - -config.FlyteClientConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -clientId (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -public identifier for the app which handles authorization for a Flyte deployment - -**Default Value**: - -.. code-block:: yaml - - flytectl - - -redirectUri (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -This is the callback uri registered with the app which handles authorization for a Flyte deployment - -**Default Value**: - -.. code-block:: yaml - - http://localhost:53593/callback - - -scopes ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Recommended scopes for the client to request. - -**Default Value**: - -.. code-block:: yaml - - - all - - offline - - -audience (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Audience to use when initiating OAuth2 authorization requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -config.UserAuthConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -redirectUrl (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - /console - - -openId (`config.OpenIDOptions`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OpenID Configuration for User Auth - -**Default Value**: - -.. code-block:: yaml - - baseUrl: "" - clientId: "" - clientSecretFile: "" - clientSecretName: oidc_client_secret - scopes: - - openid - - profile - - -httpProxyURL (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: HTTP Proxy to be used for OAuth requests. - -**Default Value**: - -.. code-block:: yaml - - "" - - -cookieHashKeySecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Secret name to use for cookie hash key. - -**Default Value**: - -.. code-block:: yaml - - cookie_hash_key - - -cookieBlockKeySecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Secret name to use for cookie block key. - -**Default Value**: - -.. code-block:: yaml - - cookie_block_key - - -cookieSetting (`config.CookieSettings`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Settings used by cookies created for user auth. - -**Default Value**: - -.. code-block:: yaml - - domain: "" - sameSitePolicy: DefaultMode - - -config.CookieSettings -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -sameSitePolicy (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Allows you to declare if your cookie should be restricted to a first-party or same-site context. Wrapper around http.SameSite. - -**Default Value**: - -..
code-block:: yaml - - DefaultMode - - -domain (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -OPTIONAL: Allows you to set the domain attribute on the auth cookies. - -**Default Value**: - -.. code-block:: yaml - - "" - - -config.OpenIDOptions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -clientId (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -clientSecretName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - oidc_client_secret - - -clientSecretFile (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -baseUrl (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -scopes ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - openid - - profile - - -Section: cloudevents -======================================================================================================================== - -enable (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - local - - -aws (`interfaces.AWSConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - region: "" - - -gcp (`interfaces.GCPConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - projectId: "" - - -kafka (`interfaces.KafkaConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - brokers: null - version: "" - - -eventsPublisher (`interfaces.EventsPublisherConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - eventTypes: null - topicName: "" - - -reconnectAttempts (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -reconnectDelaySeconds (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -interfaces.AWSConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -interfaces.EventsPublisherConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -topicName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -eventTypes ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -interfaces.GCPConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -projectId (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -interfaces.KafkaConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -version (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -brokers ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -Section: cluster_resources -======================================================================================================================== - -templatePath (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -templateData (map[string]interfaces.DataSource) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - {} - - -refreshInterval (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - 1m0s - - -customData (map[string]map[string]interfaces.DataSource) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - {} - - -standaloneDeployment (bool) ------------------------------------------------------------------------------------------------------------------------- - -Whether the cluster resource sync is running in a standalone deployment and should call flyteadmin service endpoints - -**Default Value**: - -.. code-block:: yaml - - "false" - - -Section: clusterpools -======================================================================================================================== - -clusterPoolAssignments (map[string]interfaces.ClusterPoolAssignment) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - {} - - -Section: clusters -======================================================================================================================== - -clusterConfigs ([]interfaces.ClusterConfig) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - null - - -labelClusterMap (map[string][]interfaces.ClusterEntity) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -defaultExecutionLabel (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -Section: database -======================================================================================================================== - -host (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -port (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -dbname (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -username (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -password (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -passwordPath (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -options (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -debug (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "false" - - -enableForeignKeyConstraintWhenMigrating (bool) ------------------------------------------------------------------------------------------------------------------------- - -Whether to enable gorm foreign keys when migrating the db - -**Default Value**: - -.. code-block:: yaml - - "false" - - -maxIdleConnections (int) ------------------------------------------------------------------------------------------------------------------------- - -maxIdleConnections sets the maximum number of connections in the idle connection pool. - -**Default Value**: - -.. code-block:: yaml - - "10" - - -maxOpenConnections (int) ------------------------------------------------------------------------------------------------------------------------- - -maxOpenConnections sets the maximum number of open connections to the database. - -**Default Value**: - -.. code-block:: yaml - - "100" - - -connMaxLifeTime (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -sets the maximum amount of time a connection may be reused - -**Default Value**: - -.. code-block:: yaml - - 1h0m0s - - -postgres (`database.PostgresConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - dbname: postgres - debug: false - host: postgres - options: sslmode=disable - password: "" - passwordPath: "" - port: 5432 - username: postgres - - -sqlite (`database.SQLiteConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - file: "" - - -database.PostgresConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -host (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The host name of the database server - -**Default Value**: - -.. code-block:: yaml - - postgres - - -port (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The port name of the database server - -**Default Value**: - -.. code-block:: yaml - - "5432" - - -dbname (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The database name - -**Default Value**: - -.. code-block:: yaml - - postgres - - -username (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The database user who is connecting to the server. - -**Default Value**: - -.. code-block:: yaml - - postgres - - -password (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The database password. - -**Default Value**: - -.. code-block:: yaml - - "" - - -passwordPath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Points to the file containing the database password. - -**Default Value**: - -.. code-block:: yaml - - "" - - -options (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -See http://gorm.io/docs/connecting_to_the_database.html for available options passed, in addition to the above. - -**Default Value**: - -.. 
code-block:: yaml - - sslmode=disable - - -debug (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Whether or not to start the database connection with debug mode enabled. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -database.SQLiteConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -file (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The path to the file (existing or new) where the DB should be created / stored. If existing, then this will be re-used, else a new will be created - -**Default Value**: - -.. code-block:: yaml - - "" - - -Section: domains -======================================================================================================================== - -id (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - development - - -name (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - development - - -Section: externalevents -======================================================================================================================== - -enable (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "false" - - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - local - - -aws (`interfaces.AWSConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - region: "" - - -gcp (`interfaces.GCPConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - projectId: "" - - -eventsPublisher (`interfaces.EventsPublisherConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - eventTypes: null - topicName: "" - - -reconnectAttempts (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -reconnectDelaySeconds (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -Section: flyteadmin -======================================================================================================================== - -roleNameKey (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -metricsScope (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - 'flyte:' - - -metricsKeys ([]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - - project - - domain - - wf - - task - - phase - - tasktype - - runtime_type - - runtime_version - - app_name - - -profilerPort (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "10254" - - -metadataStoragePrefix ([]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - - metadata - - admin - - -eventVersion (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "2" - - -asyncEventsBufferSize (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "100" - - -maxParallelism (int32) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "25" - - -labels (map[string]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -annotations (map[string]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -interruptible (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "false" - - -overwriteCache (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -assumableIamRole (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -k8sServiceAccount (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -outputLocationPrefix (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -useOffloadedWorkflowClosure (bool) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "false" - - -envs (map[string]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -Section: logger -======================================================================================================================== - -show-source (bool) ------------------------------------------------------------------------------------------------------------------------- - -Includes source code location in logs. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -mute (bool) ------------------------------------------------------------------------------------------------------------------------- - -Mutes all logs regardless of severity. Intended for benchmarks/tests only. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -level (int) ------------------------------------------------------------------------------------------------------------------------- - -Sets the minimum logging level. - -**Default Value**: - -.. 
code-block:: yaml - - "3" - - -formatter (`logger.FormatterConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets logging format. - -**Default Value**: - -.. code-block:: yaml - - type: json - - -logger.FormatterConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets logging format type. - -**Default Value**: - -.. code-block:: yaml - - json - - -Section: namespace_mapping -======================================================================================================================== - -mapping (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -template (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - '{{ project }}-{{ domain }}' - - -templateData (map[string]interfaces.DataSource) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - null - - -Section: notifications -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - local - - -region (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -aws (`interfaces.AWSConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - region: "" - - -gcp (`interfaces.GCPConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - projectId: "" - - -publisher (`interfaces.NotificationsPublisherConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - topicName: "" - - -processor (`interfaces.NotificationsProcessorConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - accountId: "" - queueName: "" - - -emailer (`interfaces.NotificationsEmailerConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - body: "" - emailServerConfig: - apiKeyEnvVar: "" - apiKeyFilePath: "" - serviceName: "" - sender: "" - subject: "" - - -reconnectAttempts (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -reconnectDelaySeconds (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "0" - - -interfaces.NotificationsEmailerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -emailServerConfig (`interfaces.EmailServerConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - apiKeyEnvVar: "" - apiKeyFilePath: "" - serviceName: "" - - -subject (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -sender (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -body (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -interfaces.EmailServerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -serviceName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -apiKeyEnvVar (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -apiKeyFilePath (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -interfaces.NotificationsProcessorConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -queueName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -accountId (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -interfaces.NotificationsPublisherConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -topicName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Section: otel -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -Sets the type of exporter to configure [noop/file/jaeger]. - -**Default Value**: - -.. code-block:: yaml - - noop - - -file (`otelutils.FileConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration for exporting telemetry traces to a file - -**Default Value**: - -.. code-block:: yaml - - filename: /tmp/trace.txt - - -jaeger (`otelutils.JaegerConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration for exporting telemetry traces to a jaeger - -**Default Value**: - -.. 
code-block:: yaml - - endpoint: http://localhost:14268/api/traces - - -otelutils.FileConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -filename (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Filename to store exported telemetry traces - -**Default Value**: - -.. code-block:: yaml - - /tmp/trace.txt - - -otelutils.JaegerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -endpoint (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Endpoint for the jaeger telemetry trace ingestor - -**Default Value**: - -.. code-block:: yaml - - http://localhost:14268/api/traces - - -Section: plugins -======================================================================================================================== - -catalogcache (`catalog.Config`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - reader: - maxItems: 10000 - maxRetries: 3 - workers: 10 - writer: - maxItems: 10000 - maxRetries: 3 - workers: 10 - - -k8s (`config.K8sPluginConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - co-pilot: - cpu: 500m - default-input-path: /var/flyte/inputs - default-output-path: /var/flyte/outputs - image: cr.flyte.org/flyteorg/flytecopilot:v0.0.15 - input-vol-name: flyte-inputs - memory: 128Mi - name: flyte-copilot- - output-vol-name: flyte-outputs - start-timeout: 1m40s - storage: "" - create-container-error-grace-period: 3m0s - default-annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - default-cpus: "1" - default-env-vars: null - default-env-vars-from-env: null - default-labels: null - default-memory: 1Gi - default-node-selector: null - default-pod-dns-config: null - default-pod-security-context: null - default-pod-template-name: "" - default-pod-template-resync: 30s - default-security-context: null - default-tolerations: null - delete-resource-on-finalize: false - enable-host-networking-pod: null - gpu-device-node-label: k8s.amazonaws.com/accelerator - gpu-partition-size-node-label: k8s.amazonaws.com/gpu-partition-size - gpu-resource-name: nvidia.com/gpu - gpu-unpartitioned-node-selector-requirement: null - gpu-unpartitioned-toleration: null - image-pull-backoff-grace-period: 3m0s - inject-finalizer: false - interruptible-node-selector: null - interruptible-node-selector-requirement: null - interruptible-tolerations: null - non-interruptible-node-selector-requirement: null - resource-tolerations: null - scheduler-name: "" - send-object-events: false - - -catalog.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -reader (`workqueue.Config`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Catalog reader workqueue config. Make sure the index cache is big enough to accommodate the biggest array task allowed to run on the system. - -**Default Value**: - -.. 
code-block:: yaml - - maxItems: 10000 - maxRetries: 3 - workers: 10 - - -writer (`workqueue.Config`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Catalog writer workqueue config. Make sure the index cache is big enough to accommodate the biggest array task allowed to run on the system. - -**Default Value**: - -.. code-block:: yaml - - maxItems: 10000 - maxRetries: 3 - workers: 10 - - -workqueue.Config -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -workers (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Number of concurrent workers to start processing the queue. - -**Default Value**: - -.. code-block:: yaml - - "10" - - -maxRetries (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum number of retries per item. - -**Default Value**: - -.. code-block:: yaml - - "3" - - -maxItems (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum number of entries to keep in the index. - -**Default Value**: - -.. code-block:: yaml - - "10000" - - -config.K8sPluginConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -inject-finalizer (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Instructs the plugin to inject a finalizer on startTask and remove it on task termination. - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -default-annotations (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - cluster-autoscaler.kubernetes.io/safe-to-evict: "false" - - -default-labels (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-env-vars (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-env-vars-from-env (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-cpus (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines a default value for cpu for containers if not specified. - -**Default Value**: - -.. code-block:: yaml - - "1" - - -default-memory (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines a default value for memory for containers if not specified. - -**Default Value**: - -.. code-block:: yaml - - 1Gi - - -default-tolerations ([]v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-node-selector (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - null - - -default-affinity (v1.Affinity) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -scheduler-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines scheduler name. - -**Default Value**: - -.. code-block:: yaml - - "" - - -interruptible-tolerations ([]v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -interruptible-node-selector (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -interruptible-node-selector-requirement (v1.NodeSelectorRequirement) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -non-interruptible-node-selector-requirement (v1.NodeSelectorRequirement) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -resource-tolerations (map[v1.ResourceName][]v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -co-pilot (`config.FlyteCoPilotConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Co-Pilot Configuration - -**Default Value**: - -.. 
code-block:: yaml - - cpu: 500m - default-input-path: /var/flyte/inputs - default-output-path: /var/flyte/outputs - image: cr.flyte.org/flyteorg/flytecopilot:v0.0.15 - input-vol-name: flyte-inputs - memory: 128Mi - name: flyte-copilot- - output-vol-name: flyte-outputs - start-timeout: 1m40s - storage: "" - - -delete-resource-on-finalize (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Instructs the system to delete the resource upon successful execution of a k8s pod rather than have the k8s garbage collector clean it up. This ensures that no resources are kept around (potentially consuming cluster resources). This, however, will cause k8s log links to expire as soon as the resource is finalized. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -create-container-error-grace-period (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 3m0s - - -image-pull-backoff-grace-period (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 3m0s - - -gpu-device-node-label (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - k8s.amazonaws.com/accelerator - - -gpu-partition-size-node-label (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - k8s.amazonaws.com/gpu-partition-size - - -gpu-unpartitioned-node-selector-requirement (v1.NodeSelectorRequirement) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -gpu-unpartitioned-toleration (v1.Toleration) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -gpu-resource-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - nvidia.com/gpu - - -default-pod-security-context (v1.PodSecurityContext) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-security-context (v1.SecurityContext) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -enable-host-networking-pod (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - - -default-pod-dns-config (v1.PodDNSConfig) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -default-pod-template-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the PodTemplate to use as the base for all k8s pods created by FlytePropeller. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -default-pod-template-resync (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Frequency of resyncing default pod templates - -**Default Value**: - -.. code-block:: yaml - - 30s - - -send-object-events (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -If true, will send k8s object events in TaskExecutionEvent updates. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -config.FlyteCoPilotConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Flyte co-pilot sidecar container name prefix. (additional bits will be added after this) - -**Default Value**: - -.. code-block:: yaml - - flyte-copilot- - - -image (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Flyte co-pilot Docker Image FQN - -**Default Value**: - -.. code-block:: yaml - - cr.flyte.org/flyteorg/flytecopilot:v0.0.15 - - -default-input-path (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default path where the volume should be mounted - -**Default Value**: - -.. code-block:: yaml - - /var/flyte/inputs - - -default-output-path (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default path where the volume should be mounted - -**Default Value**: - -.. 
code-block:: yaml - - /var/flyte/outputs - - -input-vol-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the data volume that is created for storing inputs - -**Default Value**: - -.. code-block:: yaml - - flyte-inputs - - -output-vol-name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Name of the data volume that is created for storing outputs - -**Default Value**: - -.. code-block:: yaml - - flyte-outputs - - -start-timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 1m40s - - -cpu (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Used to set cpu for co-pilot containers - -**Default Value**: - -.. code-block:: yaml - - 500m - - -memory (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Used to set memory for co-pilot containers - -**Default Value**: - -.. code-block:: yaml - - 128Mi - - -storage (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default storage limit for individual inputs / outputs - -**Default Value**: - -.. code-block:: yaml - - "" - - -resource.Quantity -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -i (`resource.int64Amount`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - {} - - -d (`resource.infDecAmount`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - - -s (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "1" - - -Format (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - DecimalSI - - -resource.infDecAmount -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Dec (inf.Dec) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -resource.int64Amount -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -value (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "1" - - -scale (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "0" - - -Section: propeller -======================================================================================================================== - -kube-config (string) ------------------------------------------------------------------------------------------------------------------------- - -Path to kubernetes client config file. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -master (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -workers (int) ------------------------------------------------------------------------------------------------------------------------- - -Number of threads to process workflows - -**Default Value**: - -.. code-block:: yaml - - "20" - - -workflow-reeval-duration (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Frequency of re-evaluating workflows - -**Default Value**: - -.. code-block:: yaml - - 10s - - -downstream-eval-duration (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Frequency of re-evaluating downstream tasks - -**Default Value**: - -.. code-block:: yaml - - 30s - - -limit-namespace (string) ------------------------------------------------------------------------------------------------------------------------- - -Namespaces to watch for this propeller - -**Default Value**: - -.. code-block:: yaml - - all - - -prof-port (`config.Port`_) ------------------------------------------------------------------------------------------------------------------------- - -Profiler port - -**Default Value**: - -.. code-block:: yaml - - 10254 - - -metadata-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -MetadataPrefix should be used if all the metadata for Flyte executions should be stored under a specific prefix in CloudStorage. If not specified, the data will be stored in the base container directly. - -**Default Value**: - -.. 
code-block:: yaml - - metadata/propeller - - -rawoutput-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -a fully qualified storage path of the form s3://flyte/abc/..., where all data sandboxes should be stored. - -**Default Value**: - -.. code-block:: yaml - - "" - - -queue (`config.CompositeQueueConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Workflow workqueue configuration, affects the way the work is consumed from the queue. - -**Default Value**: - -.. code-block:: yaml - - batch-size: -1 - batching-interval: 1s - queue: - base-delay: 0s - capacity: 10000 - max-delay: 1m0s - rate: 1000 - type: maxof - sub-queue: - base-delay: 0s - capacity: 10000 - max-delay: 0s - rate: 1000 - type: bucket - type: batch - - -metrics-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -An optional prefix for all published metrics. - -**Default Value**: - -.. code-block:: yaml - - flyte - - -metrics-keys ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Metrics labels applied to prometheus metrics emitted by the service. - -**Default Value**: - -.. code-block:: yaml - - - project - - domain - - wf - - task - - -enable-admin-launcher (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable remote Workflow launcher to Admin - -**Default Value**: - -.. code-block:: yaml - - "true" - - -max-workflow-retries (int) ------------------------------------------------------------------------------------------------------------------------- - -Maximum number of retries per workflow - -**Default Value**: - -.. 
code-block:: yaml - - "10" - - -max-ttl-hours (int) ------------------------------------------------------------------------------------------------------------------------- - -Maximum number of hours a completed workflow should be retained. Number between 1-23 hours - -**Default Value**: - -.. code-block:: yaml - - "23" - - -gc-interval (`config.Duration`_) ------------------------------------------------------------------------------------------------------------------------- - -Run periodic GC every 30 minutes - -**Default Value**: - -.. code-block:: yaml - - 30m0s - - -leader-election (`config.LeaderElectionConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Config for leader election. - -**Default Value**: - -.. code-block:: yaml - - enabled: false - lease-duration: 15s - lock-config-map: - Name: "" - Namespace: "" - renew-deadline: 10s - retry-period: 2s - - -publish-k8s-events (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable events publishing to K8s events API. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -max-output-size-bytes (int64) ------------------------------------------------------------------------------------------------------------------------- - -Maximum size of outputs per task - -**Default Value**: - -.. code-block:: yaml - - "10485760" - - -enable-grpc-latency-metrics (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable grpc latency metrics. Note Histograms metrics can be expensive on Prometheus servers. - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -kube-client-config (`config.KubeClientConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration to control the Kubernetes client - -**Default Value**: - -.. code-block:: yaml - - burst: 25 - qps: 100 - timeout: 30s - - -node-config (`config.NodeConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -config for a workflow node - -**Default Value**: - -.. code-block:: yaml - - default-deadlines: - node-active-deadline: 0s - node-execution-deadline: 0s - workflow-active-deadline: 0s - default-max-attempts: 1 - ignore-retry-cause: false - interruptible-failure-threshold: -1 - max-node-retries-system-failures: 3 - - -max-streak-length (int) ------------------------------------------------------------------------------------------------------------------------- - -Maximum number of consecutive rounds that one propeller worker can use for one workflow - >1 => turbo-mode is enabled. - -**Default Value**: - -.. code-block:: yaml - - "8" - - -event-config (`config.EventConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Configures execution event behavior. - -**Default Value**: - -.. code-block:: yaml - - fallback-to-output-reference: false - raw-output-policy: reference - - -include-shard-key-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Include the specified shard key label in the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. 
code-block:: yaml - - [] - - -exclude-shard-key-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Exclude the specified shard key label from the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -include-project-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Include the specified project label in the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -exclude-project-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Exclude the specified project label from the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -include-domain-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Include the specified domain label in the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -exclude-domain-label ([]string) ------------------------------------------------------------------------------------------------------------------------- - -Exclude the specified domain label from the k8s FlyteWorkflow CRD label selector - -**Default Value**: - -.. code-block:: yaml - - [] - - -cluster-id (string) ------------------------------------------------------------------------------------------------------------------------- - -Unique cluster id running this flytepropeller instance with which to annotate execution events - -**Default Value**: - -.. 
code-block:: yaml - - propeller - - -create-flyteworkflow-crd (bool) ------------------------------------------------------------------------------------------------------------------------- - -Enable creation of the FlyteWorkflow CRD on startup - -**Default Value**: - -.. code-block:: yaml - - "false" - - -array-node-event-version (int) ------------------------------------------------------------------------------------------------------------------------- - -ArrayNode eventing version. 0 => legacy (drop-in replacement for maptask), 1 => new - -**Default Value**: - -.. code-block:: yaml - - "0" - - -config.CompositeQueueConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Type of composite queue to use for the WorkQueue - -**Default Value**: - -.. code-block:: yaml - - batch - - -queue (`config.WorkqueueConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Workflow workqueue configuration, affects the way the work is consumed from the queue. - -**Default Value**: - -.. code-block:: yaml - - base-delay: 0s - capacity: 10000 - max-delay: 1m0s - rate: 1000 - type: maxof - - -sub-queue (`config.WorkqueueConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -SubQueue configuration, affects the way the nodes cause the top-level Work to be re-evaluated. - -**Default Value**: - -.. 
code-block:: yaml - - base-delay: 0s - capacity: 10000 - max-delay: 0s - rate: 1000 - type: bucket - - -batching-interval (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration for which downstream updates are buffered - -**Default Value**: - -.. code-block:: yaml - - 1s - - -batch-size (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "-1" - - -config.WorkqueueConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Type of RateLimiter to use for the WorkQueue - -**Default Value**: - -.. code-block:: yaml - - maxof - - -base-delay (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -base backoff delay for failure - -**Default Value**: - -.. code-block:: yaml - - 0s - - -max-delay (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max backoff delay for failure - -**Default Value**: - -.. code-block:: yaml - - 1m0s - - -rate (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Bucket Refill rate per second - -**Default Value**: - -.. code-block:: yaml - - "1000" - - -capacity (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Bucket capacity as number of items - -**Default Value**: - -.. 
code-block:: yaml - - "10000" - - -config.EventConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -raw-output-policy (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -How output data should be passed along in execution events. - -**Default Value**: - -.. code-block:: yaml - - reference - - -fallback-to-output-reference (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Whether output data should be sent by reference when it is too large to be sent inline in execution events. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -config.KubeClientConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -qps (float32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "100" - - -burst (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max burst rate for throttle. 0 defaults to 10 - -**Default Value**: - -.. code-block:: yaml - - "25" - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max duration allowed for every request to KubeAPI before giving up. 0 implies no timeout. - -**Default Value**: - -.. 
code-block:: yaml - - 30s - - -config.LeaderElectionConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enables/Disables leader election. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -lock-config-map (`types.NamespacedName`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -ConfigMap namespace/name to use for resource lock. - -**Default Value**: - -.. code-block:: yaml - - Name: "" - Namespace: "" - - -lease-duration (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration that non-leader candidates will wait to force acquire leadership. This is measured against time of last observed ack. - -**Default Value**: - -.. code-block:: yaml - - 15s - - -renew-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration that the acting master will retry refreshing leadership before giving up. - -**Default Value**: - -.. code-block:: yaml - - 10s - - -retry-period (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Duration the LeaderElector clients should wait between tries of actions. - -**Default Value**: - -.. code-block:: yaml - - 2s - - -types.NamespacedName -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Namespace (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -Name (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -config.NodeConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -default-deadlines (`config.DefaultDeadlines`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value for timeouts - -**Default Value**: - -.. code-block:: yaml - - node-active-deadline: 0s - node-execution-deadline: 0s - workflow-active-deadline: 0s - - -max-node-retries-system-failures (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum number of retries per node for node failure due to infra issues - -**Default Value**: - -.. code-block:: yaml - - "3" - - -interruptible-failure-threshold (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -number of failures for a node to be still considered interruptible. Negative numbers are treated as complementary (ex. -1 means last attempt is non-interruptible).' - -**Default Value**: - -.. code-block:: yaml - - "-1" - - -default-max-attempts (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default maximum number of attempts for a node - -**Default Value**: - -.. code-block:: yaml - - "1" - - -ignore-retry-cause (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Ignore retry cause and count all attempts toward a node's max attempts - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -config.DefaultDeadlines -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -node-execution-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value of node execution timeout that includes the time spent to run the node/workflow - -**Default Value**: - -.. code-block:: yaml - - 0s - - -node-active-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value of node timeout that includes the time spent queued. - -**Default Value**: - -.. code-block:: yaml - - 0s - - -workflow-active-deadline (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default value of workflow timeout that includes the time spent queued. - -**Default Value**: - -.. code-block:: yaml - - 0s - - -config.Port -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -port (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "10254" - - -Section: qualityofservice -======================================================================================================================== - -tierExecutionValues (map[string]interfaces.QualityOfServiceSpec) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - {} - - -defaultTiers (map[string]string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - {} - - -Section: queues -======================================================================================================================== - -executionQueues (interfaces.ExecutionQueues) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - [] - - -workflowConfigs (interfaces.WorkflowConfigs) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - [] - - -Section: registration -======================================================================================================================== - -maxWorkflowNodes (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "100" - - -maxLabelEntries (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -maxAnnotationEntries (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -workflowSizeLimit (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -Section: remotedata -======================================================================================================================== - -scheme (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - none - - -region (string) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "" - - -signedUrls (`interfaces.SignedURL`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - durationMinutes: 0 - enabled: false - signingPrincipal: "" - - -maxSizeInBytes (int64) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "2097152" - - -inlineEventDataPolicy (int) ------------------------------------------------------------------------------------------------------------------------- - -Specifies how inline execution event data should be saved in the backend - -**Default Value**: - -.. code-block:: yaml - - Offload - - -interfaces.SignedURL -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -enabled (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Whether signed urls should even be returned with GetExecutionData, GetNodeExecutionData and GetTaskExecutionData response objects. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -durationMinutes (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "0" - - -signingPrincipal (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Section: scheduler -======================================================================================================================== - -profilerPort (`config.Port`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - 10254 - - -eventScheduler (`interfaces.EventSchedulerConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - aws: null - local: {} - region: "" - scheduleNamePrefix: "" - scheduleRole: "" - scheme: local - targetName: "" - - -workflowExecutor (`interfaces.WorkflowExecutorConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - accountId: "" - aws: null - local: - adminRateLimit: - burst: 10 - tps: 100 - useUTCTz: false - region: "" - scheduleQueueName: "" - scheme: local - - -reconnectAttempts (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - "0" - - -reconnectDelaySeconds (int) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - "0" - - -interfaces.EventSchedulerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -scheme (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - local - - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -scheduleRole (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -targetName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -scheduleNamePrefix (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -aws (interfaces.AWSSchedulerConfig) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -local (`interfaces.FlyteSchedulerConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - {} - - -interfaces.FlyteSchedulerConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -interfaces.WorkflowExecutorConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -scheme (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - local - - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -scheduleQueueName (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -accountId (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -aws (interfaces.AWSWorkflowExecutorConfig) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -local (`interfaces.FlyteWorkflowExecutorConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - adminRateLimit: - burst: 10 - tps: 100 - useUTCTz: false - - -interfaces.FlyteWorkflowExecutorConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -adminRateLimit (`interfaces.AdminRateLimit`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - burst: 10 - tps: 100 - - -useUTCTz (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -interfaces.AdminRateLimit -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -tps (float64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "100" - - -burst (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "10" - - -Section: secrets -======================================================================================================================== - -secrets-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -Prefix where to look for secrets file - -**Default Value**: - -.. code-block:: yaml - - /etc/secrets - - -env-prefix (string) ------------------------------------------------------------------------------------------------------------------------- - -Prefix for environment variables - -**Default Value**: - -.. 
code-block:: yaml - - FLYTE_SECRET_ - - -Section: server -======================================================================================================================== - -httpPort (int) ------------------------------------------------------------------------------------------------------------------------- - -On which http port to serve admin - -**Default Value**: - -.. code-block:: yaml - - "8088" - - -grpcPort (int) ------------------------------------------------------------------------------------------------------------------------- - -deprecated - -**Default Value**: - -.. code-block:: yaml - - "0" - - -grpcServerReflection (bool) ------------------------------------------------------------------------------------------------------------------------- - -deprecated - -**Default Value**: - -.. code-block:: yaml - - "false" - - -kube-config (string) ------------------------------------------------------------------------------------------------------------------------- - -Path to kubernetes client config file, default is empty, useful for incluster config. - -**Default Value**: - -.. code-block:: yaml - - "" - - -master (string) ------------------------------------------------------------------------------------------------------------------------- - -The address of the Kubernetes API server. - -**Default Value**: - -.. code-block:: yaml - - "" - - -security (`config.ServerSecurityOptions`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - allowCors: true - allowedHeaders: - - Content-Type - - flyte-authorization - allowedOrigins: - - '*' - auditAccess: false - secure: false - ssl: - certificateFile: "" - keyFile: "" - useAuth: false - - -grpc (`config.GrpcConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. 
code-block:: yaml - - enableGrpcLatencyMetrics: false - maxMessageSizeBytes: 0 - port: 8089 - serverReflection: true - - -thirdPartyConfig (`config.ThirdPartyConfigOptions`_) ------------------------------------------------------------------------------------------------------------------------- - -Deprecated please use auth.appAuth.thirdPartyConfig instead. - -**Default Value**: - -.. code-block:: yaml - - flyteClient: - audience: "" - clientId: "" - redirectUri: "" - scopes: [] - - -dataProxy (`config.DataProxyConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Defines data proxy configuration. - -**Default Value**: - -.. code-block:: yaml - - download: - maxExpiresIn: 1h0m0s - upload: - defaultFileNameLength: 20 - maxExpiresIn: 1h0m0s - maxSize: 6Mi - storagePrefix: "" - - -readHeaderTimeoutSeconds (int) ------------------------------------------------------------------------------------------------------------------------- - -The amount of time allowed to read request headers. - -**Default Value**: - -.. code-block:: yaml - - "32" - - -kubeClientConfig (`config.KubeClientConfig (kubeClientConfig)`_) ------------------------------------------------------------------------------------------------------------------------- - -Configuration to control the Kubernetes client - -**Default Value**: - -.. code-block:: yaml - - burst: 25 - qps: 100 - timeout: 30s - - -config.DataProxyConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -upload (`config.DataProxyUploadConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines data proxy upload configuration. - -**Default Value**: - -.. 
code-block:: yaml - - defaultFileNameLength: 20 - maxExpiresIn: 1h0m0s - maxSize: 6Mi - storagePrefix: "" - - -download (`config.DataProxyDownloadConfig`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Defines data proxy download configuration. - -**Default Value**: - -.. code-block:: yaml - - maxExpiresIn: 1h0m0s - - -config.DataProxyDownloadConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -maxExpiresIn (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum allowed expiration duration. - -**Default Value**: - -.. code-block:: yaml - - 1h0m0s - - -config.DataProxyUploadConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -maxSize (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum allowed upload size. - -**Default Value**: - -.. code-block:: yaml - - 6Mi - - -maxExpiresIn (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum allowed expiration duration. - -**Default Value**: - -.. code-block:: yaml - - 1h0m0s - - -defaultFileNameLength (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Default length for the generated file name if not provided in the request. - -**Default Value**: - -.. code-block:: yaml - - "20" - - -storagePrefix (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Storage prefix to use for all upload requests. - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -config.GrpcConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -port (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -On which grpc port to serve admin - -**Default Value**: - -.. code-block:: yaml - - "8089" - - -serverReflection (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable GRPC Server Reflection - -**Default Value**: - -.. code-block:: yaml - - "true" - - -maxMessageSizeBytes (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The max size in bytes for incoming gRPC messages - -**Default Value**: - -.. code-block:: yaml - - "0" - - -enableGrpcLatencyMetrics (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Enable grpc latency metrics. Note Histograms metrics can be expensive on Prometheus servers. - -**Default Value**: - -.. code-block:: yaml - - "false" - - -config.KubeClientConfig (kubeClientConfig) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -qps (int32) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max QPS to the master for requests to KubeAPI. 0 defaults to 5. - -**Default Value**: - -.. code-block:: yaml - - "100" - - -burst (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max burst rate for throttle. 0 defaults to 10 - -**Default Value**: - -.. 
code-block:: yaml - - "25" - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Max duration allowed for every request to KubeAPI before giving up. 0 implies no timeout. - -**Default Value**: - -.. code-block:: yaml - - 30s - - -config.ServerSecurityOptions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -secure (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -ssl (`config.SslOptions`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - certificateFile: "" - keyFile: "" - - -useAuth (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -auditAccess (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "false" - - -allowCors (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "true" - - -allowedOrigins ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - - '*' - - -allowedHeaders ([]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - - Content-Type - - flyte-authorization - - -config.SslOptions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -certificateFile (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -keyFile (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "" - - -Section: storage -======================================================================================================================== - -type (string) ------------------------------------------------------------------------------------------------------------------------- - -Sets the type of storage to configure [s3/minio/local/mem/stow]. - -**Default Value**: - -.. code-block:: yaml - - s3 - - -connection (`storage.ConnectionConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - access-key: "" - auth-type: iam - disable-ssl: false - endpoint: "" - region: us-east-1 - secret-key: "" - - -stow (`storage.StowConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Storage config for stow backend. - -**Default Value**: - -.. code-block:: yaml - - {} - - -container (string) ------------------------------------------------------------------------------------------------------------------------- - -Initial container (in s3 a bucket) to create -if it doesn't exist-.' - -**Default Value**: - -.. 
code-block:: yaml - - "" - - -enable-multicontainer (bool) ------------------------------------------------------------------------------------------------------------------------- - -If this is true, then the container argument is overlooked and redundant. This config will automatically open new connections to new containers/buckets as they are encountered - -**Default Value**: - -.. code-block:: yaml - - "false" - - -cache (`storage.CachingConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - max_size_mbs: 0 - target_gc_percent: 0 - - -limits (`storage.LimitsConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets limits for stores. - -**Default Value**: - -.. code-block:: yaml - - maxDownloadMBs: 2 - - -defaultHttpClient (`storage.HTTPClientConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets the default http client config. - -**Default Value**: - -.. code-block:: yaml - - headers: null - timeout: 0s - - -signedUrl (`storage.SignedURLConfig`_) ------------------------------------------------------------------------------------------------------------------------- - -Sets config for SignedURL. - -**Default Value**: - -.. code-block:: yaml - - {} - - -storage.CachingConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -max_size_mbs (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum size of the cache where the Blob store data is cached in-memory. If not specified or set to 0, cache is not used - -**Default Value**: - -.. 
code-block:: yaml - - "0" - - -target_gc_percent (int) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets the garbage collection target percentage. - -**Default Value**: - -.. code-block:: yaml - - "0" - - -storage.ConnectionConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -endpoint (`config.URL`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -URL for storage client to connect to. - -**Default Value**: - -.. code-block:: yaml - - "" - - -auth-type (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Auth Type to use [iam,accesskey]. - -**Default Value**: - -.. code-block:: yaml - - iam - - -access-key (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Access key to use. Only required when authtype is set to accesskey. - -**Default Value**: - -.. code-block:: yaml - - "" - - -secret-key (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Secret to use when accesskey is set. - -**Default Value**: - -.. code-block:: yaml - - "" - - -region (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Region to connect to. - -**Default Value**: - -.. code-block:: yaml - - us-east-1 - - -disable-ssl (bool) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Disables SSL connection. Should only be used for development. - -**Default Value**: - -.. 
code-block:: yaml - - "false" - - -storage.HTTPClientConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -headers (map[string][]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -timeout (`config.Duration`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Sets time out on the http client. - -**Default Value**: - -.. code-block:: yaml - - 0s - - -storage.LimitsConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -maxDownloadMBs (int64) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Maximum allowed download size (in MBs) per call. - -**Default Value**: - -.. code-block:: yaml - - "2" - - -storage.SignedURLConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -stowConfigOverride (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - null - - -storage.StowConfig -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -kind (string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Kind of Stow backend to use. Refer to github/flyteorg/stow - -**Default Value**: - -.. code-block:: yaml - - "" - - -config (map[string]string) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -Configuration for stow backend. 
Refer to github/flyteorg/stow - -**Default Value**: - -.. code-block:: yaml - - {} - - -Section: task_resources -======================================================================================================================== - -defaults (`interfaces.TaskResourceSet`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - cpu: "2" - ephemeralStorage: "0" - gpu: "0" - memory: 200Mi - storage: "0" - - -limits (`interfaces.TaskResourceSet`_) ------------------------------------------------------------------------------------------------------------------------- - -**Default Value**: - -.. code-block:: yaml - - cpu: "2" - ephemeralStorage: "0" - gpu: "1" - memory: 1Gi - storage: "0" - - -interfaces.TaskResourceSet -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -cpu (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "2" - - -gpu (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "0" - - -memory (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - 200Mi - - -storage (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. 
code-block:: yaml - - "0" - - -ephemeralStorage (`resource.Quantity`_) -"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -**Default Value**: - -.. code-block:: yaml - - "0" - - diff --git a/rsts/deployment/configuration/index.rst b/rsts/deployment/configuration/index.rst deleted file mode 100644 index 81b3f9df5f..0000000000 --- a/rsts/deployment/configuration/index.rst +++ /dev/null @@ -1,130 +0,0 @@ -.. _deployment-configuration: - -###################### -Configuration -###################### - -This section will cover how to configure your Flyte cluster for features like -authentication, monitoring, and notifications. - -.. important:: - - The configuration instructions in this section are for the ``flyte`` and ``flyte-core`` Helm charts, which is for - the :ref:`multi-cluster setup `. - - If you're using the ``flyte-binary`` chart for the :ref:`single cluster setup `, - instead of specifying configuration under a yaml file like ``cloud_events.yaml`` in :ref:`deployment-configuration-cloud-event`, - you'll need to add the configuration settings under the ``inline`` section in the `eks-production.yaml` file: - - .. literalinclude:: ../../../charts/flyte-binary/eks-production.yaml - :language: yaml - :lines: 30-41 - :caption: charts/flyte-binary/eks-production.yaml - - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. link-button:: deployment-configuration-auth-setup - :type: ref - :text: Authenticating in Flyte - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Basic OIDC and Authentication Setup - - --- - - .. link-button:: deployment-configuration-auth-migration - :type: ref - :text: Migrating Your Authentication Config - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Migration guide to move to Admin's own authorization server. - - --- - - .. 
link-button:: deployment-configuration-auth-appendix - :type: ref - :text: Understanding Authentication in Detail - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - A detailed look at how authentication works in Flyte. - - --- - - .. link-button:: deployment-configuration-general - :type: ref - :text: Configuring Custom K8s Resources - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Use Flyte's cluster-resource-controller to control specific Kubernetes resources and administer project/domain-specific CPU/GPU/memory resource quotas. - - --- - - .. link-button:: deployment-configuration-customizable-resources - :type: ref - :text: Adding New Customizable Resources - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Create new default configurations or override certain values for specific combinations of user projects, domains and workflows through Flyte APIs. - - --- - - .. link-button:: deployment-configuration-notifications - :type: ref - :text: Notifications - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up and configuring notifications. - - --- - - .. link-button:: deployment-configuration-cloud-event - :type: ref - :text: External Events - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - How to set up Flyte to emit events to third-parties. - - --- - - .. link-button:: deployment-configuration-monitoring - :type: ref - :text: Monitoring - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up and configuring observability. - - --- - - .. link-button:: deployment-configuration-performance - :type: ref - :text: Optimizing Performance - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Improve the performance of the core Flyte engine. - - --- - - .. link-button:: deployment-configuration-eventing - :type: ref - :text: Platform Events - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Configure Flyte to send events to external pub/sub systems. - -..
toctree:: - :maxdepth: 1 - :name: Cluster Config - :hidden: - - auth_setup - auth_migration - auth_appendix - general - customizable_resources - monitoring - notifications - performance - cloud_event diff --git a/rsts/deployment/configuration/monitoring.rst b/rsts/deployment/configuration/monitoring.rst deleted file mode 100644 index 75bc89adc4..0000000000 --- a/rsts/deployment/configuration/monitoring.rst +++ /dev/null @@ -1,99 +0,0 @@ -.. _deployment-configuration-monitoring: - -Monitoring ----------- - -.. tags:: Infrastructure, Advanced - -.. tip:: The Flyte core team publishes and maintains Grafana dashboards built using Prometheus data sources, which can be found `here `__. - -Metrics for Executions -====================== - -Flyte-provided Metrics -~~~~~~~~~~~~~~~~~~~~~~ - -Whenever you run a workflow, Flyte platform automatically emits high-level metrics. These metrics follow a consistent schema and aim to provide visibility into aspects of the platform which might otherwise be opaque. -These metrics help users diagnose whether an issue is inherent to the platform or one's own task or workflow implementation. - -We will be adding to this set of metrics as we implement the capabilities to pull more data from the system, so keep checking back for new stats! - -At a high level, workflow execution goes through the following discrete steps: - -.. 
image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/deployment/monitoring/flyte_wf_timeline.svg?sanitize=true - -=================================== ================================================================================================================================== - Description of main events for workflow execution ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ - Events Description -=================================== ================================================================================================================================== -Acceptance Measures the time consumed from receiving a service call to creating an Execution (Unknown) and moving to QUEUED. -Transition Latency Measures the latency between two consecutive node executions; the time spent in Flyte engine. -Queuing Latency Measures the latency between the node moving to QUEUED and the handler reporting the executable moving to RUNNING state. -Task Execution Actual time spent executing the user code. -Repeat steps 2-4 for every task -Transition Latency See #2 -Completion Latency Measures the time consumed by a workflow moving from SUCCEEDING/FAILING state to TERMINAL state. 
-=================================== ================================================================================================================================== - - -========================================================== =========== =============================================================================================================================================================== - Flyte Stats Schema ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Prefix Type Description -========================================================== =========== =============================================================================================================================================================== -``propeller.all.workflow.acceptance-latency-ms`` Timer (ms) Measures the time consumed from receiving a service call to creating an Execution (Unknown) and moving to QUEUED. -``propeller.all.node.queueing-latency-ms`` Timer (ms) Measures the latency between the node moving to QUEUED and the handler reporting the executable moving to RUNNING state. -``propeller.all.node.transition-latency-ms`` Timer (ms) Measures the latency between two consecutive node executions; the time spent in Flyte engine. -``propeller.all.workflow.completion-latency-ms`` Timer (ms) Measures the time consumed by a workflow moving from SUCCEEDING/FAILING state to TERMINAL state. -``propeller.all.node.success-duration-ms`` Timer (ms) Actual time spent executing user code (when the node ends with SUCCESS state). -``propeller.all.node.success-duration-ms-count`` Counter The number of times a node success has been reported. -``propeller.all.node.failure-duration-ms`` Timer (ms) Actual time spent executing user code (when the node ends with FAILURE state). 
-``propeller.all.node.failure-duration-ms-count`` Counter The number of times a node failure has been reported. - -========================================================== =========== =============================================================================================================================================================== - -All the above stats are automatically tagged with the following fields for further scoping. This includes user-produced stats. -Users can also provide additional tags (or override tags) for custom stats. - -.. _task_stats_tags: - -=============== ================================================================================= - Flyte Stats Tags --------------------------------------------------------------------------------------------------- - Tag Description -=============== ================================================================================= -wf Name of the workflow that was executing when this metric was emitted. - ``{{project}}:{{domain}}:{{workflow_name}}`` -=============== ================================================================================= - -User Stats With Flyte -~~~~~~~~~~~~~~~~~~~~~~ - -The workflow parameters object that the SDK injects into various tasks has a ``statsd`` handle that users should call -to emit stats of their workflows not captured by the default metrics. The usual caveats around cardinality apply, of course. - -.. todo: Reference to Flytekit task stats - -Users are encouraged to avoid creating their own stats handlers. -If not done correctly, these can pollute the general namespace and accidentally interfere with the production stats of live services, causing pages and wreaking havoc. -If you're using any libraries that emit stats, it's best to turn them off if possible. - - -Use Published Dashboards to Monitor Flyte Deployment -==================================================== - -Flyte Backend is written in Golang and exposes stats using Prometheus. 
The stats are labeled with workflow, task, project & domain, wherever appropriate. - -The dashboards are divided into two types: - -- **User-facing dashboards**: Dashboards that can be used to triage/investigate/observe performance and characteristics of workflows and tasks. - The user-facing dashboard is published under Grafana marketplace ID `13980 `__. - -- **System Dashboards**: Dashboards that are useful for the system maintainer to maintain their Flyte deployments. These are further divided into: - - DataPlane/FlytePropeller dashboards published @ `13979 `__ - - ControlPlane/Flyteadmin dashboards published @ `13981 `__ - -The above-mentioned are basic dashboards and do not include all the metrics exposed by Flyte. -Please help us improve the dashboards by contributing to them 🙏. -Refer to the build scripts `here `__. \ No newline at end of file diff --git a/rsts/deployment/configuration/notifications.rst b/rsts/deployment/configuration/notifications.rst deleted file mode 100644 index e01dd1a0ca..0000000000 --- a/rsts/deployment/configuration/notifications.rst +++ /dev/null @@ -1,135 +0,0 @@ -.. _deployment-configuration-notifications: - -Notifications -------------- - -.. tags:: Infrastructure, Advanced - -When a workflow completes, users can be notified by email, `Pagerduty `__, -or `Slack `__. - -The content of these notifications is configurable at the platform level. - -***** -Usage -***** - -When a workflow reaches a specified `terminal workflow execution phase `__ -the :py:class:`flytekit:flytekit.Email`, :py:class:`flytekit:flytekit.PagerDuty`, or :py:class:`flytekit:flytekit.Slack` -objects can be used in the construction of a :py:class:`flytekit:flytekit.LaunchPlan`. - -For example - -.. code:: python - - from flytekit import Email, LaunchPlan - from flytekit.models.core.execution import WorkflowExecutionPhase - - # This launch plan triggers email notifications when the workflow execution it triggered reaches the phase `SUCCEEDED`.
- my_notifying_lp = LaunchPlan.create( - "my_notifying_lp", - my_workflow_definition, - default_inputs={"a": 4}, - notifications=[ - Email( - phases=[WorkflowExecutionPhase.SUCCEEDED], - recipients_email=["admin@example.com"], - ) - ], - ) - - -See detailed usage examples in the :std:doc:`User Guide ` - -Notifications can be combined with schedules to automatically alert you when a scheduled job succeeds or fails. - -Future work -=========== - -Work is ongoing to support a generic event egress system that can be used to publish events for tasks, workflows and -workflow nodes. When this is complete, generic event subscribers can asynchronously process these events for a rich -and fully customizable experience. - - -****************************** -Platform Configuration Changes -****************************** - -Setting Up Workflow Notifications -================================= - -The ``notifications`` top-level portion of the FlyteAdmin config specifies how to handle notifications. - -As with schedules, the notifications handling is composed of two parts. One handles enqueuing notifications asynchronously and the second part handles processing pending notifications and actually firing off emails and alerts. - -This is only supported for Flyte instances running on AWS. - -Config -======= - -To publish notifications, you'll need to set up an `SNS topic `_. - -In order to process notifications, you'll need to set up an `AWS SQS `_ queue to consume notification events. This queue must be configured as a subscription to the SNS topic you created above. - -In order to actually publish notifications, you'll need a `verified SES email address `_ which will be used to send notification emails and alerts using email APIs. - -The role you use to run FlyteAdmin must have permissions to read and write to your SNS topic and SQS queue. - -Let's look at the following config section and explain what each value represents: - -..
code-block:: yaml - - notifications: - # Because AWS is the only cloud back-end supported for executing scheduled - # workflows in this case, only ``"aws"`` is a valid value. By default, the - #no-op executor is used. - type: "aws" - - # This specifies which region AWS clients will use when creating SNS and SQS clients. - region: "us-east-1" - - # This handles pushing notification events to your SNS topic. - publisher: - - # This is the arn of your SNS topic. - topicName: "arn:aws:sns:us-east-1:{{ YOUR ACCOUNT ID }}:{{ YOUR TOPIC }}" - - # This handles the recording notification events and enqueueing them to be - # processed asynchronously. - processor: - - # This is the name of the SQS queue which will capture pending notification events. - queueName: "{{ YOUR QUEUE NAME }}" - - # Your AWS `account id, see: https://docs.aws.amazon.com/IAM/latest/UserGuide/console_account-alias.html#FindingYourAWSId - accountId: "{{ YOUR ACCOUNT ID }}" - - # This section encloses config details for sending and formatting emails - # used as notifications. - emailer: - - # Configurable subject line used in notification emails. - subject: "Notice: Execution \"{{ workflow.name }}\" has {{ phase }} in \"{{ domain }}\"." - - # Your verified SES email sender. - sender: "flyte-notifications@company.com" - - # Configurable email body used in notifications. - body: > - Execution \"{{ workflow.name }} [{{ name }}]\" has {{ phase }} in \"{{ domain }}\". View details at - - http://flyte.company.com/console/projects/{{ project }}/domains/{{ domain }}/executions/{{ name }}. {{ error }} - -The full set of parameters which can be used for email templating are checked -into `code `_. - -.. _admin-config-example: - -Example config -============== - -You can find the full configuration file `here `__. - -.. 
rli:: https://raw.githubusercontent.com/flyteorg/flyteadmin/master/flyteadmin_config.yaml - :caption: flyteadmin/flyteadmin_config.yaml - :lines: 91-105 diff --git a/rsts/deployment/configuration/performance.rst b/rsts/deployment/configuration/performance.rst deleted file mode 100644 index 9d6f71ea8b..0000000000 --- a/rsts/deployment/configuration/performance.rst +++ /dev/null @@ -1,264 +0,0 @@ -.. _deployment-configuration-performance: - -###################################################### -Optimizing Performance -###################################################### - -.. tags:: Infrastructure, Kubernetes, Advanced - -.. tip:: Before getting started, it is always important to measure the performance. Flyte project publishes and manages some grafana templates as described in - :ref:`deployment-configuration-monitoring`. - -The video below contains an overview of the Flyte architecture, what is meant by "performance", details of one loop in FlytePropeller, and a demo of the Grafana Labs dashboard. - -.. youtube:: FJ-rG9lZDhY - -Scaling up FlytePropeller -========================== -`FlytePropeller `_ is the core engine of Flyte that executes the workflows for Flyte. -It is designed as a `Kubernetes Controller `_, where the desired state is specified as a FlyteWorkflow `Custom Resource `_. - -One of the base assumptions of FlytePropeller is that every workflow is independent and can be executed by a completely distinct process, without a need for communication with other processes. Meanwhile, one workflow tracks the dependencies between tasks using a DAG structure and hence constantly needs synchronization. -Currently, FlytePropeller executes Workflows by using an event loop to periodically track and amend the execution status. 
Within each iteration, a single thread requests the state of Workflow nodes and performs operations (i.e., scheduling node executions, handling failures, etc) to gracefully transition a Workflow from the observed state to the desired state (i.e., Success). Consequently, actual node executions are performed by various FlytePlugins, a diverse collection of operations spanning k8s and other remote services, and FlytePropeller is only responsible for effectively monitoring and managing these executions. - -FlytePropeller has a lot of knobs that can be tweaked for performance. The default configuration is usually adequate for small to medium sized installations of Flyte, that are running about 500 workflows concurrently with no noticeable overhead. In the case when the number of workflows increases, -FlytePropeller will automatically slow down, without losing correctness. - -Signs of slowdown ------------------- - -.. list-table:: Important metrics to look out for in Prometheus dashboard - :widths: 25 25 50 100 - :header-rows: 1 - - * - Metric - - Dashboard - - Alerting factor - - Effect - * - ``flyte:propeller:all:free_workers_count`` - - Flyte Propeller Dashboard - - Number of free-workers in all FlytePropeller instances are very low. - - This will increase overall latency for Each workflow propagation - * - ``sum(rate(flyte:propeller:all:round:raw_ms[5m])) by (wf)`` - - Flyte Propeller Dashboard - - Round Latency for Each workflow increases - - Flyte propeller is taking more time to process each workflow - * - ``sum(rate(flyte:propeller:all:main_depth[5m]))`` - - Flyte Propeller Dashboard - - Workflows take longer to start or slow - - The processing queue depth is long and is taking long to drain - -For each of the metrics above you can dig deeper into the cause for this spike in latency. All of them are mostly related to one latency and should be correlated - ``The Round latency!``. 
-The round-latency is the time it takes for FlytePropeller to perform a single iteration of workflow evaluation. To understand this, you have to understand the :ref:`divedeep-execution-timeline`. Once you understand this, continue reading on. - -Optimizing round latency -------------------------- - -Optimize FlytePropeller configuration -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Usually round-latency can be resolved by adjusting FlytePropeller config specified `here `_ or sometimes adjusting the global defaults for your deployment or per workflow-execution. -Let us first look at various config properties that can be set and would impact the performance of one round in Flyte and should be tweaked - -.. list-table:: Important Properties - :widths: 25 25 25 50 - :header-rows: 1 - - * - Property - - Section - - Rule of thumb - - Description - * - ``workers`` - - propeller - - Larger the number, implies more workflows can be evaluated in parallel. But it should depend on number of CPU cores assigned to FlytePropeller and evaluated against the cost of context switching. A number usually < 500 - 800 with 4-8 cpu cores works fine. - - Number of `logical threads` workers, that can work concurrently. Also implies number of workflows that can be executed in parallel. Since FlytePropeller uses go-routines, it can run way more than number of physical cores. - * - ``workflow-reeval-duration`` - - propeller - - lower the number - lower latency, lower throughput (low throughput is because the same workflow will be evaluated more times) - - frequency at which, given no external signal, a workflow should be re-evaluated by FlytePropeller's re-evaluation loop - * - ``downstream-eval-duration`` - - propeller - - lower the number - lower latency and lower throughput (low throughput is because the same workflow will be evaluated more times) - - This indicates how often external events, like pod completions, are recorded.
- * - ``max-streak-length`` - - propeller - - higher the number lower the latency for end to end workflow, especially for cached workflows - - number of consecutive rounds to try with one workflow - prioritize a hot workflow over others. - * - ``kube-client-config`` - - propeller - - This is how you can control the number of requests ceiling that FlytePropeller can initiate to KubeAPI. This is usual the #1 bottle neck - - this configures the kubernetes client used by FlytePropeller - * - ``workflowStore.policy`` - - propeller - - This config uses a trick in etcD to minimize number of redundant loops in FlytePropeller, thus improving free slots - - Use this to configure how FlytePropeller should evaluate workflows, the default is usually a good choice - * - ``storage.cache`` - - propeller - - This config is used to configure the write-through cache used by FlytePropeller on top of the metastore - - FlytePropeller uses the configure blob-store (can be changed to something more performant in the future) to optimize read and write latency, for all metadata IO operations. Metadata refers to the input and output pointers - * - ``admin-launcher.tps``, ``admin-launcher.cacheSize``, ``admin-launcher.workers`` - - propeller - - This config is used to configure the max rate and launch-plans that FlytePropeller can launch against FlyteAdmin - - It is essential to limit the number of writes from FlytePropeller to flyteadmin to prevent brown-outs or request throttling at the server. Also the cache reduces number of calls to the server. - * - ``tasks.backoff.max-duration`` - - propeller - - This config is used to configure the maximum back-off interval in case of resource-quota errors - - FlytePropeller will automatically back-off when k8s or other services request it to slowdown or when desired quotas are met. 
- * - ``max-parallelism`` - - admin, per workflow, per execution - - Refer to examples and documentation below - - docs below - - -In the above table the 2 most important configs are ``workers`` and ``kube-client-config``. - -The Kube client config controls the request throughput from FlytePropeller to the Kube API server. These requests may include creating/monitoring Pods or creating/updating FlyteWorkflow CRDs to track workflow execution. The default configuration (provided by k8s) contains very steep rate-limiting, and therefore FlytePropeller provides a default configuration that offers better performance. However, if your workload involves larger scales (e.g., >5k fanout dynamic or map tasks, >8k concurrent workflows, etc.,) the Kube client config rate limiting may still contribute to a noticeable drop in performance. Increasing the ``qps`` and ``burst`` values may help alleviate back pressure and improve FlytePropeller performance. An example of Kube-client-config is as follows: - -.. code-block:: yaml - - propeller: - kube-client-config: - qps: 100 # Refers to max rate of requests to KubeAPI server - burst: 50 # refers to max burst rate to Kube API server - timeout: 30s # Refers to timeout when talking with kubeapi server - - -.. note:: As you increase the number of workers in FlytePropeller it is important to increase the number of cpu's given to FlytePropeller pod. - -It is worth noting that the Kube API server tends to throttle requests transparently. This means that while tweaking performance by increasing the allowed frequency of Kube API server requests (e.g., increasing FlytePropeller workers or relaxing Kube client config rate-limiting), there may be steep performance decreases for no apparent reason. Looking at the Kube API server request queue metrics in these cases can assist in identifying whether throttling is to blame. 
Unfortunately, there is no one-size-fits-all solution here, and customizing these parameters for your workload will require trial and error. - -Another area of slowdown could be the size of the input-output cache that FlytePropeller maintains in-memory. This can be configured, while configuring -the storage for FlytePropeller. Rule of thumb, for FlytePropeller with x memory limit, allocate x/2 to the cache - -Learn: max-streak-length & ResourceVersionCaching -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Kubernetes controllers often use Informer caches, rather than reading data directly from KubeAPI. This is to prevent excessive requests to KubeAPI server. The caches are eventually consistent, i.e., every write by the controller is eventually replicated to the cache, but there can be time periods, when the cache lags. -Since FlytePropeller, runs Workflow evaluations as an event loop, which is triggered by any changes to one of the resources that a workflow spawned. -It is possible that a Workflow will be evaluated, even when the last write has not yet propagated to the Informer cache. EtcD also does not allow stale writes, i.e., writes with an object that is older than the object that was written. This is maintained using a server side vector-clock - called the resource version. -Stale writes are writes when the evaluation resulted in a mutation of an object that is older than the object recorded in etcD. -These stale writes often lead to conflicts and hence increase load on the KubeAPI server and on FlytePropeller as the workers are busy writing stale objects repeatedly. - -To prevent this duplication and redundancy, FlytePropeller employs a trick. For every write, it records the last known version number in the database and then tries to wait for the change to propagate to the informer cache. 
- -If `max-streaks` are enabled then instead of waiting for the informer cache to be refreshed, FlytePropeller uses its own in-memory copy to run multiple rounds as long as mutations occur or the max-streak-length configuration is met. This reduces the latency of cache propagation, which can be on the order of seconds. - -Worst case workflows: Poison Pills & max-parallelism -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -The worst case for FlytePropeller is workflows that have an extremely large fan-out. This is because FlytePropeller implements a greedy traversal algorithm, that tries to evaluate the entire unblocked nodes within a workflow in every round. -A solution for this is to limit the maximum number of nodes that can be evaluated. This can be done by setting max-parallelism for an execution. -This can be done in multiple ways - -#. Platform default: This allows to set platform-wide defaults for maximum concurrency within a Workflow execution. This can be overridden per Launch plan or per execution. - The default `maxParallelism is configured to be 25 `_. - It can be overridden with this config block in flyteadmin - - .. code-block:: yaml - - flyteadmin: - maxParallelism: 25 - -#. Default for a specific launch plan. For any launch plan, the maxParallelism value can be changed or altered. This can be done using :py:meth:`flytekit.LaunchPlan.get_or_create` or the :std:ref:`protos/docs/admin/admin:launchplancreaterequest` - **Flytekit Example** - - .. code-block:: python - - LaunchPlan.get_or_create( - name="my_cron_scheduled_lp", - workflow=date_formatter_wf, - max_parallelism=30, - ) - -#. Specify for an execution. For any specific execution the max-parallelism can be overridden. This can be done using flytectl (and soon flyteconsole). Refer to :std:ref:`flyteCtl docs ` - - - - -Scaling out FlyteAdmin -======================= -FlyteAdmin is a stateless service. Often, before needing to scale FlyteAdmin, you need to scale the backing database. 
Check out the FlyteAdmin Dashboard to see signs of latency degradation and increase the size of the backing Postgres instance. -FlyteAdmin is a stateless service and its replicas (in the kubernetes deployment) can be simply increased to allow higher throughput. - -Scaling out Datacatalog -======================== -Datacatalog is a stateless service. Often, before needing to scale Datacatalog, you need to scale the backing database. Check out the Datacatalog Dashboard to see signs of latency degradation and increase the size of the backing Postgres instance. -Datacatalog is a stateless service and its replicas (in the kubernetes deployment) can be simply increased to allow higher throughput. - -Scaling out FlytePropeller -=========================== - -Manual scale-out ----------------- -FlytePropeller can be run manually per namespace. This is not a recommended solution as it is harder to deploy, but if your organization can deploy and maintain multiple copies of FlytePropeller, you can use this. - -Automatic scale-out -------------------- -FlytePropeller Manager is a new component introduced as part of `this RFC `_ to facilitate horizontal scaling of FlytePropeller through sharding. Effectively, the Manager is responsible for maintaining liveness and proper configuration over a collection of FlytePropeller instances. This scheme uses k8s label selectors to deterministically assign FlyteWorkflow CRD responsibilities to FlytePropeller instances, effectively distributing processing load over the shards. - -Deployment of FlytePropeller Manager requires k8s configuration updates including a modified FlytePropeller Deployment and a new PodTemplate defining managed FlytePropeller instances. The easiest way to apply these updates is by setting the "flytepropeller.manager" value to "true" in the `helm deployment `_ and setting the manager config at "configmap.core.manager". 
- -Flyte provides a variety of Shard Strategies to configure how FlyteWorkflows are sharded among managed FlytePropeller instances. These include hash, which uses consistent hashing to load-balance evaluation over shards, and project / domain, which map the respective IDs to specific managed FlytePropeller instances. Below we include examples of helm configurations for each of the existing Shard Strategies. - -The Hash Shard Strategy, denoted by "type: hash" in the configuration below, uses consistent hashing to evenly distribute FlyteWorkflows over managed FlytePropeller instances. This configuration requires a "shard-count" variable which defines the number of managed FlytePropeller instances. - -.. code-block:: yaml - - configmap: - core: - # a configuration example using the "hash" shard type - manager: - # pod and scanning configuration redacted - # ... - shard: - type: hash # use the "hash" shard strategy - shard-count: 4 # the total number of shards - -The Project and Domain Shard Strategies, denoted by "type: project" and "type: domain" respectively, use the FlyteWorkflow project and domain metadata to shard FlyteWorkflows. These Shard Strategies are configured using a "per-shard-mapping" option, which is a list of ID lists. Each element in the "per-shard-mapping" list defines a new shard and the ID list assigns responsibility for the specified IDs to that shard. A shard configured as a single wildcard ID (i.e. "*") is responsible for all IDs that are not covered by other shards. Only a single shard may be configured with a wildcard ID and on that shard there must be only one ID, namely the wildcard. - -.. code-block:: yaml - - configmap: - core: - # a configuration example using the "project" shard type - manager: - # pod and scanning configuration redacted - # ... 
- shard: - type: project # use the "project" shard strategy - per-shard-mapping: # a list of per shard mappings - one shard is created for each element - - ids: # the list of ids to be managed by the first shard - - flytesnacks - - ids: # the list of ids to be managed by the second shard - - flyteexamples - - flytelabs - - ids: # the list of ids to be managed by the third shard - - "*" # use the wildcard to manage all ids not managed by other shards - - configmap: - core: - # a configuration example using the "domain" shard type - manager: - # pod and scanning configuration redacted - # ... - shard: - type: domain # use the "domain" shard strategy - per-shard-mapping: # a list of per shard mappings - one shard is created for each element - - ids: # the list of ids to be managed by the first shard - - production - - ids: # the list of ids to be managed by the second shard - - "*" # use the wildcard to manage all ids not managed by other shards - -Multi-Cluster mode -=================== -In our experience at Lyft, we saw that the Kubernetes cluster would have problems before FlytePropeller or FlyteAdmin would have impact. Thus Flyte supports adding multiple dataplane clusters by default. Each dataplane cluster, has one or more FlytePropellers running in them, and flyteadmin manages the routing and assigning of workloads to these clusters. - - -Improving etcd Performance -=========================== - -Offloading Static Workflow Information from CRD ------------------------------------------------ - -Flyte uses a k8s CRD (Custom Resource Definition) to store and track workflow executions. This resource includes the workflow definition, for example tasks and subworkflows that are involved and the dependencies between nodes, but also includes the execution status of the workflow. The latter information (ie. runtime status) is dynamic, meaning changes during the workflow's execution as nodes transition phases and the workflow execution progresses. 
However, the former information (ie. workflow definition) remains static, meaning it will never change and is only consulted to retrieve node definitions and workflow dependencies. - -CRDs are stored within etcd, a key-value datastore heavily used in kubernetes. Etcd requires a complete rewrite of the value data every time a single field changes. Consequently, the read / write performance of etcd, as with all key-value stores, is strongly correlated with the size of the data. In Flyte's case, to guarantee only-once execution of nodes we need to persist workflow state by updating the CRD at every node phase change. As the size of a workflow increases this means we are frequently rewriting a large CRD. In addition to poor read / write performance in etcd this update may be restricted by a hard limit on the overall CRD size. - -To counter the challenges of large FlyteWorkflow CRDs Flyte includes a configuration option to offload the static portions of the CRD (ie. workflow / task / subworkflow definitions and node dependencies) to the blobstore. This functionality can be enabled by setting the ``useOffloadedWorkflowClosure`` option to ``true`` in the `FlyteAdmin configuration `_. When set, the FlyteWorkflow CRD will populate a ``WorkflowClosureReference`` field on the CRD with the location of the static data and FlytePropeller will read this information (through a cache) during each workflow evaluation. One important note is that currently this requires FlyteAdmin and FlytePropeller to have access to the same blobstore since FlyteAdmin only specifies a blobstore location in the CRD. diff --git a/rsts/deployment/deployment/cloud_production.rst b/rsts/deployment/deployment/cloud_production.rst deleted file mode 100644 index c5993dbe53..0000000000 --- a/rsts/deployment/deployment/cloud_production.rst +++ /dev/null @@ -1,74 +0,0 @@ -.. 
_deployment-deployment-cloud-production: - -################################################# -Single Cluster Production-grade Cloud Deployment -################################################# - -.. tags:: Kubernetes, Infrastructure, Advanced - -The following guide assumes you've successfully set up a -:ref:`Single Cluster Simple Cloud Deployment `. - -This guide describes additional setup steps to productionize your Flyte -deployment. While not strictly required, we recommend that you incorporate these -changes. - -*********** -Ingress/DNS -*********** - -Assuming your cluster has an existing Ingress controller, Flyte will be -accessible without port forwarding. The base chart installed in the previous -guide already contains the ingress rules, but they are not enabled by default. - -To turn on ingress, update your ``values.yaml`` file to include the following block. - -.. tabs:: - - .. group-tab:: ``flyte-binary`` on EKS using NGINX - - .. literalinclude:: ../../../charts/flyte-binary/eks-production.yaml - :caption: charts/flyte-binary/eks-production.yaml - :language: yaml - :lines: 93-102 - - .. group-tab:: ``flyte-binary``/ on EKS using ALB - - .. literalinclude:: ../../../charts/flyte-binary/eks-production.yaml - :caption: charts/flyte-binary/eks-production.yaml - :language: yaml - :lines: 106-118 - - .. group-tab:: ``flyte-core`` on GCP using NGINX - - .. literalinclude:: ../../../charts/flyte-core/values-gcp.yaml - :caption: charts/flyte-core/values-gcp.yaml - :language: yaml - :lines: 156-164 - - -*************** -Authentication -*************** - -Authentication comes with Flyte in the form of OAuth 2.0. Please see the -`authentication guide `__ for instructions. - -.. note:: - - Authorization is not supported out-of-the-box in Flyte. This is due to the - wide and variety of authorization requirements that different organizations use. - -*************** -Upgrade Path -*************** - -To upgrade, simply ``helm upgrade`` your relevant chart. 
- -One thing to keep in mind during upgrades is that Flyte is released regularly -using semantic versioning. Since Flyte ``1.0.0`` will be with us for a while, -you should expect large changes in minor version bumps, with backwards -compatibility being maintained, for the most part. - -If you're using the :ref:`multi-cluster ` -deployment model for Flyte, components should be upgraded together. diff --git a/rsts/deployment/deployment/cloud_simple.rst b/rsts/deployment/deployment/cloud_simple.rst deleted file mode 100644 index b280546708..0000000000 --- a/rsts/deployment/deployment/cloud_simple.rst +++ /dev/null @@ -1,132 +0,0 @@ -.. _deployment-deployment-cloud-simple: - -####################################### -Single Cluster Simple Cloud Deployment -####################################### - -.. tags:: Kubernetes, Infrastructure, Basic - -These instructions are suitable for the main cloud providers. - -**************** -Prerequisites -**************** -In order to install Flyte, you will need access to the following: - -* A Kubernetes cluster: `EKS `__, - `GKE `__, etc. -* At least one blob storage bucket: `S3 `__, - `GCS `__, etc. -* A Postgres database: `RDS `__, - `CloudSQL `__, etc. -* At least one IAM role on `AWS `__, - `GCP `__, etc. This is the role for the Flyte - backend service to assume. You can provision another role for user code to assume as well. - -As Flyte documentation cannot keep up with the pace of change of the cloud -provider APIs, please refer to their official documentation for each of -these prerequisites. - -.. note:: - - `Union.AI `__ plans to open-source a reference - implementation of these requirements for the major cloud providers in early - 2023. - -*************** -Installation -*************** - -Flyte is installed via a `Helm `__ chart. First, add the Flyte -chart repo to Helm: - -.. prompt:: bash $ - - helm repo add flyteorg https://flyteorg.github.io/flyte - -Then download and update the values files: - -.. 
prompt:: bash $ - - curl -sL https://raw.githubusercontent.com/flyteorg/flyte/master/charts/flyte-binary/eks-starter.yaml - -Finally, install the chart: - -.. prompt:: bash $ - - helm install flyte-backend flyteorg/flyte-binary \ - --dry-run --namespace flyte --values eks-starter.yaml - -When ready to install, remove the ``--dry-run`` switch. - -Verify the Installation -======================= - -The values supplied by the ``eks-starter.yaml`` file provides only the simplest -installation of Flyte. The core functionality and scalability of Flyte will be -there, but no plugins are included (e.g. Spark tasks will not work), there is no -DNS or SSL, and there is no authentication. - -Port Forward Flyte Service --------------------------- - -To verify the installation therefore you'll need to port forward the Kubernetes service. - -.. prompt:: bash $ - - kubectl -n flyte port-forward service/flyte-binary 8088:8088 8089:8089 - -You should be able to navigate to http://localhost:8088/console. - -The Flyte server operates on two different ports, one for HTTP traffic and one for gRPC traffic, which is why we port forward both. - -From here, you should be able to run through the :ref:`Getting Started ` -examples again. Save a backup copy of your existing configuration if you have one -and generate a new config with ``flytectl``. - -.. prompt:: bash $ - - mv ~/.flyte/config.yaml ~/.flyte/bak.config.yaml - flytectl config init --host localhost:8088 - -This will produce a file like: - -.. code-block:: yaml - :caption: ``~/.flyte/config.yaml`` - - admin: - # For GRPC endpoints you might want to use dns:///flyte.myexample.com - endpoint: dns:///localhost:8088 - authType: Pkce - insecure: true - logger: - show-source: true - level: 0 - -Test Workflow -------------- - -You can test a workflow by cloning the ``flytesnacks`` repo and running the -hello world example: - -.. 
prompt:: bash $ - - git clone https://github.com/flyteorg/flytesnacks - cd flytesnacks/cookbook - pyflyte run --remote core/flyte_basics/hello_world.py my_wf - -*********************************** -Flyte in on-premises infrastructure -*********************************** - -Sometimes, it's also helpful to be able to set up a Flyte environment in an on-premises Kubernetes environment or even on a laptop for testing and development purposes. -Check out `this community-maintained tutorial `__ to learn how to setup the required dependencies and deploy the `flyte-binary` chart to a local Kubernetes cluster. - - -************* -What's Next? -************* - -Congratulations ⭐️! Now that you have a Flyte cluster up and running on the cloud, -you can productionize it by following the :ref:`deployment-deployment-cloud-production` -guide. diff --git a/rsts/deployment/deployment/index.rst b/rsts/deployment/deployment/index.rst deleted file mode 100644 index eb06d0a6c0..0000000000 --- a/rsts/deployment/deployment/index.rst +++ /dev/null @@ -1,159 +0,0 @@ -.. _deployment-deployment: - -################### -Deployment Paths -################### - -The articles in this section will guide a new Flyte administrator through deploying Flyte. - -The most complex parts of a Flyte deployment are authentication, ingress, DNS, and SSL support. Due to the complexity -introduced by these components, we recommend deploying Flyte without these at first and relying on K8s port forwarding -to test your Flyte cluster. After the base deployment is tested, these additional features can be turned on more -seamlessly. - -******************************** -Components of a Flyte Deployment -******************************** - -.. important:: - - We recommend working with your infrastructure team to set up the cloud service requirements below. 
- -Relational Database -=================== - -Two of Flyte's components, :ref:`FlyteAdmin ` and :ref:`DataCatalog `, rely on -PostgreSQL to store persistent records. In the sandbox deployment, a containerized version of Postgres is included but -for a proper Flyte installation, we recommend one of the cloud provided databases. - -.. note:: - - We recommend the following services, depending on your cloud platform of choice: - - - **AWS**: `RDS `__ - - **GCP**: `Cloud SQL `__ - - **Azure**: `PostgreSQL `__ - -Production Grade Object Store -============================= - -Core Flyte components such as :ref:`FlyteAdmin `, :ref:`FlytePropeller `, -:ref:`DataCatalog `, and user runtime containers rely on an object store to hold files. The sandbox -deployment comes with a containerized `Minio `__, which offers AWS S3 compatibility. - -.. note:: - - We recommend swapping this out for a production-grade object store, depending on your cloud platform of choice: - - - **AWS**: `S3 `__ - - **GCP**: `GCS `__ - - **Azure**: `Azure Blob Storage `__ - -************************ -Flyte Deployment Paths -************************ - -There are three different paths for deploying a Flyte cluster: - -.. list-table:: - :header-rows: 1 - :widths: 25, 65, 20 - - * - Deployment Path - - Description - - Production-grade? - * - :ref:`Sandbox ` - - This uses portable replacements for the relational database and blob store. - It's good for testing out and experimenting with Flyte. - - ❌ - * - :ref:`Single Cluster ` - - This bundles Flyte as one executable. It runs on a single K8s cluster and - supports all of Flyte's extensions and plugins. Once the simple deployment - is established, you can follow steps to :ref:`productionize it `. - - ✅ - * - :ref:`Multiple Clusters ` - - For large-scale deployments that require multiple K8s clusters. 
Flyte's control - plane (:ref:`FlyteAdmin `, :ref:`FlyteConsole `, and :ref:`DataCatalog `) - is separated from Flyte's execution engine, :ref:`FlytePropeller `, which runs - typically once per compute cluster. - - ✅ - -.. important:: - - We recommend the **Single Cluster** option for a capable though not massively scalable cluster. - - This option is appropriate if all your compute can `fit on one EKS cluster `__ . - As of this writing, a single Flyte cluster can handle more than 13,000 nodes. - - Regardless of using single or multiple Kubernetes clusters for Flyte, note that ``FlytePropeller`` -the main data plane component- can be scaled out as well by using ``sharding`` if scale demands require it. - See `Automatic scale-out `__ to learn more about the sharding mechanism. - - - -Helm -==== - -Flyte uses `Helm `__ as the K8s release packaging solution, though you may still see some old -`Kustomize `__ artifacts in the `flyte `__ repo. The core Flyte -team maintains Helm charts that correspond with the latter two deployment paths. - -.. note:: - - Technically there is a Helm chart for the sandbox environment as well, but it's been tested only with the Dockerized - K3s bundled container. - -.. dropdown:: ``flyte-binary``: chart for the **Single Cluster** option. - :title: text-muted - - .. literalinclude:: ../../../charts/flyte-binary/Chart.yaml - :language: yaml - :caption: charts/flyte-binary/Chart.yaml - -.. dropdown:: ``flyte-core``: chart for the **Multiple Cluster** option. - :title: text-muted - - .. literalinclude:: ../../../charts/flyte-core/Chart.yaml - :language: yaml - :caption: charts/flyte-core/Chart.yaml - -.. dropdown:: ``flyte-deps``: chart that installs additional useful dependencies alongside Flyte. - :title: text-muted - - .. literalinclude:: ../../../charts/flyte-deps/Chart.yaml - :language: yaml - :caption: charts/flyte-deps/Chart.yaml - -.. 
dropdown:: ``flyte``: chart that depends on ``flyte-core``, installing additional dependencies to Flyte deployment. - :title: text-muted - - .. literalinclude:: ../../../charts/flyte/Chart.yaml - :language: yaml - :caption: charts/flyte/Chart.yaml - -************************************** -Deployment Tips and Tricks -************************************** - -Due to the many choices and constraints that you may face in your organization, the specific steps for deploying Flyte -can vary significantly. For example, which cloud platform to use is typically a big fork in the road for many, and there -are many choices to make in terms of Ingress controllers, auth providers, and versions of different dependent libraries that -may interact with other parts of your stack. - -Considering the above, we recommend checking out the `"Flyte The Hard Way" `__ set of community-maintained tutorials that can guide you through the process of preparing the infrastructure and -deploying Flyte. - -In addition to searching and posting on the `#flyte-deployment Slack channel `__, -we have a `Github Discussion `__ -section dedicated to deploying Flyte. Feel free to submit any hints you've found helpful as a discussion, ask questions, -or simply document what worked or what didn't work for you. - - -.. toctree:: - :maxdepth: 1 - :name: deployment options toc - :hidden: - - sandbox - cloud_simple - cloud_production - multicluster diff --git a/rsts/deployment/deployment/multicluster.rst b/rsts/deployment/deployment/multicluster.rst deleted file mode 100644 index e8c6b84f13..0000000000 --- a/rsts/deployment/deployment/multicluster.rst +++ /dev/null @@ -1,661 +0,0 @@ -.. _deployment-deployment-multicluster: - -###################################### -Multiple Kubernetes Cluster Deployment -###################################### - -.. tags:: Kubernetes, Infrastructure, Advanced - -.. 
note:: - - The multicluster deployment described in this section, assumes you have deployed - the ``flyte-core`` Helm chart, which runs the individual Flyte components separately. - This is needed because in a multicluster setup, the execution engine is - deployed to multiple K8s clusters; it won't work with the ``flyte-binary`` - Helm chart, since it deploys all Flyte services as one single binary. - -Scaling Beyond Kubernetes -------------------------- - -.. tip:: - - As described in the `Architecture Overview `_, - the Flyte ``Control Plane`` sends workflows off to the ``Data Plane`` for - execution. The data plane fulfills these workflows by launching pods in - Kubernetes. - - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/common/flyte-multicluster-arch-v2.png - -The case for multiple Kubernetes clusters may arise due to security constraints, -cost effectiveness or a need to scale out computing resources. - -To address this, you can deploy Flyte's data plane to multiple Kubernetes clusters. -The control plane (FlyteAdmin) can be configured to submit workflows to -these individual data planes. Additionally, Flyte provides the mechanisms for -administrators to retain control on the workflow placement logic while enabling -users to reap the benefits using simple abstractions like ``projects`` and ``domains``. - -Prerequisites -************* - -To make sure that your multicluster deployment is able to scale and process -requests successfully, the following environment-specific requirements should be met: - -.. tabbed:: AWS - - 1. An IAM Policy that defines the permissions needed for Flyte. A minimum set of permissions include: - - .. code-block:: json - - "Action": [ - "s3:DeleteObject*", - "s3:GetObject*", - "s3:ListBucket", - "s3:PutObject*" - ], - "Resource": [ - "arn:aws:s3:::*", - "arn:aws:s3:::*/*" - ], - - - 2. 
Two IAM Roles configured: one for the control plane components, and another for the data plane where the worker Pods and ``flytepropeller`` run. - - .. note:: - - Using the guidance from this document, make sure to follow your organization's policies to configure IAM resources. - - 3. An OIDC Provider associated with each of your EKS clusters. You can use the following command to create and connect the Provider: - - .. prompt:: bash - - eksctl utils associate-iam-oidc-provider --cluster --approve - - 4. An IAM Trust Relationship that associates each EKS cluster type (control plane or data plane) with the Service Account(s) and namespaces - where the different elements of the system will run. - - Follow the steps in this section to complete the requirements indicated above: - - **Control plane role** - - 1. Use the following command to simplify the process of both creating a role and configuring an initial Trust Relationship: - - .. prompt:: bash - - eksctl create iamserviceaccount --cluster= --name=flyteadmin --role-only --role-name=flyte-controlplane-role --attach-policy-arn --approve --region --namespace flyte - - 2. Go to the **IAM** section in your **AWS Management Console** and select the role that was just created - 3. Go to the **Trust Relationships** tab and **Edit the Trust Policy** - 4. Add the ``datacatalog`` Service Account to the ``sub`` section - - .. note:: - - When caching is enabled, the ``datacatalog`` service store hashes of workflow inputs alongside with outputs on blob storage. Learn more `here `__. - - Example configuration: - - .. 
code-block:: json - - { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Principal": { - "Federated": "arn:aws:iam:::oidc-provider/oidc.eks..amazonaws.com/id/" - }, - "Action": "sts:AssumeRoleWithWebIdentity", - "Condition": { - "StringEquals": { - "oidc.eks..amazonaws.com/id/:aud": "sts.amazonaws.com", - "oidc.eks..amazonaws.com/id/:sub": [ - "system:serviceaccount:flyte:flyteadmin", - "system:serviceaccount:flyte:datacatalog" - ] - } - } - } - ] - } - - **Data plane role** - - 1. Create the role and Trust Relationship: - - .. prompt:: bash - - eksctl create iamserviceaccount --cluster= --name=flytepropeller --role-only --role-name=flyte-dataplane-role --attach-policy-arn --approve --region --namespace flyte - - 2. Edit the **Trust Relationship** of the data plane role - - .. note:: - - By default, every Pod created for Task execution, uses the ``default`` Service Account on their respective namespace. In your cluster, you'll have as many - namespaces as ``project`` and ``domain`` combinations you may have. Hence, it might be useful to use a ``StringLike`` condition and to use a wildcard for the namespace name in the Trust Policy - - 3. Add the ``default`` Service Account: - - - Example configuration for one data plane cluster: - - .. code-block:: json - - { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Principal": { - "Federated": "arn:aws:iam:::oidc-provider/oidc.eks..amazonaws.com/id/" - }, - "Action": "sts:AssumeRoleWithWebIdentity", - "Condition": { - "StringLike": { - "oidc.eks..amazonaws.com/id/.:aud": "sts.amazonaws.com", - "oidc.eks..amazonaws.com/id/.:sub": [ - "system:serviceaccount:flyte:flytepropeller", - "system:serviceaccount:*:default" - ] - } - } - } - - .. note:: - - To further refine the Trust Relationship, consider using a ``StringEquals`` condition and adding the ``default`` Service Account only for the ``project``-``domain`` - namespaces where Flyte tasks will run, instead of using a wildcard. 
- -.. _dataplane-deployment: - -Data Plane Deployment -********************* - -This guide assumes that you have two Kubernetes clusters and that you can access -them all with ``kubectl``. - -Let's call these clusters ``dataplane1`` and ``dataplane2``. In this section, you'll prepare -the first cluster only. - -1. Add the ``flyteorg`` Helm repo: - -.. prompt:: bash - - helm repo add flyteorg https://flyteorg.github.io/flyte - helm repo update - # Get flyte-core helm chart - helm fetch --untar --untardir . flyteorg/flyte-core - cd flyte-core - -2. Open the ``values-dataplane.yaml`` file and add the following contents: - - .. code-block:: yaml - - configmap: - admin: - admin: - endpoint: :443 #indicate the URL you're using to connect to Flyte - insecure: false #enables secure communication over SSL. Requires a signed certificate - catalog: - catalog-cache: - endpoint: :443 - insecure: false - -.. note:: - - This step is needed so the ``flytepropeller`` instance in the data plane cluster is able to send notifications - back to the ``flyteadmin`` service in the control plane. The ``catalog`` service runs in the control plane and is used when caching is enabled. - -3. Install Flyte data plane Helm chart: - -.. note:: - - Use the same ``values-eks.yaml`` or ``values-gcp.yaml`` file you used to deploy the control plane. - -.. tabbed:: AWS - - .. code-block:: - - helm install flyte-core-data flyteorg/flyte-core -n flyte \ - --values values-eks.yaml --values values-dataplane.yaml \ - --create-namespace - -.. tabbed:: GCP - - .. code-block:: - - helm install flyte-core-data -n flyte flyteorg/flyte-core \ - --values values-gcp.yaml \ - --values values-dataplane.yaml \ - --create-namespace flyte - -.. _control-plane-deployment: - -Control Plane configuration -********************************* - -For ``flyteadmin`` to access and create Kubernetes resources in one or more -Flyte data plane clusters, it needs credentials to each cluster. 
-Flyte makes use of Kubernetes Service Accounts to enable every control plane cluster to perform -authenticated requests to the data plane Kubernetes API Server. -The default behaviour is that the Helm chart creates a `ServiceAccount `_ -in each data plane cluster. -In order to verify requests, the Kubernetes API Server expects a `signed bearer token `__ -attached to the Service Account. As of Kubernetes 1.24 and above, the bearer token has to be generated manually. - - -1. Use the following manifest to create a long-lived bearer token for the ``flyteadmin`` Service Account in your data plane cluster: - - .. prompt:: bash - - kubectl apply -f - < - -5. Obtain the corresponding certificate: - -.. prompt:: bash $ - - kubectl get secret -n flyte dataplane1-token \ - -o jsonpath='{.data.ca\.crt}' | pbcopy - -6. Add another entry on your ``secrets.yaml`` file for the certificate: - -.. code-block:: yaml - :caption: secrets.yaml - - apiVersion: v1 - kind: Secret - metadata: - name: cluster-credentials - namespace: flyte - type: Opaque - data: - dataplane_1_token: - dataplane_1_cacert: - -7. Connect to your control plane cluster and create the ``cluster-credentials`` secret: - -.. prompt:: bash $ - - kubectl apply -f secrets.yaml - -8. Create a file named ``values-override.yaml`` and add the following config to it: - -.. 
code-block:: yaml - :caption: values-override.yaml - - flyteadmin: - additionalVolumes: - - name: cluster-credentials - secret: - secretName: cluster-credentials - additionalVolumeMounts: - - name: cluster-credentials - mountPath: /var/run/credentials - initContainerClusterSyncAdditionalVolumeMounts: - - name: cluster-credentials - mountPath: /etc/credentials - configmap: - clusters: - labelClusterMap: - label1: - - id: dataplane_1 - weight: 1 - clusterConfigs: - - name: "dataplane_1" - endpoint: https://:443 - enabled: true - auth: - type: "file_path" - tokenPath: "/var/run/credentials/dataplane_1_token" - certPath: "/var/run/credentials/dataplane_1_cacert" - -.. note:: - - Typically, you can obtain your Kubernetes API endpoint URL using the following command: - - .. prompt:: bash $ - - kubectl cluster-info - -In this configuration, ``label1`` and ``label2`` are just labels that we will use later in the process -to configure mappings that enable workflow executions matching those labels, to be scheduled -on one or multiple clusters depending on the weight (e.g. ``label1`` on ``dataplane_1``). The ``weight`` is the -priority of a specific cluster, relative to the other clusters under the ``labelClusterMap`` entry. The total sum of weights under a particular -label has to be 1. - -9. Add the ``flyte-dataplane-role`` IAM Role as the ``defaultIamRole`` in your ``values-eks.yaml`` file. `See section here `__ - -10. Update the control plane Helm release: - -.. note:: - This step will disable ``flytepropeller`` in the control plane cluster, leaving no possibility of running workflows there. If you require - the control plane to run workflows, edit the ``values-controlplane.yaml`` file and set ``flytepropeller.enabled`` to ``true``. Then, perform the ``helm upgrade`` operation and complete the steps in :ref:`this section ` to configure it - as a dataplane cluster. - -.. tabbed:: AWS - - .. 
code-block:: - - helm upgrade flyte-core flyteorg/flyte-core \ - --values values-eks-controlplane.yaml --values values-override.yaml \ - --values values-eks.yaml -n flyte - -.. tabbed:: GCP - - .. code-block:: - - helm upgrade flyte -n flyte flyteorg/flyte-core values.yaml \ - --values values-gcp.yaml \ - --values values-controlplane.yaml \ - --values values-override.yaml - -11. Verify that all Pods in the ``flyte`` namespace are ``Running``: - -Example output: - -.. prompt:: bash $ - - kubectl get pods -n flyte - NAME READY STATUS RESTARTS AGE - datacatalog-86f6b9bf64-bp2cj 1/1 Running 0 23h - datacatalog-86f6b9bf64-fjzcp 1/1 Running 0 23h - flyteadmin-84f666b6f5-7g65j 1/1 Running 0 23h - flyteadmin-84f666b6f5-sqfwv 1/1 Running 0 23h - flyteconsole-cdcb48b56-5qzlb 1/1 Running 0 23h - flyteconsole-cdcb48b56-zj75l 1/1 Running 0 23h - flytescheduler-947ccbd6-r8kg5 1/1 Running 0 23h - syncresources-6d8794bbcb-754wn 1/1 Running 0 23h - - -Configure Execution Cluster Labels -********************************** - -The next step is to configure project-domain or workflow labels to schedule on a specific -Kubernetes cluster. - -.. tabbed:: Configure Project & Domain - - 1. Create an ``ecl.yaml`` file with the following contents: - - .. code-block:: yaml - - domain: development - project: project1 - value: label1 - - .. note:: - - Change ``domain`` and ``project`` according to your environment. The ``value`` has - to match with the entry under ``labelClusterMap`` in the ``values-override.yaml`` file. - - 2. Repeat step 1 for every project-domain mapping you need to configure, creating a YAML file for each one. - - 3. Update the execution cluster label of the project and domain: - - .. prompt:: bash $ - - flytectl update execution-cluster-label --attrFile ecl.yaml - - Example output: - - .. prompt:: bash $ - - Updated attributes from team1 project and domain development - - - 4. Execute a workflow indicating project and domain: - - .. 
prompt:: bash $ - - pyflyte run --remote --project team1 --domain development example.py training_workflow \  ✔ ╱ docs-development-env  - --hyperparameters '{"C": 0.1}' - -.. tabbed:: Configure a Specific Workflow mapping - - 1. Create a ``workflow-ecl.yaml`` file with the following example contents: - - .. code-block:: yaml - - domain: development - project: project1 - workflow: example.training_workflow - value: project1 - - 2. Update execution cluster label of the project and domain - - .. prompt:: bash $ - - flytectl update execution-cluster-label \ - -p project1 -d development \ - example.training_workflow \ - --attrFile workflow-ecl.yaml - - 3. Execute a workflow indicating project and domain: - - .. prompt:: bash $ - - pyflyte run --remote --project team1 --domain development example.py training_workflow \  ✔ ╱ docs-development-env  - --hyperparameters '{"C": 0.1}' - -Congratulations 🎉! With this, the execution of workflows belonging to a specific -project-domain or a single specific workflow will be scheduled on the target label -cluster. - -Day 2 Operations ----------------- - -Add another Kubernetes cluster -****************************** - -Find in this section the necessary steps to scale out your deployment by adding one Kubernetes cluster. -The process can be repeated for additional clusters. - -.. tabbed:: AWS - - - - 1. Create the new cluster: - - .. prompt:: bash $ - - eksctl create cluster --name flyte-dataplane-2 --region --version 1.25 --vpc-private-subnets , --without-nodegroup - - .. note:: - - This is only one of multiple ways to provision an EKS cluster. Follow your organization's policies to complete this step. - - - 2. Add a nodegroup to the cluster. Typically ``t3.xlarge`` instances provide enough resources to get started. Follow your organization's policies in this regard. - - 4. Create an OIDC Provider for the new cluster: - - .. 
prompt:: bash $ - - eksctl utils associate-iam-oidc-provider --cluster flyte-dataplane-2 --region --approve - - 5. Take note of the OIDC Provider ID: - - .. prompt:: bash $ - - aws eks describe-cluster --region --name flyte-dataplane-2 --query "cluster.identity.oidc.issuer" --output text - - 6. Go to the **IAM** section in the **AWS Management Console** and edit the **Trust Policy** of the ``flyte-dataplane-role`` - 7. Add a new ``Principal`` with the new cluster's OIDC Provider ID. Include the ``Action`` and ``Conditions`` section: - - .. code-block:: json - - { - "Version": "2012-10-17", - "Statement": [ - { - "Effect": "Allow", - "Principal": { - "Federated": "arn:aws:iam:::oidc-provider/oidc.eks..amazonaws.com/id/" - }, - "Action": "sts:AssumeRoleWithWebIdentity", - "Condition": { - "StringLike": { - "oidc.eks..amazonaws.com/id/:aud": "sts.amazonaws.com", - - "oidc.eks..amazonaws.com/id/:sub": [ - "system:serviceaccount:flyte:flytepropeller", - "system:serviceaccount:*:default" - ] - } - } - }, - { - "Effect": "Allow", - "Principal": { - "Federated": "arn:aws:iam:::oidc-provider/oidc.eks..amazonaws.com/id/" - }, - "Action": "sts:AssumeRoleWithWebIdentity", - "Condition": { - "StringLike": { - "oidc.eks..amazonaws.com/id/:aud": "sts.amazonaws.com", - "oidc.eks..amazonaws.com/id/:sub": [ - "system:serviceaccount:flyte:flytepropeller", - "system:serviceaccount:*:default" - ] - } - } - } - ] - } - - - - 7. Install the data plane Helm chart following the steps in the **Data plane deployment** section. See :ref:`section `. - 8. Follow steps 1-3 in the **control plane configuration** section (see :ref:`section `) to generate and populate a new section in your ``secrets.yaml`` file - - Example: - - .. code-block:: yaml - - apiVersion: v1 - kind: Secret - metadata: - name: cluster-credentials - namespace: flyte - type: Opaque - data: - dataplane_1_token: - dataplane_1_cacert: - dataplane_2_token: - dataplane_2_cacert: - - 9. 
Connect to the control plane cluster and update the ``cluster-credentials`` Secret: - - .. prompt:: bash $ - - kubect apply -f secrets.yaml - - 10. Go to your ``values-override.yaml`` file and add the information of the new cluster. Adding a new label is not entirely needed. - Nevertheless, in the following example a new label is created to illustrate Flyte's capability to schedule workloads on different clusters - in response to user-defined mappings of ``project``, ``domain`` and ``label``:abbr: - - .. code-block:: yaml - - ... #all the above content remains the same - configmap: - clusters: - labelClusterMap: - label1: - - id: dataplane_1 - weight: 1 - label2: - - id: dataplane_2 - weight: 1 - clusterConfigs: - - name: "dataplane_1" - endpoint: https://.com:443 - enabled: true - auth: - type: "file_path" - tokenPath: "/var/run/credentials/dataplane_1_token" - certPath: "/var/run/credentials/dataplane_1_cacert" - - name: "dataplane_2" - endpoint: https://:443 - enabled: true - auth: - type: "file_path" - tokenPath: "/var/run/credentials/dataplane_2_token" - certPath: "/var/run/credentials/dataplane_2_cacert" - - 11. Update the Helm release in the control plane cluster: - - .. prompt:: bash $ - - helm upgrade flyte-core-control flyteorg/flyte-core -n flyte --values values-controlplane.yaml --values values-eks.yaml --values values-override.yaml - - 12. Create a new execution cluster labels file with the following sample content: - - .. code-block:: yaml - - domain: production - project: team1 - value: label2 - - 13. Update the cluster execution labels for the project: - - .. prompt:: bash $ - - flytectl update execution-cluster-label --attrFile ecl-production.yaml - - 14. Finally, submit a workflow execution that matches the label of the new cluster: - - .. prompt:: bash $ - - pyflyte run --remote --project team1 --domain production example.py training_workflow \ - --hyperparameters '{"C": 0.1}' - - 15. 
A successful execution should be visible on the UI, confirming it ran in the new cluster: - - .. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/common/multicluster-execution.png \ No newline at end of file diff --git a/rsts/deployment/deployment/sandbox.rst b/rsts/deployment/deployment/sandbox.rst deleted file mode 100644 index 970d001fb5..0000000000 --- a/rsts/deployment/deployment/sandbox.rst +++ /dev/null @@ -1,144 +0,0 @@ -.. _deployment-deployment-sandbox: - -######################### -Sandbox Deployment -######################### - -.. tags:: Kubernetes, Infrastructure, Basic - -A sandbox deployment of Flyte bundles together portable versions of Flyte's -dependencies such as a relational database and durable object store. - -For the blob store requirements, Flyte Sandbox uses `Minio `__, -which offers an S3 compatible interface, and for Postgres, it uses the stock -Postgres Docker image and Helm chart. - -.. important:: - - The sandbox deployment is not suitable for production environments. For instructions on how to create a - production-ready Flyte deployment, checkout the :ref:`Deployment Paths ` guide. - -******************************************* -Flyte Sandbox as a Single Docker Container -******************************************* - -Flyte provides a way for creating a Flyte cluster as a self-contained Docker image. This is mini-replica of an -entire Flyte deployment, without the scalability and with minimal extensions. - -The Flyte Sandbox can be run on any environment that supports containers and makes it extremely easy for users of Flyte -to try out the platform and get a feel for the user experience, all without having to understand Kubernetes or dabble -with configuration. - -.. note:: - - The Flyte single container sandbox is also used by the team to run continuous integration tests and used by the - :ref:`cookbook:userguide`, :ref:`cookbook:tutorials` and :ref:`cookbook:integrations` documentation. 
- -Requirements -============ - -- Install `kubectl `__. -- Install `docker `__ or any other OCI-compatible tool, like Podman or LXD. -- Install `flytectl `__, the official CLI for Flyte. - -While Flyte can run any OCI-compatible task image using the default Kubernetes container runtime (``containerd``), the Flyte -core maintainers typically use Docker. Note that the ``flytectl demo`` command does rely on Docker APIs, but as this -demo environment is just one self-contained image, you can also run the image directly using another run time. - -Within the single container environment, a mini Kubernetes cluster is installed using `k3s `__. K3s -uses an in-container Docker daemon, run using `docker-in-docker configuration `__ -to orchestrate user containers. - -Start the Sandbox -================== - -To spin up a Flyte Sandbox, run: - -.. prompt:: bash $ - - flytectl demo start - -This command runs a Docker container, which itself comes with a Docker registry -on ``localhost:30000`` so you can build images outside of the docker-in-docker -container by tagging your containers with ``localhost:30000/imgname:tag`` and -pushing the image. - -The local Postgres installation is also available on port ``30001`` for users -who wish to dig deeper into the storage layer. - -.. div:: shadow p-3 mb-8 rounded - - **Expected Output:** - - .. code-block:: - - 👨‍💻 Flyte is ready! Flyte UI is available at http://localhost:30080/console 🚀 🚀 🎉 - ❇️ Run the following command to export sandbox environment variables for accessing flytectl - export FLYTECTL_CONFIG=~/.flyte/config-sandbox.yaml - 🐋 Flyte sandbox ships with a Docker registry. Tag and push custom workflow images to localhost:30000 - 📂 The Minio API is hosted on localhost:30002. 
Use http://localhost:30080/minio/login for Minio console - -Flytectl/Flyte-remote Configuration -___________________________________ - -The ``config-sandbox.yaml`` file contains configuration for clients to communicate with **FlyteAdmin**, which is the Flyte cluster backend component that processes all client requests such as workflow executions. The default values are enough to let you connect and use Flyte: - - -.. code-block:: yaml - - admin: - # For GRPC endpoints you might want to use dns:///flyte.myexample.com - endpoint: localhost:30080 - authType: Pkce - insecure: true - console: - endpoint: http://localhost:30080 - logger: - show-source: true - level: 0 - -.. note:: - - You can also create your own config file with `flytectl config init`, which - will create a config file at `~/.flyte/config.yaml`. - - Learn more about the configuration settings in the - {ref}`Deployment Guide ` - -Flyte Cluster Configuration -___________________________ - -Flyte Sandbox ships with a reasonable default configuration. However, you can specify overrides as necessary to fit your use case, in the ``~/.flyte/sandbox/config.yaml`` file. See the following example for enabling the Ray plugin (requires `kuberay-operator `__ to also be installed): - -.. code-block:: shell - - > cat ~/.flyte/sandbox/config.yaml - tasks: - task-plugins: - default-for-task-types: - ray: ray - enabled-plugins: - - container - - sidecar - - k8s-array - - agent-service - - ray - plugins: - ray: - ttlSecondsAfterFinished: 60 - -You can also specify additional cluster resource templates in the ``~/.flyte/sandbox/cluster-resource-templates`` directory. See the following example: - -.. 
code-block:: shell - - > cat ~/.flyte/sandbox/cluster-resource-templates/001-serviceaccount.yaml - apiVersion: v1 - kind: ServiceAccount - metadata: - name: flyte-worker - namespace: {{ namespace }} - -Once you are happy with the changes, simply run ``flytectl demo reload`` to trigger a reload of the sandbox with the updated configuration. - -Now that you have the sandbox cluster running, you can now go to the :ref:`User Guide ` or -:ref:`Tutorials ` to run tasks and workflows written in ``flytekit``, the Python SDK for Flyte. diff --git a/rsts/deployment/index.rst b/rsts/deployment/index.rst deleted file mode 100644 index 820e941bc4..0000000000 --- a/rsts/deployment/index.rst +++ /dev/null @@ -1,66 +0,0 @@ -.. _deployment: - -############# -Deployment -############# - -These *Deployment Guides* are primarily for platform and devops engineers to learn how to deploy and administer Flyte. - -The sections below walk through how to create a Flyte cluster and cover topics related to enabling and configuring -plugins, authentication, performance tuning, and maintaining Flyte as a production-grade service. - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. link-button:: deployment-deployment - :type: ref - :text: 🛣 Deployment Paths - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Walkthroughs for deploying Flyte, from the most basic to a fully-featured, multi-cluster production system. - - --- - - .. link-button:: deployment-plugin-setup - :type: ref - :text: 🔌 Plugin Setup - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Enable backend plugins to extend Flyte's capabilities, such as hooks for K8s, AWS, GCP, and Web API services. - - --- - - .. link-button:: deployment-agent-setup - :type: ref - :text: 🤖 Agent Setup - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Enable Flyte agents to extend Flyte's capabilities, including features like File sensor, Databricks job, and Snowflake query services. - - --- - - .. 
link-button:: deployment-configuration - :type: ref - :text: 🎛 Cluster Configuration - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - How to configure the various components of your cluster. - - --- - - .. link-button:: deployment-configuration-generated - :type: ref - :text: 📖 Configuration Reference - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Reference docs for configuration settings for Flyte's backend services. - - --- - - .. link-button:: deployment-security-overview - :type: ref - :text: 🔒 Security Overview - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Read for comments on security in Flyte. diff --git a/rsts/deployment/plugins/aws/athena.rst b/rsts/deployment/plugins/aws/athena.rst deleted file mode 100644 index 34edafc4bd..0000000000 --- a/rsts/deployment/plugins/aws/athena.rst +++ /dev/null @@ -1,87 +0,0 @@ -.. _deployment-plugin-setup-aws-athena: - -Athena Plugin -============= - -This guide provides an overview of setting up Athena in your Flyte deployment. - -.. note:: - Please note that the Athena plugin requires a Flyte deployment in the AWS cloud; it won't work with demo/GCP/Azure. - -Set up the AWS Flyte cluster ----------------------------- - -1. Ensure you have a functional Flyte cluster up and running in `AWS `__ -2. Verify that you possess the correct ``kubeconfig`` and have selected the appropriate Kubernetes context -3. Double-check that your ``~/.flyte/config.yaml`` file contains the correct Flytectl configuration - -Specify plugin configuration ----------------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - Edit the relevant YAML file to specify the plugin. - - .. code-block:: yaml - :emphasize-lines: 7,11 - - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - athena - default-for-task-types: - - container: container - - container_array: k8s-array - - athena: athena - - .. 
group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and include the following configuration: - - .. code-block:: yaml - - configmap: - enabled_plugins: - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - athena - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - athena: athena - -Ensure that the propeller has the correct service account for Athena. - -Upgrade the Flyte Helm release ------------------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - .. code-block:: bash - - helm upgrade flyteorg/flyte-binary -n --values - - Replace ```` with the name of your release (e.g., ``flyte-backend``), - ```` with the name of your namespace (e.g., ``flyte``), - and ```` with the name of your YAML file. - - .. group-tab:: Flyte core - - .. code-block:: bash - - helm upgrade flyte/flyte-core -n --values values-override.yaml - - Replace ```` with the name of your release (e.g., ``flyte``) - and ```` with the name of your namespace (e.g., ``flyte``). diff --git a/rsts/deployment/plugins/aws/batch.rst b/rsts/deployment/plugins/aws/batch.rst deleted file mode 100644 index ba6d069b74..0000000000 --- a/rsts/deployment/plugins/aws/batch.rst +++ /dev/null @@ -1,165 +0,0 @@ -.. _deployment-plugin-setup-aws-array: - -AWS Batch -========= - -This setup document applies to both :py:func:`map tasks ` -and single tasks running on AWS Batch. - -.. note:: - - For single [non-map] task use, please take note of - the additional code when updating the flytepropeller config. - -AWS Batch simplifies the process for developers, scientists and engineers to run -hundreds of thousands of batch computing jobs on AWS. - -Flyte abstracts away the complexity of integrating AWS Batch into users' workflows, -taking care of packaging inputs, reading outputs, scheduling map tasks and -optimizing AWS Batch job queues for load distribution and priority coordination. 
- -Set up AWS Batch ----------------- - -Follow the guide `Running batch jobs -at scale for less `__. - -By the end of this step, your AWS Account should have a configured compute environment -and one or more AWS Batch Job Queues. - -Modify users' AWS IAM role trust policy document ------------------------------------------------- - -Follow the guide `AWS Batch Execution -IAM role `__. - -When running workflows in Flyte, users can specify a Kubernetes service account and/or an IAM Role to run as. -For AWS Batch, an IAM Role must be specified. For each of these IAM Roles, modify the trust policy -to allow elastic container service (ECS) to assume the role. - -Modify system's AWS IAM role policies -------------------------------------- - -Follow the guide `Granting a user permissions to pass a -role to an AWS service `__. - -The best practice for granting permissions to Flyte components is by utilizing OIDC, -as described in the -`OIDC documentation `__. -This approach entails assigning an IAM Role to each service account being used. -To proceed, identify the IAM Role associated with the flytepropeller's Kubernetes service account, -and subsequently, modify the policy document to enable the role to pass other roles to AWS Batch. - -Update FlyteAdmin configuration -------------------------------- - -FlyteAdmin must be informed of all the AWS Batch job queues -and how the system should distribute the load among them. -The simplest setup is as follows: - -.. code-block:: yaml - - flyteadmin: - roleNameKey: "eks.amazonaws.com/role-arn" - queues: - # A list of items, one per AWS Batch Job Queue. - executionQueues: - # The name of the job queue from AWS Batch - - dynamic: "tutorial" - # A list of tags/attributes that can be used to match workflows to this queue. - attributes: - - default - # A list of configs to match project and/or domain and/or workflows to job queues using tags. 
- workflowConfigs: - # An empty rule to match any workflow to the queue tagged as "default" - - tags: - - default - -If you are using Helm, you can add this block under the ``configMaps.adminServer`` section, -as shown `here `__. - -For a more complex matching configuration, the example below defines three different queues -with distinct attributes and matching logic based on project/domain/workflowName. - -.. code-block:: yaml - - queues: - executionQueues: - - dynamic: "gpu_dynamic" - attributes: - - gpu - - dynamic: "critical" - attributes: - - critical - - dynamic: "default" - attributes: - - default - workflowConfigs: - - project: "my_queue_1" - domain: "production" - workflowName: "my_workflow_1" - tags: - - critical - - project: "production" - workflowName: "my_workflow_2" - tags: - - gpu - - project: "my_queue_3" - domain: "production" - workflowName: "my_workflow_3" - tags: - - critical - - tags: - - default - -These settings can also be dynamically altered through ``flytectl`` (or FlyteAdmin API). -Learn about the :ref:`core concept here `. -For guidance on how to dynamically update these configurations, refer to the :ref:`Flytectl docs `. - -Update FlytePropeller's configuration -------------------------------------- - -The AWS Array Plugin requires specific configurations to ensure proper communication with the AWS Batch Service. - -These configurations reside within FlytePropeller's configMap. Modify the config in the relevant YAML file to set the following keys: - -.. code-block:: yaml - - plugins: - aws: - batch: - # Must match that set in flyteAdmin's configMap flyteadmin.roleNameKey - roleAnnotationKey: eks.amazonaws.com/role-arn - # Must match the desired region to launch these tasks. - region: us-east-2 - tasks: - task-plugins: - enabled-plugins: - # Enable aws_array task plugin. - - aws_array - default-for-task-types: - # Set it as the default handler for array/map tasks. 
- container_array: aws_array - # Make sure to add this line to enable single (non-map) AWS Batch tasks - aws-batch: aws_array - -.. note:: - - To register the `map task - `__ on Flyte, - use the command ``pyflyte register ``. - Launch the execution through the FlyteConsole by selecting the appropriate ``IAM Role`` and entering the full - ``AWS Arn`` of an IAM Role configured according to the above guide. - - Once the task starts executing, you'll find a link for the AWS Array Job in the log links section of the Flyte Console. - As individual jobs start getting scheduled, links to their respective CloudWatch log streams will also appear in the UI. - - .. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/deployment/aws_plugin_setup/map_task_success.png - :alt: A screenshot of Flyte Console displaying log links for a successful array job. - - *A screenshot of Flyte Console displaying log links for a successful array job.* - - .. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/deployment/aws_plugin_setup/map_task_failure.png - :alt: A screenshot of Flyte Console displaying log links for a failed array job. - - *A screenshot of Flyte Console displaying log links for a failed array job.* diff --git a/rsts/deployment/plugins/aws/index.rst b/rsts/deployment/plugins/aws/index.rst deleted file mode 100644 index b2231ca574..0000000000 --- a/rsts/deployment/plugins/aws/index.rst +++ /dev/null @@ -1,37 +0,0 @@ -.. _deployment-plugin-setup-aws: - -Configure AWS Plugins -===================== - -.. tags:: AWS, Integration, MachineLearning, Data, Advanced - -Discover the process of setting up AWS plugins for Flyte. - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - - .. link-button:: deployment-plugin-setup-aws-array - :type: ref - :text: AWS Batch - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up the AWS Batch plugin. - - --- - - .. 
link-button:: deployment-plugin-setup-aws-athena - :type: ref - :text: AWS Athena - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up the AWS Athena plugin. - -.. toctree:: - :maxdepth: 1 - :name: AWS plugin setup - :hidden: - - batch - athena diff --git a/rsts/deployment/plugins/gcp/bigquery.rst b/rsts/deployment/plugins/gcp/bigquery.rst deleted file mode 100644 index 03b21e02e1..0000000000 --- a/rsts/deployment/plugins/gcp/bigquery.rst +++ /dev/null @@ -1,90 +0,0 @@ -.. _deployment-plugin-setup-gcp-bigquery: - -Google BigQuery Plugin -====================== - -This guide provides an overview of setting up BigQuery in your Flyte deployment. -Please note that the BigQuery plugin requires Flyte deployment in the GCP cloud; -it is not compatible with demo/AWS/Azure. - -Set up the GCP Flyte cluster ----------------------------- - -* Ensure you have a functional Flyte cluster running in `GCP `__. -* Create a service account for BigQuery. For more details, refer to: https://cloud.google.com/bigquery/docs/quickstarts/quickstart-client-libraries. -* Verify that you have the correct kubeconfig and have selected the appropriate Kubernetes context. -* Confirm that you have the correct Flytectl configuration at ``~/.flyte/config.yaml``. - -Specify plugin configuration ----------------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - Edit the relevant YAML file to specify the plugin. - - .. code-block:: yaml - :emphasize-lines: 7,11 - - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - bigquery - default-for-task-types: - - container: container - - container_array: k8s-array - - bigquery_query_job_task: bigquery - - .. group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and add the following configuration to it. - - .. 
code-block:: yaml - - configmap: - enabled_plugins: - # -- Tasks specific configuration [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/task/config#GetConfig) - tasks: - # -- Plugins configuration, [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/task/config#TaskPluginConfig) - task-plugins: - # -- [Enabled Plugins](https://pkg.go.dev/github.com/flyteorg/flyteplugins/go/tasks/config#Config). Enable sagemaker*, athena if you install the backend - enabled-plugins: - - container - - sidecar - - k8s-array - - bigquery - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - bigquery_query_job_task: bigquery - -Ensure that the propeller has the correct service account for BigQuery. - -Upgrade the Flyte Helm release ------------------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - .. code-block:: bash - - helm upgrade flyteorg/flyte-binary -n --values - - Replace ```` with the name of your release (e.g., ``flyte-backend``), - ```` with the name of your namespace (e.g., ``flyte``), - and ```` with the name of your YAML file. - - .. group-tab:: Flyte core - - .. code-block:: bash - - helm upgrade flyte/flyte-core -n --values values-override.yaml - - Replace ```` with the name of your release (e.g., ``flyte``) - and ```` with the name of your namespace (e.g., ``flyte``). diff --git a/rsts/deployment/plugins/gcp/index.rst b/rsts/deployment/plugins/gcp/index.rst deleted file mode 100644 index 2fcd827ed6..0000000000 --- a/rsts/deployment/plugins/gcp/index.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. _deployment-plugin-setup-gcp: - -Configure GCP Plugins -===================== - -.. tags:: GCP, Integration, Data, Advanced - -Discover the process of setting up GCP plugins for Flyte. - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. 
link-button:: deployment-plugin-setup-gcp-bigquery - :type: ref - :text: Google BigQuery - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up the Google BigQuery plugin. - -.. toctree:: - :maxdepth: 1 - :name: GCP plugin setup - :hidden: - - bigquery diff --git a/rsts/deployment/plugins/index.rst b/rsts/deployment/plugins/index.rst deleted file mode 100644 index d83bd79c54..0000000000 --- a/rsts/deployment/plugins/index.rst +++ /dev/null @@ -1,59 +0,0 @@ -.. _deployment-plugin-setup: - -Plugin Setup -============ - -Flyte integrates with a wide variety of `data, ML and analytical tools `__. -Some of these plugins, such as Databricks, Kubeflow, and Ray integrations, require the Flyte cluster administrator to enable them. - -This section of the *Deployment Guides* will cover how to configure your cluster -to use these plugins in your workflows written in ``flytekit``. - - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. link-button:: deployment-plugin-setup-k8s - :type: ref - :text: K8s Plugins - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up the K8s Operator Plugins. - - --- - - .. link-button:: deployment-plugin-setup-webapi - :type: ref - :text: Web API Plugin - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up the Web API Plugins. - - --- - - .. link-button:: deployment-plugin-setup-aws - :type: ref - :text: AWS Plugins - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up AWS-specific Plugins. - - --- - - .. link-button:: deployment-plugin-setup-gcp - :type: ref - :text: GCP Plugins - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up GCP-specific Plugins. - -.. 
toctree:: - :maxdepth: 1 - :name: Plugin Setup - :hidden: - - k8s/index - aws/index - gcp/index - webapi/index diff --git a/rsts/deployment/plugins/k8s/index.rst b/rsts/deployment/plugins/k8s/index.rst deleted file mode 100644 index 9e81351043..0000000000 --- a/rsts/deployment/plugins/k8s/index.rst +++ /dev/null @@ -1,833 +0,0 @@ -.. _deployment-plugin-setup-k8s: - -Configure Kubernetes Plugins -============================ - -.. tags:: Kubernetes, Integration, Spark, AWS, GCP, Advanced - -This guide provides an overview of setting up the Kubernetes Operator backend plugin in your Flyte deployment. - -Spin up a cluster ------------------ - -.. tabs:: - - .. group-tab:: Flyte binary - - .. tabs:: - - .. group-tab:: Demo cluster - - .. tabs:: - - .. group-tab:: PyTorch - - Enable the PyTorch plugin on the demo cluster by adding the following block to ``~/.flyte/sandbox/config.yaml``: - - .. code-block:: yaml - - tasks: - task-plugins: - default-for-task-types: - container: container - container_array: k8s-array - sidecar: sidecar - pytorch: pytorch - enabled-plugins: - - container - - k8s-array - - sidecar - - pytorch - - .. group-tab:: TensorFlow - - Enable the TensorFlow plugin on the demo cluster by adding the following block to ``~/.flyte/sandbox/config.yaml``: - - .. code-block:: yaml - - tasks: - task-plugins: - default-for-task-types: - container: container - container_array: k8s-array - sidecar: sidecar - tensorflow: tensorflow - enabled-plugins: - - container - - k8s-array - - sidecar - - tensorflow - - .. group-tab:: MPI - - Enable the MPI plugin on the demo cluster by adding the following block to ``~/.flyte/sandbox/config.yaml``: - - .. code-block:: yaml - - tasks: - task-plugins: - default-for-task-types: - container: container - container_array: k8s-array - sidecar: sidecar - mpi: mpi - enabled-plugins: - - container - - k8s-array - - sidecar - - mpi - - .. 
group-tab:: Ray - - Enable the Ray plugin on the demo cluster by adding the following block to ``~/.flyte/sandbox/config.yaml``: - - .. code-block:: yaml - - tasks: - task-plugins: - default-for-task-types: - container: container - container_array: k8s-array - sidecar: sidecar - ray: ray - enabled-plugins: - - container - - k8s-array - - sidecar - - ray - - .. group-tab:: Spark - - Enable the Spark plugin on the demo cluster by adding the following config to ``~/.flyte/sandbox/config.yaml``: - - .. code-block:: yaml - - tasks: - task-plugins: - default-for-task-types: - container: container - container_array: k8s-array - sidecar: sidecar - spark: spark - enabled-plugins: - - container - - sidecar - - k8s-array - - spark - plugins: - spark: - spark-config-default: - - spark.driver.cores: "1" - - spark.hadoop.fs.s3a.aws.credentials.provider: "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider" - - spark.hadoop.fs.s3a.endpoint: "http://minio.flyte:9000" - - spark.hadoop.fs.s3a.access.key: "minio" - - spark.hadoop.fs.s3a.secret.key: "miniostorage" - - spark.hadoop.fs.s3a.path.style.access: "true" - - spark.kubernetes.allocation.batch.size: "50" - - spark.hadoop.fs.s3a.acl.default: "BucketOwnerFullControl" - - spark.hadoop.fs.s3n.impl: "org.apache.hadoop.fs.s3a.S3AFileSystem" - - spark.hadoop.fs.AbstractFileSystem.s3n.impl: "org.apache.hadoop.fs.s3a.S3A" - - spark.hadoop.fs.s3.impl: "org.apache.hadoop.fs.s3a.S3AFileSystem" - - spark.hadoop.fs.AbstractFileSystem.s3.impl: "org.apache.hadoop.fs.s3a.S3A" - - spark.hadoop.fs.s3a.impl: "org.apache.hadoop.fs.s3a.S3AFileSystem" - - spark.hadoop.fs.AbstractFileSystem.s3a.impl: "org.apache.hadoop.fs.s3a.S3A" - cluster_resources: - refreshInterval: 5m - customData: - - production: - - projectQuotaCpu: - value: "5" - - projectQuotaMemory: - value: "4000Mi" - - staging: - - projectQuotaCpu: - value: "2" - - projectQuotaMemory: - value: "3000Mi" - - development: - - projectQuotaCpu: - value: "4" - - projectQuotaMemory: - value: 
"5000Mi" - refresh: 5m - - Also add the following cluster resource templates to the ``~/.flyte/sandbox/cluster-resource-templates`` directory: - - 1. ``serviceaccount.yaml`` - - .. code-block:: yaml - - apiVersion: v1 - kind: ServiceAccount - metadata: - name: default - namespace: "{{ namespace }}" - annotations: - eks.amazonaws.com/role-arn: "{{ defaultIamRole }}" - - 2. ``spark_role.yaml`` - - .. code-block:: yaml - - apiVersion: rbac.authorization.k8s.io/v1 - kind: Role - metadata: - name: spark-role - namespace: "{{ namespace }}" - rules: - - apiGroups: - - "" - resources: - - pods - - services - - configmaps - verbs: - - "*" - - 3. ``spark_service_account.yaml`` - - .. code-block:: yaml - - apiVersion: v1 - kind: ServiceAccount - metadata: - name: spark - namespace: "{{ namespace }}" - annotations: - eks.amazonaws.com/role-arn: "{{ defaultIamRole }}" - - 4. ``spark_role_binding.yaml`` - - .. code-block:: yaml - - apiVersion: rbac.authorization.k8s.io/v1 - kind: RoleBinding - metadata: - name: spark-role-binding - namespace: "{{ namespace }}" - roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: spark-role - subjects: - - kind: ServiceAccount - name: spark - namespace: "{{ namespace }}" - - .. group-tab:: Dask - - Enable the Dask plugin on the demo cluster by adding the following block to ``~/.flyte/sandbox/config.yaml``: - - .. code-block:: yaml - - tasks: - task-plugins: - default-for-task-types: - container: container - container_array: k8s-array - sidecar: sidecar - dask: dask - enabled-plugins: - - container - - k8s-array - - sidecar - - dask - - Start the demo cluster by running the following command: - - .. code-block:: bash - - flytectl demo start - - .. group-tab:: Helm chart - - 1. Add the following to your values file under `configmap.inline`: - - .. 
code-block:: yaml - - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - K8S-ARRAY - - spark - - ray - default-for-task-types: - - container: container - - container_array: K8S-ARRAY - - spark: spark - - ray: ray - - 2. Install the :ref:`flyte-binary Helm chart `. - - .. group-tab:: Flyte core - - If you have installed Flyte using the `flyte-core Helm chart - `__, please ensure: - - * You have the correct kubeconfig and have selected the correct Kubernetes context. - * You have configured the correct flytectl settings in ``~/.flyte/config.yaml``. - -.. note:: - - Add the Flyte chart repo to Helm if you're installing via the Helm charts. - - .. code-block:: bash - - helm repo add flyteorg https://flyteorg.github.io/flyte - -Install the Kubernetes operator -------------------------------- - -.. tabs:: - - .. group-tab:: PyTorch/TensorFlow/MPI - - First, `install kustomize `__. - - Build and apply the training-operator. - - .. code-block:: bash - - export KUBECONFIG=$KUBECONFIG:~/.kube/config:~/.flyte/k3s/k3s.yaml - kustomize build "https://github.com/kubeflow/training-operator.git/manifests/overlays/standalone?ref=v1.5.0" | kubectl apply -f - - - **Optional: Using a gang scheduler** - - To address potential issues with worker pods of distributed training jobs being scheduled at different times - due to resource constraints, you can opt for a gang scheduler. This ensures that all worker pods are scheduled - simultaneously, reducing the likelihood of job failures caused by timeout errors. - - To `enable gang scheduling for the Kubeflow training-operator `__, - you can install the `Kubernetes scheduler plugins `__ - or the `Apache YuniKorn scheduler `__. - - 1. Install the `scheduler plugin `_ or - `Apache YuniKorn `_ as a second scheduler. - 2. Configure the Kubeflow training-operator to use the new scheduler: - - Create a manifest called ``kustomization.yaml`` with the following content: - - .. 
code-block:: yaml - - apiVersion: kustomize.config.k8s.io/v1beta1 - kind: Kustomization - - resources: - - github.com/kubeflow/training-operator/manifests/overlays/standalone - - patchesStrategicMerge: - - patch.yaml - - Create a patch file called ``patch.yaml`` with the following content: - - .. code-block:: yaml - - apiVersion: apps/v1 - kind: Deployment - metadata: - name: training-operator - spec: - template: - spec: - containers: - - name: training-operator - command: - - /manager - - --gang-scheduler-name= - - Install the patched kustomization with the following command: - - .. code-block:: bash - - kustomize build path/to/overlay/directory | kubectl apply -f - - - (Only for Apache YuniKorn) To configure gang scheduling with Apache YuniKorn, - make sure to set the following annotations in Flyte pod templates: - - - ``template.metadata.annotations.yunikorn.apache.org/task-group-name`` - - ``template.metadata.annotations.yunikorn.apache.org/task-groups`` - - ``template.metadata.annotations.yunikorn.apache.org/schedulingPolicyParameters`` - - For more configuration details, - refer to the `Apache YuniKorn Gang-Scheduling documentation - `__. - - 3. Use a Flyte pod template with ``template.spec.schedulerName: scheduler-plugins-scheduler`` - to use the new gang scheduler for your tasks. - - See the :ref:`using-k8s-podtemplates` section for more information on pod templates in Flyte. - You can set the scheduler name in the pod template passed to the ``@task`` decorator. However, to prevent the - two different schedulers from competing for resources, it is recommended to set the scheduler name in the pod template - in the ``flyte`` namespace which is applied to all tasks. Non distributed training tasks can be scheduled by the - gang scheduler as well. - - - For more information on pod templates in Flyte, refer to the :ref:`using-k8s-podtemplates` section. - You can set the scheduler name in the pod template passed to the ``@task`` decorator. 
- However, to avoid resource competition between the two different schedulers, - it is recommended to set the scheduler name in the pod template in the ``flyte`` namespace, - which is applied to all tasks. This allows non-distributed training tasks to be - scheduled by the gang scheduler as well. - - .. group-tab:: Ray - - To install the Ray Operator, run the following commands: - - .. code-block:: bash - - export KUBERAY_VERSION=v0.5.2 - kubectl create -k "github.com/ray-project/kuberay/manifests/cluster-scope-resources?ref=${KUBERAY_VERSION}&timeout=90s" - kubectl apply -k "github.com/ray-project/kuberay/manifests/base?ref=${KUBERAY_VERSION}&timeout=90s" - - .. group-tab:: Spark - - To add the Spark repository, run the following commands: - - .. code-block:: bash - - helm repo add spark-operator https://googlecloudplatform.github.io/spark-on-k8s-operator - - To install the Spark operator, run the following command: - - .. code-block:: bash - - helm install spark-operator spark-operator/spark-operator --namespace spark-operator --create-namespace - - .. group-tab:: Dask - - To add the Dask repository, run the following command: - - .. code-block:: bash - - helm repo add dask https://helm.dask.org - - To install the Dask operator, run the following command: - - .. code-block:: bash - - helm install dask-operator dask/dask-kubernetes-operator --namespace dask-operator --create-namespace - -Specify plugin configuration ----------------------------- - -.. tabs:: - - .. group-tab:: PyTorch - - .. tabs:: - - .. group-tab:: Flyte binary - - To specify the plugin when using the Helm chart, edit the relevant YAML file. - - .. code-block:: yaml - :emphasize-lines: 7,11 - - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - pytorch - default-for-task-types: - - container: container - - container_array: k8s-array - - pytorch: pytorch - - .. 
group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and add the following config to it: - - .. code-block:: yaml - - configmap: - enabled_plugins: - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - pytorch - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - pytorch: pytorch - - .. group-tab:: TensorFlow - - .. tabs:: - - .. group-tab:: Flyte binary - - To specify the plugin when using the Helm chart, edit the relevant YAML file. - - .. code-block:: yaml - :emphasize-lines: 7,11 - - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - tensorflow - default-for-task-types: - - container: container - - container_array: k8s-array - - tensorflow: tensorflow - - .. group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and add the following config to it: - - .. code-block:: yaml - - configmap: - enabled_plugins: - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - tensorflow - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - tensorflow: tensorflow - - .. group-tab:: MPI - - .. tabs:: - - .. group-tab:: Flyte binary - - To specify the plugin when using the Helm chart, edit the relevant YAML file. - - .. code-block:: yaml - :emphasize-lines: 7,11 - - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - mpi - default-for-task-types: - - container: container - - container_array: k8s-array - - mpi: mpi - - .. group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and add the following config to it: - - .. code-block:: yaml - - configmap: - enabled_plugins: - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - mpi - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - mpi: mpi - - .. group-tab:: Ray - - .. tabs:: - - .. 
group-tab:: Flyte binary - - 1. Make sure that your Helm values file includes the following configuration: - - .. code-block:: yaml - - configuration: - inline: - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - ray - default-for-task-types: - - container: container - - container_array: k8s-array - - ray: ray - - rbac: - extraRules: - - apiGroups: - - "ray.io" - resources: - - rayjob - verbs: - - create - - get - - list - - patch - - update - - 2. Run a ``helm upgrade`` operation - - .. group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and add the following config to it: - - .. code-block:: yaml - - configmap: - enabled_plugins: - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - ray - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - ray: ray - - .. group-tab:: Spark - - .. tabs:: - - .. group-tab:: Flyte binary - - To specify the plugin when using the Helm chart, edit the relevant YAML file. - - .. group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and add the following config to it: - - .. 
code-block:: yaml - - cluster_resource_manager: - enabled: true - config: - cluster_resources: - refreshInterval: 5m - templatePath: "/etc/flyte/clusterresource/templates" - customData: - - production: - - projectQuotaCpu: - value: "5" - - projectQuotaMemory: - value: "4000Mi" - - staging: - - projectQuotaCpu: - value: "2" - - projectQuotaMemory: - value: "3000Mi" - - development: - - projectQuotaCpu: - value: "4" - - projectQuotaMemory: - value: "3000Mi" - refresh: 5m - - # -- Resource templates that should be applied - templates: - # -- Template for namespaces resources - - key: aa_namespace - value: | - apiVersion: v1 - kind: Namespace - metadata: - name: {{ namespace }} - spec: - finalizers: - - kubernetes - - - key: ab_project_resource_quota - value: | - apiVersion: v1 - kind: ResourceQuota - metadata: - name: project-quota - namespace: {{ namespace }} - spec: - hard: - limits.cpu: {{ projectQuotaCpu }} - limits.memory: {{ projectQuotaMemory }} - - - key: ac_spark_role - value: | - apiVersion: rbac.authorization.k8s.io/v1beta1 - kind: Role - metadata: - name: spark-role - namespace: {{ namespace }} - rules: - - apiGroups: ["*"] - resources: - - pods - verbs: - - '*' - - apiGroups: ["*"] - resources: - - services - verbs: - - '*' - - apiGroups: ["*"] - resources: - - configmaps - verbs: - - '*' - - - key: ad_spark_service_account - value: | - apiVersion: v1 - kind: ServiceAccount - metadata: - name: spark - namespace: {{ namespace }} - - - key: ae_spark_role_binding - value: | - apiVersion: rbac.authorization.k8s.io/v1beta1 - kind: RoleBinding - metadata: - name: spark-role-binding - namespace: {{ namespace }} - roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: spark-role - subjects: - - kind: ServiceAccount - name: spark - namespace: {{ namespace }} - - sparkoperator: - enabled: true - plugin_config: - plugins: - spark: - # Edit the Spark configuration as you see fit - spark-config-default: - - spark.driver.cores: "1" - - 
spark.hadoop.fs.s3a.aws.credentials.provider: "com.amazonaws.auth.DefaultAWSCredentialsProviderChain" - - spark.kubernetes.allocation.batch.size: "50" - - spark.hadoop.fs.s3a.acl.default: "BucketOwnerFullControl" - - spark.hadoop.fs.s3n.impl: "org.apache.hadoop.fs.s3a.S3AFileSystem" - - spark.hadoop.fs.AbstractFileSystem.s3n.impl: "org.apache.hadoop.fs.s3a.S3A" - - spark.hadoop.fs.s3.impl: "org.apache.hadoop.fs.s3a.S3AFileSystem" - - spark.hadoop.fs.AbstractFileSystem.s3.impl: "org.apache.hadoop.fs.s3a.S3A" - - spark.hadoop.fs.s3a.impl: "org.apache.hadoop.fs.s3a.S3AFileSystem" - - spark.hadoop.fs.AbstractFileSystem.s3a.impl: "org.apache.hadoop.fs.s3a.S3A" - - spark.network.timeout: 600s - - spark.executorEnv.KUBERNETES_REQUEST_TIMEOUT: 100000 - - spark.executor.heartbeatInterval: 60s - configmap: - enabled_plugins: - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - spark - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - spark: spark - - .. group-tab:: Dask - - .. tabs:: - - .. group-tab:: Flyte binary - - Edit the relevant YAML file to specify the plugin. - - .. code-block:: yaml - :emphasize-lines: 7,11 - - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - dask - default-for-task-types: - - container: container - - container_array: k8s-array - - dask: dask - - .. group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and add the following config to it: - - .. code-block:: yaml - - configmap: - enabled_plugins: - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - dask - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - dask: dask - -Upgrade the deployment ----------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - If you are installing Flyte via the Helm chart, run the following command: - - .. 
note:: - - There is no need to run ``helm upgrade`` for Spark. - - .. code-block:: bash - - helm upgrade flyteorg/flyte-binary -n --values - - Replace ```` with the name of your release (e.g., ``flyte-backend``), - ```` with the name of your namespace (e.g., ``flyte``), - and ```` with the name of your YAML file. - - .. group-tab:: Flyte core - - .. code-block:: bash - - helm upgrade flyte/flyte-core -n --values values-override.yaml - - Replace ```` with the name of your release (e.g., ``flyte``) - and ```` with the name of your namespace (e.g., ``flyte``). - -Wait for the upgrade to complete. You can check the status of the deployment pods by running the following command: - -.. code-block:: bash - - kubectl get pods -n --all-namespaces diff --git a/rsts/deployment/plugins/webapi/databricks.rst b/rsts/deployment/plugins/webapi/databricks.rst deleted file mode 100644 index ee38a481df..0000000000 --- a/rsts/deployment/plugins/webapi/databricks.rst +++ /dev/null @@ -1,451 +0,0 @@ -.. _deployment-plugin-setup-webapi-databricks: - -Databricks Plugin -================= - -This guide provides an overview of how to set up Databricks in your Flyte deployment. - -Spin up a cluster ------------------ - -.. tabs:: - - .. group-tab:: Flyte binary - - You can spin up a demo cluster using the following command: - - .. code-block:: bash - - flytectl demo start - - Or install Flyte using the :ref:`flyte-binary helm chart `. - - .. group-tab:: Flyte core - - If you've installed Flyte using the - `flyte-core helm chart `__, please ensure: - - * You have the correct kubeconfig and have selected the correct Kubernetes context. - * You have configured the correct flytectl settings in ``~/.flyte/config.yaml``. - -.. note:: - - Add the Flyte chart repo to Helm if you're installing via the Helm charts. - - .. 
code-block:: bash - - helm repo add flyteorg https://flyteorg.github.io/flyte - -Databricks workspace --------------------- - -To set up your Databricks account, follow these steps: - -1. Create a `Databricks account `__. - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/deployment/plugins/databricks/databricks_workspace.png - :alt: A screenshot of Databricks workspace creation. - -2. Ensure that you have a Databricks workspace up and running. - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/deployment/plugins/databricks/open_workspace.png - :alt: A screenshot of Databricks workspace. - -3. Generate a `personal access token - `__ to be used in the Flyte configuration. - You can find the personal access token in the user settings within the workspace. ``User settings`` -> ``Developer`` -> ``Access tokens`` - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/deployment/plugins/databricks/databricks_access_token.png - :alt: A screenshot of access token. - -4. Enable custom containers on your Databricks cluster before you trigger the workflow. - -.. code-block:: bash - - curl -X PATCH -n -H "Authorization: Bearer " \ - https:///api/2.0/workspace-conf \ - -d '{"enableDcs": "true"}' - -For more detail, check `custom containers `__. - -5. Create an `instance profile -`__ -for the Spark cluster. This profile enables the Spark job to access your data in the S3 bucket. - -Create an instance profile using the AWS console (For AWS Users) -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -1. In the AWS console, go to the IAM service. -2. Click the Roles tab in the sidebar. -3. Click Create role. - - a. Under Trusted entity type, select AWS service. - b. Under Use case, select **EC2**. - c. Click Next. - d. At the bottom of the page, click Next. - e. In the Role name field, type a role name. - f. Click Create role. - -4. 
In the role list, click the **AmazonS3FullAccess** role. -5. Click Create role button. - -In the role summary, copy the Role ARN. - -.. image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/deployment/plugins/databricks/s3_arn.png - :alt: A screenshot of s3 arn. - -Locate the IAM role that created the Databricks deployment -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -If you don’t know which IAM role created the Databricks deployment, do the following: - -1. As an account admin, log in to the account console. -2. Go to ``Workspaces`` and click your workspace name. -3. In the Credentials box, note the role name at the end of the Role ARN - -For example, in the Role ARN ``arn:aws:iam::123456789123:role/finance-prod``, the role name is finance-prod - -Edit the IAM role that created the Databricks deployment -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -1. In the AWS console, go to the IAM service. -2. Click the Roles tab in the sidebar. -3. Click the role that created the Databricks deployment. -4. On the Permissions tab, click the policy. -5. Click Edit Policy. -6. Append the following block to the end of the Statement array. Ensure that you don’t overwrite any of the existing policy. Replace with the role you created in Configure S3 access with instance profiles. - -.. code-block:: bash - - { - "Effect": "Allow", - "Action": "iam:PassRole", - "Resource": "arn:aws:iam:::role/" - } - - -6. Upload the following ``entrypoint.py`` file to either -`DBFS `__ -(the final path will be ``dbfs:///FileStore/tables/entrypoint.py``) or S3. -This file will be executed by the Spark driver node, overriding the default command of the -`Databricks `__ job. This entrypoint file will - -1. Download the inputs from S3 to the local filesystem. -2. Execute the spark task. -3. Upload the outputs from the local filesystem to S3 for the downstream tasks to consume. - - -.. 
image:: https://raw.githubusercontent.com/flyteorg/static-resources/main/flyte/deployment/plugins/databricks/dbfs.png - :alt: A screenshot of dbfs. - -.. code-block:: python - - import os - import sys - from typing import List - - import click - import pandas - from flytekit.bin.entrypoint import fast_execute_task_cmd as _fast_execute_task_cmd - from flytekit.bin.entrypoint import execute_task_cmd as _execute_task_cmd - from flytekit.exceptions.user import FlyteUserException - from flytekit.tools.fast_registration import download_distribution - - - def fast_execute_task_cmd(additional_distribution: str, dest_dir: str, task_execute_cmd: List[str]): - if additional_distribution is not None: - if not dest_dir: - dest_dir = os.getcwd() - download_distribution(additional_distribution, dest_dir) - - # Insert the call to fast before the unbounded resolver args - cmd = [] - for arg in task_execute_cmd: - if arg == "--resolver": - cmd.extend(["--dynamic-addl-distro", additional_distribution, "--dynamic-dest-dir", dest_dir]) - cmd.append(arg) - - click_ctx = click.Context(click.Command("dummy")) - parser = _execute_task_cmd.make_parser(click_ctx) - args, _, _ = parser.parse_args(cmd[1:]) - _execute_task_cmd.callback(test=False, **args) - - - def main(): - args = sys.argv - click_ctx = click.Context(click.Command("dummy")) - if args[1] == "pyflyte-fast-execute": - parser = _fast_execute_task_cmd.make_parser(click_ctx) - args, _, _ = parser.parse_args(args[2:]) - fast_execute_task_cmd(**args) - elif args[1] == "pyflyte-execute": - parser = _execute_task_cmd.make_parser(click_ctx) - args, _, _ = parser.parse_args(args[2:]) - _execute_task_cmd.callback(test=False, dynamic_addl_distro=None, dynamic_dest_dir=None, **args) - else: - raise FlyteUserException(f"Unrecognized command: {args[1:]}") - - - if __name__ == '__main__': - main() - -Specify plugin configuration ----------------------------- -.. 
note:: - - Demo cluster saves the data to minio, but Databricks job saves the data to S3. - Therefore, you need to update the AWS credentials for the single binary deployment, so the pod can - access the S3 bucket that DataBricks job writes to. - - -.. tabs:: - - .. group-tab:: Flyte binary - - .. tabs:: - - .. group-tab:: Demo cluster - - Enable the Databricks plugin on the demo cluster by adding the following config to ``~/.flyte/sandbox/config.yaml``: - - .. code-block:: yaml - - tasks: - task-plugins: - default-for-task-types: - container: container - container_array: k8s-array - sidecar: sidecar - spark: databricks - enabled-plugins: - - container - - sidecar - - k8s-array - - databricks - plugins: - databricks: - entrypointFile: dbfs:///FileStore/tables/entrypoint.py - databricksInstance: .cloud.databricks.com - k8s: - default-env-vars: - - FLYTE_AWS_ACCESS_KEY_ID: - - FLYTE_AWS_SECRET_ACCESS_KEY: - - AWS_DEFAULT_REGION: - remoteData: - region: - scheme: aws - signedUrls: - durationMinutes: 3 - propeller: - rawoutput-prefix: s3:/// - storage: - container: "" - type: s3 - stow: - kind: s3 - config: - region: - disable_ssl: true - v2_signing: false - auth_type: accesskey - access_key_id: - secret_key: - signedURL: - stowConfigOverride: - endpoint: "" - - Substitute ```` with the name of your Databricks account, - ```` with the region where you created your AWS bucket, - ```` with your AWS access key ID, - ```` with your AWS secret access key, - and ```` with the name of your S3 bucket. - - .. group-tab:: Helm chart - - Edit the relevant YAML file to specify the plugin. - - .. code-block:: yaml - :emphasize-lines: 7,11 - - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - databricks - default-for-task-types: - - container: container - - container_array: k8s-array - - spark: databricks - - .. 
code-block:: yaml - :emphasize-lines: 3-5 - - inline: - plugins: - databricks: - entrypointFile: dbfs:///FileStore/tables/entrypoint.py - databricksInstance: .cloud.databricks.com - - Substitute ```` with the name of your Databricks account. - - .. group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and add the following config to it: - - .. code-block:: yaml - :emphasize-lines: 9,14,15-21 - - configmap: - enabled_plugins: - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - databricks - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - spark: databricks - databricks: - enabled: True - plugin_config: - plugins: - databricks: - entrypointFile: dbfs:///FileStore/tables/entrypoint.py - databricksInstance: .cloud.databricks.com - - Substitute ```` with the name of your Databricks account. - -Add the Databricks access token -------------------------------- - -Add the Databricks access token to FlytePropeller: - -.. tabs:: - - .. group-tab:: Flyte binary - - .. tabs:: - - .. group-tab:: Demo cluster - - Add the access token as an environment variable to the ``flyte-sandbox`` deployment. - - .. code-block:: bash - - kubectl edit deploy flyte-sandbox -n flyte - - Update the ``env`` configuration: - - .. code-block:: yaml - :emphasize-lines: 12-13 - - env: - - name: POD_NAME - valueFrom: - fieldRef: - apiVersion: v1 - fieldPath: metadata.name - - name: POD_NAMESPACE - valueFrom: - fieldRef: - apiVersion: v1 - fieldPath: metadata.namespace - - name: FLYTE_SECRET_FLYTE_DATABRICKS_API_TOKEN - value: - image: flyte-binary:sandbox - ... - - .. group-tab:: Helm chart - - Create an external secret as follows: - - .. code-block:: bash - - cat < - EOF - - Reference the newly created secret in - ``.Values.configuration.auth.clientSecretsExternalSecretRef`` - in your YAML file as follows: - - .. 
code-block:: yaml - :emphasize-lines: 3 - - configuration: - auth: - clientSecretsExternalSecretRef: flyte-binary-client-secrets-external-secret - - Replace ```` with your access token. - - .. group-tab:: Flyte core - - Add the access token as a secret to ``flyte-secret-auth``. - - .. code-block:: bash - - kubectl edit secret -n flyte flyte-secret-auth - - .. code-block:: yaml - :emphasize-lines: 3 - - apiVersion: v1 - data: - FLYTE_DATABRICKS_API_TOKEN: - kind: Secret - ... - - Replace ```` with your access token. - -Upgrade the deployment ----------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - .. tabs:: - - .. group-tab:: Demo cluster - - .. code-block:: bash - - kubectl rollout restart deployment flyte-sandbox -n flyte - - .. group-tab:: Helm chart - - .. code-block:: bash - - helm upgrade flyteorg/flyte-binary -n --values - - Replace ```` with the name of your release (e.g., ``flyte-backend``), - ```` with the name of your namespace (e.g., ``flyte``), - and ```` with the name of your YAML file. - - .. group-tab:: Flyte core - - .. code-block:: - - helm upgrade flyte/flyte-core -n --values values-override.yaml - - Replace ```` with the name of your release (e.g., ``flyte``) - and ```` with the name of your namespace (e.g., ``flyte``). - -Wait for the upgrade to complete. You can check the status of the deployment pods by running the following command: - -.. code-block:: - - kubectl get pods -n flyte - -For databricks plugin on the Flyte cluster, please refer to `Databricks Plugin Example `_ diff --git a/rsts/deployment/plugins/webapi/index.rst b/rsts/deployment/plugins/webapi/index.rst deleted file mode 100644 index c54cb3cb87..0000000000 --- a/rsts/deployment/plugins/webapi/index.rst +++ /dev/null @@ -1,39 +0,0 @@ -.. _deployment-plugin-setup-webapi: - -Configure Web APIs -================== - -.. tags:: WebAPI, Integration, Data, Advanced - -Discover the process of setting up Web API plugins for Flyte. - -.. 
panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. link-button:: deployment-plugin-setup-webapi-snowflake - :type: ref - :text: Snowflake Plugin - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up the Snowflake plugin. - - --- - - .. link-button:: deployment-plugin-setup-webapi-databricks - :type: ref - :text: Databricks Plugin - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guide to setting up the Databricks plugin. - - -.. toctree:: - :maxdepth: 1 - :name: Web API plugin setup - :hidden: - - snowflake - databricks - - diff --git a/rsts/deployment/plugins/webapi/snowflake.rst b/rsts/deployment/plugins/webapi/snowflake.rst deleted file mode 100644 index ac1faaa9dc..0000000000 --- a/rsts/deployment/plugins/webapi/snowflake.rst +++ /dev/null @@ -1,245 +0,0 @@ -.. _deployment-plugin-setup-webapi-snowflake: - -Snowflake Plugin -================ - -This guide provides an overview of how to set up Snowflake in your Flyte deployment. - -Spin up a cluster ------------------ - -.. tabs:: - - .. group-tab:: Flyte binary - - You can spin up a demo cluster using the following command: - - .. code-block:: bash - - flytectl demo start - - Or install Flyte using the :ref:`flyte-binary helm chart `. - - .. group-tab:: Flyte core - - If you've installed Flyte using the - `flyte-core helm chart `__, - please ensure: - - * You have the correct kubeconfig and have selected the correct Kubernetes context. - * You have configured the correct flytectl settings in ``~/.flyte/config.yaml``. - -.. note:: - - Add the Flyte chart repo to Helm if you're installing via the Helm charts. - - .. code-block:: bash - - helm repo add flyteorg https://flyteorg.github.io/flyte - -Specify plugin configuration ----------------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - .. tabs:: - - .. group-tab:: Demo cluster - - Enable the Snowflake plugin on the demo cluster by adding the following block to ``~/.flyte/sandbox/config.yaml``: - - .. 
code-block:: yaml - - tasks: - task-plugins: - default-for-task-types: - container: container - container_array: k8s-array - sidecar: sidecar - snowflake: snowflake - enabled-plugins: - - container - - k8s-array - - sidecar - - snowflake - - .. group-tab:: Helm chart - - Edit the relevant YAML file to specify the plugin. - - .. code-block:: yaml - :emphasize-lines: 7,11 - - tasks: - task-plugins: - enabled-plugins: - - container - - sidecar - - k8s-array - - snowflake - default-for-task-types: - - container: container - - container_array: k8s-array - - snowflake: snowflake - - .. group-tab:: Flyte core - - Create a file named ``values-override.yaml`` and add the following config to it: - - .. code-block:: yaml - - configmap: - enabled_plugins: - # -- Tasks specific configuration [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/task/config#GetConfig) - tasks: - # -- Plugins configuration, [structure](https://pkg.go.dev/github.com/flyteorg/flytepropeller/pkg/controller/nodes/task/config#TaskPluginConfig) - task-plugins: - # -- [Enabled Plugins](https://pkg.go.dev/github.com/flyteorg/flyteplugins/go/tasks/config#Config). Enable sagemaker*, athena if you install the backend - # plugins - enabled-plugins: - - container - - sidecar - - k8s-array - - snowflake - default-for-task-types: - container: container - sidecar: sidecar - container_array: k8s-array - snowflake: snowflake - -Obtain and add the Snowflake JWT token --------------------------------------- - -Create a Snowflake account, and follow the `Snowflake docs -`__ -to generate a JWT token. -Then, add the Snowflake JWT token to FlytePropeller. - -.. tabs:: - - .. group-tab:: Flyte binary - - .. tabs:: - - .. group-tab:: Demo cluster - - Add the JWT token as an environment variable to the ``flyte-sandbox`` deployment. - - .. code-block:: bash - - kubectl edit deploy flyte-sandbox -n flyte - - Update the ``env`` configuration: - - .. 
code-block:: yaml - :emphasize-lines: 12-13 - - env: - - name: POD_NAME - valueFrom: - fieldRef: - apiVersion: v1 - fieldPath: metadata.name - - name: POD_NAMESPACE - valueFrom: - fieldRef: - apiVersion: v1 - fieldPath: metadata.namespace - - name: FLYTE_SECRET_FLYTE_SNOWFLAKE_CLIENT_TOKEN - value: - image: flyte-binary:sandbox - ... - - .. group-tab:: Helm chart - - Create an external secret as follows: - - .. code-block:: bash - - cat < - EOF - - Reference the newly created secret in - ``.Values.configuration.auth.clientSecretsExternalSecretRef`` - in your YAML file as follows: - - .. code-block:: yaml - :emphasize-lines: 3 - - configuration: - auth: - clientSecretsExternalSecretRef: flyte-binary-client-secrets-external-secret - - Replace ```` with your JWT token. - - .. group-tab:: Flyte core - - Add the JWT token as a secret to ``flyte-secret-auth``. - - .. code-block:: bash - - kubectl edit secret -n flyte flyte-secret-auth - - .. code-block:: yaml - :emphasize-lines: 3 - - apiVersion: v1 - data: - FLYTE_SNOWFLAKE_CLIENT_TOKEN: - client_secret: Zm9vYmFy - kind: Secret - ... - - Replace ```` with your JWT token. - -Upgrade the deployment ----------------------- - -.. tabs:: - - .. group-tab:: Flyte binary - - .. tabs:: - - .. group-tab:: Demo cluster - - .. code-block:: bash - - kubectl rollout restart deployment flyte-sandbox -n flyte - - .. group-tab:: Helm chart - - .. code-block:: bash - - helm upgrade flyteorg/flyte-binary -n --values - - Replace ```` with the name of your release (e.g., ``flyte-backend``), - ```` with the name of your namespace (e.g., ``flyte``), - and ```` with the name of your YAML file. - - .. group-tab:: Flyte core - - .. code-block:: - - helm upgrade flyte/flyte-core -n --values values-override.yaml - - Replace ```` with the name of your release (e.g., ``flyte``) - and ```` with the name of your namespace (e.g., ``flyte``). - -Wait for the upgrade to complete. 
You can check the status of the deployment pods by running the following command: - -.. code-block:: - - kubectl get pods -n flyte - -For snowflake plugin on the Flyte cluster, please refer to `Snowflake Plugin Example `_ diff --git a/rsts/deployment/security/index.rst b/rsts/deployment/security/index.rst deleted file mode 100644 index 6beccb5f95..0000000000 --- a/rsts/deployment/security/index.rst +++ /dev/null @@ -1,132 +0,0 @@ -.. _deployment-security-overview: - -################### -Security Overview -################### - -.. tags:: Kubernetes, Infrastructure, Advanced - -Here we cover the security aspects of running your flyte deployments. In the current state, we will cover the user -used for running the flyte services, and go through why we do this and not run them as a root user. - -************************ -Using the Non-root User -************************ - -It's considered to be a best practice to use a non-root user for security because -running in a constrained permission environment will prevent any malicious code -from utilizing the full permissions of the host `Ref `__ -Moreover, in certain container platforms like `OpenShift `__, -running non-root containers is mandatory. - -Flyte uses OCI-compatible container technology like Docker for container packaging, -and by default, its containers run as root. This gives full permissions to the -system but may not be suitable for production deployments where a security breach -could comprise your application deployments. - -.. important:: - - As a Flyte administrator, it's up to you a to enforce whatever policy complies - with the conventions and regulations of your industry/application. - -******* -Changes -******* - -A new user group and user have been added to the Docker files for all the Flyte components: -`Flyteadmin `__, -`Flytepropeller `__, -`Datacatalog `__, -`Flyteconsole `__. - -And Dockerfile uses the `USER command `__, which sets the user -and group, that's used for running the container. 
- -Additionally, the k8s manifest files for the flyte components define the overridden security context with the created -user and group to run them. The following shows the overridden security context added for flyteadmin -`Flyteadmin `__. - - -************ -Why override -************ -Certain init-containers still require root permissions, and hence we are required to override the security -context for these. -For example: in the case of `Flyteadmin `__, -the init container of check-db-ready that runs postgres-provided docker image cannot resolve the host for the checks and fails. This is mostly due to no read -permissions on etc/hosts file. Only the check-db-ready container is run using the root user, which we will also plan to fix. - - -************ -OAuth -************ -Flytectl requires CA-certified SSL cert for OAuth to work. Using a self-signed certificate throws the following error: - -.. code-block:: - - certificate is not standards compliant. - -There are two options to fix this: - -#. Switch to a full external OAuth Provider (okta, GCP cloud identity, keycloak, Azure AD, etc.). -#. Use a CA-certified SSL cert. - -******************************************************** -Running flyteadmin and flyteconsole on different domains -******************************************************** - -In some cases when flyteadmin and flyteconsole are running on different domains, -you'll would need to allow the flyteadmin's domain to allow cross origin request -from the flyteconsole's domain. Here are all the domains/namespaces to keep in -mind: - -- ````: the domain which will get the request. -- ````: the domain which will be sending the request as the originator. -- ````: the k8s namespace where your flyteconsole pod is running. -- ````: the k8s namespace where your flyteadmin pod is running. - -Modify FlyteAdmin Config -======================== - -To modify the FlyteConsole deployment to use ````, do the following: - -.. 
prompt:: bash $ - - kubectl edit deployment flyteconsole -n - -.. code-block:: yaml - - - env: - - name: ENABLE_GA - value: "true" - - name: GA_TRACKING_ID - value: G-0123456789 - - name: ADMIN_API_URL - value: https:// - -Rollout FlyteConsole - -.. prompt:: bash $ - - kubectl rollout restart deployment/flyteconsole -n - -Modify the flyte-admin-config as follows: - -.. prompt:: bash $ - - kubectl edit configmap flyte-admin-config -n - -.. code-block:: yaml - - security: - allowCors: true - ...... - allowedOrigins: - - 'https://' - ...... - -Finally, rollout FlyteAdmin - -.. prompt:: bash $ - - kubectl rollout restart deployment/flyteadmin -n diff --git a/rsts/images/flyte-and-lf.png b/rsts/images/flyte-and-lf.png deleted file mode 100644 index 47c8958805..0000000000 Binary files a/rsts/images/flyte-and-lf.png and /dev/null differ diff --git a/rsts/images/flyte_circle_gradient_1_4x4.png b/rsts/images/flyte_circle_gradient_1_4x4.png deleted file mode 100644 index 49cdbbbc34..0000000000 Binary files a/rsts/images/flyte_circle_gradient_1_4x4.png and /dev/null differ diff --git a/rsts/images/flyte_lockup_gradient_on_light.png b/rsts/images/flyte_lockup_gradient_on_light.png deleted file mode 100644 index 1f986f45ad..0000000000 Binary files a/rsts/images/flyte_lockup_gradient_on_light.png and /dev/null differ diff --git a/rsts/images/flyte_lockup_on_dark.png b/rsts/images/flyte_lockup_on_dark.png deleted file mode 100644 index c6facc4ea4..0000000000 Binary files a/rsts/images/flyte_lockup_on_dark.png and /dev/null differ diff --git a/rsts/index.rst b/rsts/index.rst deleted file mode 100644 index 9468f92cf5..0000000000 --- a/rsts/index.rst +++ /dev/null @@ -1,318 +0,0 @@ -.. toctree:: - :maxdepth: 1 - :name: mainsections - :titlesonly: - :hidden: - - |plane| Getting Started - |book-reader| User Guide - |chalkboard| Tutorials - |project-diagram| Concepts - |rocket| Deployment and Administration - |book| API Reference - |hands-helping| Community - -.. 
toctree:: - :caption: Concepts - :maxdepth: -1 - :name: divedeeptoc - :hidden: - - concepts/basics - concepts/control_plane - concepts/architecture - -.. toctree:: - :caption: Deployment - :maxdepth: -1 - :name: deploymenttoc - :hidden: - - deployment/index - deployment/deployment/index - deployment/plugins/index - deployment/agents/index - deployment/configuration/index - deployment/configuration/generated/index - deployment/security/index - - -.. toctree:: - :caption: Community - :maxdepth: -1 - :name: roadmaptoc - :hidden: - - Community - community/contribute - community/roadmap - Frequently Asked Questions - community/troubleshoot - -.. toctree:: - :caption: API Reference - :maxdepth: -1 - :name: apireference - :hidden: - - API Reference - - -************************************************* -Production-grade Data and ML Workflows Made Easy -************************************************* - -.. image:: https://img.shields.io/badge/Graduate%20Project-Linux%20Foundation-purple?style=for-the-badge - :target: https://lfaidata.foundation/projects/flyte/ - :alt: Linux Foundation - -.. image:: https://img.shields.io/github/stars/flyteorg/flyte?label=github&logo=github&style=for-the-badge - :target: https://github.com/flyteorg/flyte - :alt: GitHub Repo stars - -.. image:: https://img.shields.io/github/release/flyteorg/flyte.svg?style=for-the-badge&color=blue - :target: https://github.com/flyteorg/flyte/releases/latest - :alt: Flyte Release - -.. image:: https://img.shields.io/github/actions/workflow/status/flyteorg/flyte/tests.yml?label=tests&style=for-the-badge - :target: https://github.com/flyteorg/flyte/actions/workflows/tests.yml - :alt: GitHub Test Status - -.. image:: https://img.shields.io/github/actions/workflow/status/flyteorg/flyte/sandbox.yml?label=Sandbox%20docker%20image&style=for-the-badge - :target: https://github.com/flyteorg/flyte/actions/workflows/sandbox.yml - :alt: GitHub Sandbox Status - -.. 
image:: https://img.shields.io/github/milestones/closed/flyteorg/flyte?style=for-the-badge - :target: https://github.com/flyteorg/flyte/milestones?state=closed - :alt: Completed Milestones - -.. image:: https://img.shields.io/pypi/dm/flytekit?color=blue&label=flytekit%20downloads&style=for-the-badge&logo=pypi&logoColor=white - :target: https://github.com/flyteorg/flytekit - :alt: Flytekit Downloads - -.. image:: https://img.shields.io/badge/Slack-Chat-pink?style=for-the-badge&logo=slack - :target: https://slack.flyte.org - :alt: Flyte Slack - -.. image:: https://img.shields.io/badge/LICENSE-Apache2.0-ff69b4.svg?style=for-the-badge - :target: http://www.apache.org/licenses/LICENSE-2.0.html - :alt: License - -.. raw:: html - -

- Highly scalable and flexible workflow orchestration for prototyping and production -

- -`Flyte Tags <_tags/tagsindex.html>`__ - -`Flyte `__ is an open-source, Kubernetes-native -workflow orchestrator implemented in `Go `__. It enables highly -concurrent, scalable and reproducible workflows for data processing, machine -learning and analytics. - -Created at `Lyft `__ in collaboration with Spotify, -Freenome, and many others, Flyte provides first-class support for -`Python `__, -`Java, and Scala `__. Data Scientists -and ML Engineers in the industry use Flyte to create: - -- ETL pipelines for petabyte-scale data processing. -- Analytics workflows for business and finance use cases. -- Machine learning pipelines for logistics, image processing, and cancer diagnostics. - -Explore Flyte -============= - -Get a birds-eye view 🦅 of Flyte at the `official website `__: - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. link-button:: https://flyte.org/features - :type: url - :text: ⭐️ Core features - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - From strongly typed interfaces to container-native DAGs, Flyte mitigates the - trade-off between scalability and usability. - - --- - - .. link-button:: https://flyte.org/integrations - :type: url - :text: 🤝 Integrations - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - From strongly typed interfaces to container-native DAGs, Flyte mitigates the - trade-off between scalability and usability. - - --- - - .. link-button:: https://flyte.org/airflow-alternative - :type: url - :text: 💨 Flyte vs Airflow - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Say goodbye to Airflow versioning pain and stepping over your teammate's toes - when you change your package versions. Ouch! - - --- - - .. link-button:: https://flyte.org/kubeflow-alternative - :type: url - :text: 🔁 Flyte vs Kubeflow - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Unintuitive Python DSL boilerplate got you down? With ``flytekit`` you just - write Python code and Flyte compiles down to type-safe execution graphs. 
- - --- - - .. link-button:: https://flyte.org/blog - :type: url - :text: 📝 Blog - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Learn more about orchestration, Flyte, and everything in between. - - --- - - .. link-button:: https://flyte.org/events - :type: url - :text: 🗓 Events - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Keep up-to-date with Flyte's upcoming talks, conferences, and more. - - -Learn Flyte -=========== - -The following main sections in the documentation will guide you through your -Flyte journey, whether you want to write Flyte workflows, deploy the Flyte -platform to your K8s cluster, or extend and contribute its architecture and -design. - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. link-button:: cookbook:getting_started_index - :type: ref - :text: 🔤 Getting Started - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Get your first workflow running, learn about the Flyte development lifecycle, - and see the core use cases that Flyte enables. - - --- - - .. link-button:: cookbook:userguide - :type: ref - :text: 📖 User Guide - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - A comprehensive view of Flyte's functionality for data scientists, ML engineers, - data engineers, and data analysts. - - --- - - .. link-button:: cookbook:tutorials - :type: ref - :text: 📚 Tutorials - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - End-to-end examples of Flyte for data/feature engineering, machine learning, - bioinformatics, and more. - - --- - - .. link-button:: cookbook:integrations - :type: ref - :text: 🤝 Integrations - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Learn how to leverage a rich ecosystem of third-party tools and libraries - to make your Flyte workflows even more effective. - - --- - - .. 
link-button:: deployment - :type: ref - :text: 🚀 Deployment Guide - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Guides for platform engineers to deploy and maintain a Flyte cluster on your - own infrastructure. - - --- - - .. link-button:: reference - :type: ref - :text: 📒 API Reference - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Reference for all of Flyte's component libraries. - - --- - - .. link-button:: divedeep - :type: ref - :text: 🧠 Concepts - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Dive deep into all of Flyte's concepts, from tasks and workflows to the underlying Flyte scheduler. - - --- - - .. link-button:: community - :type: ref - :text: 🤗 Community - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Join the fast-growing Flyte community to get help, ask questions, and contribute! - -Get Help -======== - -Have questions or need support? The best way to reach us is through Slack: - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. link-button:: https://flyte-org.slack.com/archives/CP2HDHKE1 - :type: url - :text: 🤔 Ask the Community - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Ask anything related to Flyte and get a response within a few hours. - - --- - - .. link-button:: https://flyte-org.slack.com/archives/C01RXBFV1M5 - :type: url - :text: 👋 Introduce yourself - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Tell us about yourself. We'd love to know about you and what brings you to Flyte. - - --- - - .. link-button:: https://flyte-org.slack.com/archives/CPQ3ZFQ84 - :type: url - :text: 💭 Share ideas - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Share any suggestions or feedback you have on how to make Flyte better. - - --- - - .. link-button:: https://flyte-org.slack.com/archives/C01P3B761A6 - :type: url - :text: 🛠 Get help with deploment - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - If you need any help with Flyte deployment, hit us up. 
diff --git a/rsts/reference/index.rst b/rsts/reference/index.rst deleted file mode 100644 index 489cf2e2b3..0000000000 --- a/rsts/reference/index.rst +++ /dev/null @@ -1,75 +0,0 @@ -.. _reference: - -############# -API Reference -############# - -.. panels:: - :header: text-center - :column: col-lg-12 p-2 - - .. link-button:: https://flytectl.readthedocs.io - :type: url - :text: Flytectl - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - The official Flyte Command-line Interface. - - --- - - .. link-button:: https://flyteidl.readthedocs.io - :type: url - :text: FlyteIDL - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - The core language specification and backend service API specification for Flyte. - - --- - - .. link-button:: https://flytekit.readthedocs.io - :type: url - :text: Flytekit - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - The Python SDK for Flyte. - - --- - - .. link-button:: https://github.com/spotify/flytekit-java - :type: url - :text: Flytekit-java - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - The Java/Scala SDK for Flyte. - - --- - - .. link-button:: https://docs.flyte.org/projects/flyteidl/en/latest/protos/docs/service/service.html - :type: url - :text: FlyteAdmin - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - Flyte Backend REST/gRPC API specification. - - --- - - .. link-button:: reference-swagger - :type: ref - :text: Flyte API Swagger Playground - :classes: btn-block stretched-link - ^^^^^^^^^^^^ - FlyteAdmin exposes a REST interface. Try out the API using an interactive environment. - - -.. toctree:: - :maxdepth: 1 - :caption: API Reference - :name: apitoc - :hidden: - - FlyteCTL - FlyteIDL - Flytekit Python - Flytekit Java - FlyteAdmin - swagger diff --git a/rsts/reference/swagger.rst b/rsts/reference/swagger.rst deleted file mode 100644 index 9344a81357..0000000000 --- a/rsts/reference/swagger.rst +++ /dev/null @@ -1,31 +0,0 @@ -.. 
_reference-swagger: - -############################# -Flyte API Playground: Swagger -############################# - -.. tags:: Basic - -Flyte services expose gRPC services for efficient/low latency communication across all services as well as for external clients (FlyteCTL, FlyteConsole, Flytekit Remote, etc.). - -The service definitions are defined `here `__. -FlyteIDL also houses open API schema definitions for the exposed services: - -- `Admin `__ -- `Auth `__ -- `Identity `__ - -To view the UI, run the following command: - -.. prompt:: bash $ - - flytectl demo start - -Once sandbox setup is complete, a ready-to-explore message is shown: - -.. prompt:: - - 👨‍💻 Flyte is ready! Flyte UI is available at http://localhost:30081/console 🚀 🚀 🎉 - - -Visit ``http://localhost:30080/api/v1/openapi`` to view the swagger documentation of the payload fields.